path: root/contrib/libs/grpc/src/python/grpcio
author    akastornov <akastornov@yandex-team.ru>    2022-02-10 16:46:03 +0300
committer Daniil Cherednik <dcherednik@yandex-team.ru>    2022-02-10 16:46:03 +0300
commit    3a2de774d91ca8d7325aaf81c200b1d2047725e6 (patch)
tree      5674a780ce03a8bbd794733a19c7a70d587e4a14 /contrib/libs/grpc/src/python/grpcio
parent    7bd11ff35e97544d119e43447e3e865f2588ee7f (diff)
download  ydb-3a2de774d91ca8d7325aaf81c200b1d2047725e6.tar.gz
Restoring authorship annotation for <akastornov@yandex-team.ru>. Commit 1 of 2.
Diffstat (limited to 'contrib/libs/grpc/src/python/grpcio')
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/README.rst  200
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/_spawn_patch.py  100
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/commands.py  450
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/__init__.py  1304
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_auth.py  68
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_channel.py  920
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_common.py  146
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/README.rst  104
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/__init__.py  6
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/__init__.py  6
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/call.pxd.pxi  20
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/call.pyx.pxi  130
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/channel.pxd.pxi  14
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/channel.pyx.pxi  30
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/completion_queue.pxd.pxi  22
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/completion_queue.pyx.pxi  80
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/credentials.pxd.pxi  42
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/credentials.pyx.pxi  102
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc.pxi  868
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc_string.pyx.pxi  26
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/records.pxd.pxi  42
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/records.pyx.pxi  304
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/security.pxd.pxi  12
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/security.pyx.pxi  16
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/server.pxd.pxi  34
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/server.pyx.pxi  186
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/cygrpc.pxd  26
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_cython/cygrpc.pyx  54
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_plugin_wrapping.py  62
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_server.py  1034
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/_utilities.py  274
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/beta/__init__.py  6
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/beta/_client_adaptations.py  1054
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/beta/_server_adaptations.py  612
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/beta/implementations.py  554
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/beta/interfaces.py  310
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/beta/utilities.py  274
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/framework/__init__.py  6
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/framework/common/__init__.py  6
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/framework/common/cardinality.py  32
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/framework/common/style.py  28
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/__init__.py  6
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/abandonment.py  24
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/callable_util.py  164
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/future.py  422
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/logging_pool.py  112
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/stream.py  70
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/stream_util.py  264
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/__init__.py  6
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/base/__init__.py  6
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/base/base.py  612
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/base/utilities.py  100
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/face/__init__.py  6
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/face/face.py  2060
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/face/utilities.py  316
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc_core_dependencies.py  16
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/grpc_version.py  12
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/support.py  178
-rw-r--r--  contrib/libs/grpc/src/python/grpcio/ya.make  58
59 files changed, 6998 insertions, 6998 deletions
diff --git a/contrib/libs/grpc/src/python/grpcio/README.rst b/contrib/libs/grpc/src/python/grpcio/README.rst
index fe34ad5adf..fc053a673a 100644
--- a/contrib/libs/grpc/src/python/grpcio/README.rst
+++ b/contrib/libs/grpc/src/python/grpcio/README.rst
@@ -1,10 +1,10 @@
-gRPC Python
-===========
-
+gRPC Python
+===========
+
|compat_check_pypi|
-Package for gRPC Python.
-
+Package for gRPC Python.
+
.. |compat_check_pypi| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=grpcio
:target: https://python-compatibility-tools.appspot.com/one_badge_target?package=grpcio
@@ -12,104 +12,104 @@ Supported Python Versions
-------------------------
Python >= 3.5
-Installation
-------------
-
+Installation
+------------
+
gRPC Python is available for Linux, macOS, and Windows.
-
+
Installing From PyPI
~~~~~~~~~~~~~~~~~~~~
-
-If you are installing locally...
-
-::
-
- $ pip install grpcio
-
-Else system wide (on Ubuntu)...
-
-::
-
- $ sudo pip install grpcio
-
-If you're on Windows make sure that you installed the :code:`pip.exe` component
-when you installed Python (if not go back and install it!) then invoke:
-
-::
-
- $ pip.exe install grpcio
-
-Windows users may need to invoke :code:`pip.exe` from a command line ran as
-administrator.
-
-n.b. On Windows and on Mac OS X one *must* have a recent release of :code:`pip`
-to retrieve the proper wheel from PyPI. Be sure to upgrade to the latest
-version!
-
+
+If you are installing locally...
+
+::
+
+ $ pip install grpcio
+
+Else system wide (on Ubuntu)...
+
+::
+
+ $ sudo pip install grpcio
+
+If you're on Windows make sure that you installed the :code:`pip.exe` component
+when you installed Python (if not go back and install it!) then invoke:
+
+::
+
+ $ pip.exe install grpcio
+
+Windows users may need to invoke :code:`pip.exe` from a command line ran as
+administrator.
+
+n.b. On Windows and on Mac OS X one *must* have a recent release of :code:`pip`
+to retrieve the proper wheel from PyPI. Be sure to upgrade to the latest
+version!
+
Installing From Source
~~~~~~~~~~~~~~~~~~~~~~
-
-Building from source requires that you have the Python headers (usually a
-package named :code:`python-dev`).
-
-::
-
- $ export REPO_ROOT=grpc # REPO_ROOT can be any directory of your choice
+
+Building from source requires that you have the Python headers (usually a
+package named :code:`python-dev`).
+
+::
+
+ $ export REPO_ROOT=grpc # REPO_ROOT can be any directory of your choice
$ git clone -b RELEASE_TAG_HERE https://github.com/grpc/grpc $REPO_ROOT
- $ cd $REPO_ROOT
- $ git submodule update --init
-
- # For the next two commands do `sudo pip install` if you get permission-denied errors
- $ pip install -rrequirements.txt
- $ GRPC_PYTHON_BUILD_WITH_CYTHON=1 pip install .
-
-You cannot currently install Python from source on Windows. Things might work
-out for you in MSYS2 (follow the Linux instructions), but it isn't officially
-supported at the moment.
-
-Troubleshooting
-~~~~~~~~~~~~~~~
-
-Help, I ...
-
-* **... see a** :code:`pkg_resources.VersionConflict` **when I try to install
- grpc**
-
- This is likely because :code:`pip` doesn't own the offending dependency,
- which in turn is likely because your operating system's package manager owns
- it. You'll need to force the installation of the dependency:
-
- :code:`pip install --ignore-installed $OFFENDING_DEPENDENCY`
-
- For example, if you get an error like the following:
-
- ::
-
- Traceback (most recent call last):
- File "<string>", line 17, in <module>
- ...
- File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 509, in find
- raise VersionConflict(dist, req)
- pkg_resources.VersionConflict: (six 1.8.0 (/usr/lib/python2.7/dist-packages), Requirement.parse('six>=1.10'))
-
- You can fix it by doing:
-
- ::
-
- sudo pip install --ignore-installed six
-
-* **... see the following error on some platforms**
-
- ::
-
- /tmp/pip-build-U8pSsr/cython/Cython/Plex/Scanners.c:4:20: fatal error: Python.h: No such file or directory
- #include "Python.h"
- ^
- compilation terminated.
-
- You can fix it by installing `python-dev` package. i.e
-
- ::
-
- sudo apt-get install python-dev
-
+ $ cd $REPO_ROOT
+ $ git submodule update --init
+
+ # For the next two commands do `sudo pip install` if you get permission-denied errors
+ $ pip install -rrequirements.txt
+ $ GRPC_PYTHON_BUILD_WITH_CYTHON=1 pip install .
+
+You cannot currently install Python from source on Windows. Things might work
+out for you in MSYS2 (follow the Linux instructions), but it isn't officially
+supported at the moment.
+
+Troubleshooting
+~~~~~~~~~~~~~~~
+
+Help, I ...
+
+* **... see a** :code:`pkg_resources.VersionConflict` **when I try to install
+ grpc**
+
+ This is likely because :code:`pip` doesn't own the offending dependency,
+ which in turn is likely because your operating system's package manager owns
+ it. You'll need to force the installation of the dependency:
+
+ :code:`pip install --ignore-installed $OFFENDING_DEPENDENCY`
+
+ For example, if you get an error like the following:
+
+ ::
+
+ Traceback (most recent call last):
+ File "<string>", line 17, in <module>
+ ...
+ File "/usr/lib/python2.7/dist-packages/pkg_resources.py", line 509, in find
+ raise VersionConflict(dist, req)
+ pkg_resources.VersionConflict: (six 1.8.0 (/usr/lib/python2.7/dist-packages), Requirement.parse('six>=1.10'))
+
+ You can fix it by doing:
+
+ ::
+
+ sudo pip install --ignore-installed six
+
+* **... see the following error on some platforms**
+
+ ::
+
+ /tmp/pip-build-U8pSsr/cython/Cython/Plex/Scanners.c:4:20: fatal error: Python.h: No such file or directory
+ #include "Python.h"
+ ^
+ compilation terminated.
+
+ You can fix it by installing `python-dev` package. i.e
+
+ ::
+
+ sudo apt-get install python-dev
+
diff --git a/contrib/libs/grpc/src/python/grpcio/_spawn_patch.py b/contrib/libs/grpc/src/python/grpcio/_spawn_patch.py
index 377cc7a9f3..240b648744 100644
--- a/contrib/libs/grpc/src/python/grpcio/_spawn_patch.py
+++ b/contrib/libs/grpc/src/python/grpcio/_spawn_patch.py
@@ -1,60 +1,60 @@
# Copyright 2016 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Patches the spawn() command for windows compilers.
-
-Windows has an 8191 character command line limit, but some compilers
-support an @command_file directive where command_file is a file
-containing the full command line.
-"""
-
-from distutils import ccompiler
-import os
-import os.path
-import shutil
-import sys
-import tempfile
-
-MAX_COMMAND_LENGTH = 8191
-
-_classic_spawn = ccompiler.CCompiler.spawn
-
-
-def _commandfile_spawn(self, command):
- command_length = sum([len(arg) for arg in command])
- if os.name == 'nt' and command_length > MAX_COMMAND_LENGTH:
- # Even if this command doesn't support the @command_file, it will
- # fail as is so we try blindly
- print('Command line length exceeded, using command file')
- print(' '.join(command))
- temporary_directory = tempfile.mkdtemp()
- command_filename = os.path.abspath(
- os.path.join(temporary_directory, 'command'))
- with open(command_filename, 'w') as command_file:
- escaped_args = [
- '"' + arg.replace('\\', '\\\\') + '"' for arg in command[1:]
- ]
- command_file.write(' '.join(escaped_args))
- modified_command = command[:1] + ['@{}'.format(command_filename)]
- try:
- _classic_spawn(self, modified_command)
- finally:
- shutil.rmtree(temporary_directory)
- else:
- _classic_spawn(self, command)
-
-
-def monkeypatch_spawn():
- """Monkeypatching is dumb, but it's either that or we become maintainers of
- something much, much bigger."""
- ccompiler.CCompiler.spawn = _commandfile_spawn
+"""Patches the spawn() command for windows compilers.
+
+Windows has an 8191 character command line limit, but some compilers
+support an @command_file directive where command_file is a file
+containing the full command line.
+"""
+
+from distutils import ccompiler
+import os
+import os.path
+import shutil
+import sys
+import tempfile
+
+MAX_COMMAND_LENGTH = 8191
+
+_classic_spawn = ccompiler.CCompiler.spawn
+
+
+def _commandfile_spawn(self, command):
+ command_length = sum([len(arg) for arg in command])
+ if os.name == 'nt' and command_length > MAX_COMMAND_LENGTH:
+ # Even if this command doesn't support the @command_file, it will
+ # fail as is so we try blindly
+ print('Command line length exceeded, using command file')
+ print(' '.join(command))
+ temporary_directory = tempfile.mkdtemp()
+ command_filename = os.path.abspath(
+ os.path.join(temporary_directory, 'command'))
+ with open(command_filename, 'w') as command_file:
+ escaped_args = [
+ '"' + arg.replace('\\', '\\\\') + '"' for arg in command[1:]
+ ]
+ command_file.write(' '.join(escaped_args))
+ modified_command = command[:1] + ['@{}'.format(command_filename)]
+ try:
+ _classic_spawn(self, modified_command)
+ finally:
+ shutil.rmtree(temporary_directory)
+ else:
+ _classic_spawn(self, command)
+
+
+def monkeypatch_spawn():
+ """Monkeypatching is dumb, but it's either that or we become maintainers of
+ something much, much bigger."""
+ ccompiler.CCompiler.spawn = _commandfile_spawn
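The _spawn_patch.py module above works around the Windows 8191-character command line limit by rewriting over-long compiler invocations to use an @command_file. A minimal usage sketch (an illustration, not taken from this diff; it assumes the module is importable from the build script's directory)::

    # Hedged sketch: apply the spawn patch before building extensions.
    # Assumption: _spawn_patch.py sits next to the setup script.
    import _spawn_patch

    # Replaces distutils' CCompiler.spawn so that, on Windows, command
    # lines longer than 8191 characters are written to a temporary
    # command file and passed to the compiler as '@<file>'.
    _spawn_patch.monkeypatch_spawn()

    # setuptools.setup(...) with ext_modules would follow here.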
diff --git a/contrib/libs/grpc/src/python/grpcio/commands.py b/contrib/libs/grpc/src/python/grpcio/commands.py
index 8240beb295..a662dc0ff7 100644
--- a/contrib/libs/grpc/src/python/grpcio/commands.py
+++ b/contrib/libs/grpc/src/python/grpcio/commands.py
@@ -1,101 +1,101 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Provides distutils command classes for the GRPC Python setup process."""
-
+"""Provides distutils command classes for the GRPC Python setup process."""
+
from __future__ import print_function
-import distutils
-import glob
-import os
-import os.path
-import platform
-import re
-import shutil
-import subprocess
-import sys
-import traceback
-
-import setuptools
-from setuptools.command import build_ext
-from setuptools.command import build_py
-from setuptools.command import easy_install
-from setuptools.command import install
-from setuptools.command import test
-
-import support
-
-PYTHON_STEM = os.path.dirname(os.path.abspath(__file__))
-GRPC_STEM = os.path.abspath(PYTHON_STEM + '../../../../')
-PROTO_STEM = os.path.join(GRPC_STEM, 'src', 'proto')
-PROTO_GEN_STEM = os.path.join(GRPC_STEM, 'src', 'python', 'gens')
-CYTHON_STEM = os.path.join(PYTHON_STEM, 'grpc', '_cython')
-
-
-class CommandError(Exception):
- """Simple exception class for GRPC custom commands."""
-
-
-# TODO(atash): Remove this once PyPI has better Linux bdist support. See
-# https://bitbucket.org/pypa/pypi/issues/120/binary-wheels-for-linux-are-not-supported
-def _get_grpc_custom_bdist(decorated_basename, target_bdist_basename):
- """Returns a string path to a bdist file for Linux to install.
-
- If we can retrieve a pre-compiled bdist from online, uses it. Else, emits a
- warning and builds from source.
- """
- # TODO(atash): somehow the name that's returned from `wheel` is different
- # between different versions of 'wheel' (but from a compatibility standpoint,
- # the names are compatible); we should have some way of determining name
- # compatibility in the same way `wheel` does to avoid having to rename all of
- # the custom wheels that we build/upload to GCS.
-
- # Break import style to ensure that setup.py has had a chance to install the
- # relevant package.
- from six.moves.urllib import request
- decorated_path = decorated_basename + GRPC_CUSTOM_BDIST_EXT
- try:
- url = BINARIES_REPOSITORY + '/{target}'.format(target=decorated_path)
- bdist_data = request.urlopen(url).read()
- except IOError as error:
- raise CommandError('{}\n\nCould not find the bdist {}: {}'.format(
- traceback.format_exc(), decorated_path, error.message))
- # Our chosen local bdist path.
- bdist_path = target_bdist_basename + GRPC_CUSTOM_BDIST_EXT
- try:
- with open(bdist_path, 'w') as bdist_file:
- bdist_file.write(bdist_data)
- except IOError as error:
+import distutils
+import glob
+import os
+import os.path
+import platform
+import re
+import shutil
+import subprocess
+import sys
+import traceback
+
+import setuptools
+from setuptools.command import build_ext
+from setuptools.command import build_py
+from setuptools.command import easy_install
+from setuptools.command import install
+from setuptools.command import test
+
+import support
+
+PYTHON_STEM = os.path.dirname(os.path.abspath(__file__))
+GRPC_STEM = os.path.abspath(PYTHON_STEM + '../../../../')
+PROTO_STEM = os.path.join(GRPC_STEM, 'src', 'proto')
+PROTO_GEN_STEM = os.path.join(GRPC_STEM, 'src', 'python', 'gens')
+CYTHON_STEM = os.path.join(PYTHON_STEM, 'grpc', '_cython')
+
+
+class CommandError(Exception):
+ """Simple exception class for GRPC custom commands."""
+
+
+# TODO(atash): Remove this once PyPI has better Linux bdist support. See
+# https://bitbucket.org/pypa/pypi/issues/120/binary-wheels-for-linux-are-not-supported
+def _get_grpc_custom_bdist(decorated_basename, target_bdist_basename):
+ """Returns a string path to a bdist file for Linux to install.
+
+ If we can retrieve a pre-compiled bdist from online, uses it. Else, emits a
+ warning and builds from source.
+ """
+ # TODO(atash): somehow the name that's returned from `wheel` is different
+ # between different versions of 'wheel' (but from a compatibility standpoint,
+ # the names are compatible); we should have some way of determining name
+ # compatibility in the same way `wheel` does to avoid having to rename all of
+ # the custom wheels that we build/upload to GCS.
+
+ # Break import style to ensure that setup.py has had a chance to install the
+ # relevant package.
+ from six.moves.urllib import request
+ decorated_path = decorated_basename + GRPC_CUSTOM_BDIST_EXT
+ try:
+ url = BINARIES_REPOSITORY + '/{target}'.format(target=decorated_path)
+ bdist_data = request.urlopen(url).read()
+ except IOError as error:
+ raise CommandError('{}\n\nCould not find the bdist {}: {}'.format(
+ traceback.format_exc(), decorated_path, error.message))
+ # Our chosen local bdist path.
+ bdist_path = target_bdist_basename + GRPC_CUSTOM_BDIST_EXT
+ try:
+ with open(bdist_path, 'w') as bdist_file:
+ bdist_file.write(bdist_data)
+ except IOError as error:
raise CommandError('{}\n\nCould not write grpcio bdist: {}'.format(
traceback.format_exc(), error.message))
- return bdist_path
-
-
-class SphinxDocumentation(setuptools.Command):
- """Command to generate documentation via sphinx."""
-
- description = 'generate sphinx documentation'
- user_options = []
-
- def initialize_options(self):
- pass
-
- def finalize_options(self):
- pass
-
- def run(self):
- # We import here to ensure that setup.py has had a chance to install the
- # relevant package eggs first.
+ return bdist_path
+
+
+class SphinxDocumentation(setuptools.Command):
+ """Command to generate documentation via sphinx."""
+
+ description = 'generate sphinx documentation'
+ user_options = []
+
+ def initialize_options(self):
+ pass
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ # We import here to ensure that setup.py has had a chance to install the
+ # relevant package eggs first.
import sphinx.cmd.build
source_dir = os.path.join(GRPC_STEM, 'doc', 'python', 'sphinx')
target_dir = os.path.join(GRPC_STEM, 'doc', 'build')
@@ -104,114 +104,114 @@ class SphinxDocumentation(setuptools.Command):
if exit_code != 0:
raise CommandError(
"Documentation generation has warnings or errors")
-
-
-class BuildProjectMetadata(setuptools.Command):
- """Command to generate project metadata in a module."""
-
- description = 'build grpcio project metadata files'
- user_options = []
-
- def initialize_options(self):
- pass
-
- def finalize_options(self):
- pass
-
- def run(self):
- with open(os.path.join(PYTHON_STEM, 'grpc/_grpcio_metadata.py'),
- 'w') as module_file:
- module_file.write('__version__ = """{}"""'.format(
- self.distribution.get_version()))
-
-
-class BuildPy(build_py.build_py):
- """Custom project build command."""
-
- def run(self):
- self.run_command('build_project_metadata')
- build_py.build_py.run(self)
-
-
-def _poison_extensions(extensions, message):
- """Includes a file that will always fail to compile in all extensions."""
- poison_filename = os.path.join(PYTHON_STEM, 'poison.c')
- with open(poison_filename, 'w') as poison:
- poison.write('#error {}'.format(message))
- for extension in extensions:
- extension.sources = [poison_filename]
-
-
-def check_and_update_cythonization(extensions):
- """Replace .pyx files with their generated counterparts and return whether or
- not cythonization still needs to occur."""
- for extension in extensions:
- generated_pyx_sources = []
- other_sources = []
- for source in extension.sources:
- base, file_ext = os.path.splitext(source)
- if file_ext == '.pyx':
+
+
+class BuildProjectMetadata(setuptools.Command):
+ """Command to generate project metadata in a module."""
+
+ description = 'build grpcio project metadata files'
+ user_options = []
+
+ def initialize_options(self):
+ pass
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ with open(os.path.join(PYTHON_STEM, 'grpc/_grpcio_metadata.py'),
+ 'w') as module_file:
+ module_file.write('__version__ = """{}"""'.format(
+ self.distribution.get_version()))
+
+
+class BuildPy(build_py.build_py):
+ """Custom project build command."""
+
+ def run(self):
+ self.run_command('build_project_metadata')
+ build_py.build_py.run(self)
+
+
+def _poison_extensions(extensions, message):
+ """Includes a file that will always fail to compile in all extensions."""
+ poison_filename = os.path.join(PYTHON_STEM, 'poison.c')
+ with open(poison_filename, 'w') as poison:
+ poison.write('#error {}'.format(message))
+ for extension in extensions:
+ extension.sources = [poison_filename]
+
+
+def check_and_update_cythonization(extensions):
+ """Replace .pyx files with their generated counterparts and return whether or
+ not cythonization still needs to occur."""
+ for extension in extensions:
+ generated_pyx_sources = []
+ other_sources = []
+ for source in extension.sources:
+ base, file_ext = os.path.splitext(source)
+ if file_ext == '.pyx':
generated_pyx_source = next((base + gen_ext for gen_ext in (
'.c',
'.cpp',
) if os.path.isfile(base + gen_ext)), None)
- if generated_pyx_source:
- generated_pyx_sources.append(generated_pyx_source)
- else:
- sys.stderr.write('Cython-generated files are missing...\n')
- return False
- else:
- other_sources.append(source)
- extension.sources = generated_pyx_sources + other_sources
- sys.stderr.write('Found cython-generated files...\n')
- return True
-
-
-def try_cythonize(extensions, linetracing=False, mandatory=True):
- """Attempt to cythonize the extensions.
-
- Args:
- extensions: A list of `distutils.extension.Extension`.
- linetracing: A bool indicating whether or not to enable linetracing.
- mandatory: Whether or not having Cython-generated files is mandatory. If it
- is, extensions will be poisoned when they can't be fully generated.
- """
- try:
- # Break import style to ensure we have access to Cython post-setup_requires
- import Cython.Build
- except ImportError:
- if mandatory:
- sys.stderr.write(
- "This package needs to generate C files with Cython but it cannot. "
- "Poisoning extension sources to disallow extension commands...")
- _poison_extensions(
- extensions,
- "Extensions have been poisoned due to missing Cython-generated code."
- )
- return extensions
- cython_compiler_directives = {}
- if linetracing:
- additional_define_macros = [('CYTHON_TRACE_NOGIL', '1')]
- cython_compiler_directives['linetrace'] = True
- return Cython.Build.cythonize(
- extensions,
- include_path=[
+ if generated_pyx_source:
+ generated_pyx_sources.append(generated_pyx_source)
+ else:
+ sys.stderr.write('Cython-generated files are missing...\n')
+ return False
+ else:
+ other_sources.append(source)
+ extension.sources = generated_pyx_sources + other_sources
+ sys.stderr.write('Found cython-generated files...\n')
+ return True
+
+
+def try_cythonize(extensions, linetracing=False, mandatory=True):
+ """Attempt to cythonize the extensions.
+
+ Args:
+ extensions: A list of `distutils.extension.Extension`.
+ linetracing: A bool indicating whether or not to enable linetracing.
+ mandatory: Whether or not having Cython-generated files is mandatory. If it
+ is, extensions will be poisoned when they can't be fully generated.
+ """
+ try:
+ # Break import style to ensure we have access to Cython post-setup_requires
+ import Cython.Build
+ except ImportError:
+ if mandatory:
+ sys.stderr.write(
+ "This package needs to generate C files with Cython but it cannot. "
+ "Poisoning extension sources to disallow extension commands...")
+ _poison_extensions(
+ extensions,
+ "Extensions have been poisoned due to missing Cython-generated code."
+ )
+ return extensions
+ cython_compiler_directives = {}
+ if linetracing:
+ additional_define_macros = [('CYTHON_TRACE_NOGIL', '1')]
+ cython_compiler_directives['linetrace'] = True
+ return Cython.Build.cythonize(
+ extensions,
+ include_path=[
include_dir for extension in extensions
- for include_dir in extension.include_dirs
- ] + [CYTHON_STEM],
- compiler_directives=cython_compiler_directives)
-
-
-class BuildExt(build_ext.build_ext):
- """Custom build_ext command to enable compiler-specific flags."""
-
- C_OPTIONS = {
+ for include_dir in extension.include_dirs
+ ] + [CYTHON_STEM],
+ compiler_directives=cython_compiler_directives)
+
+
+class BuildExt(build_ext.build_ext):
+ """Custom build_ext command to enable compiler-specific flags."""
+
+ C_OPTIONS = {
'unix': ('-pthread',),
- 'msvc': (),
- }
- LINK_OPTIONS = {}
-
- def build_extensions(self):
+ 'msvc': (),
+ }
+ LINK_OPTIONS = {}
+
+ def build_extensions(self):
def compiler_ok_with_extra_std():
"""Test if default compiler is okay with specifying c++ version
@@ -249,49 +249,49 @@ class BuildExt(build_ext.build_ext):
self.compiler._compile = new_compile
- compiler = self.compiler.compiler_type
- if compiler in BuildExt.C_OPTIONS:
- for extension in self.extensions:
- extension.extra_compile_args += list(
- BuildExt.C_OPTIONS[compiler])
- if compiler in BuildExt.LINK_OPTIONS:
- for extension in self.extensions:
- extension.extra_link_args += list(
- BuildExt.LINK_OPTIONS[compiler])
- if not check_and_update_cythonization(self.extensions):
- self.extensions = try_cythonize(self.extensions)
- try:
- build_ext.build_ext.build_extensions(self)
- except Exception as error:
- formatted_exception = traceback.format_exc()
- support.diagnose_build_ext_error(self, error, formatted_exception)
- raise CommandError(
- "Failed `build_ext` step:\n{}".format(formatted_exception))
-
-
-class Gather(setuptools.Command):
- """Command to gather project dependencies."""
-
- description = 'gather dependencies for grpcio'
+ compiler = self.compiler.compiler_type
+ if compiler in BuildExt.C_OPTIONS:
+ for extension in self.extensions:
+ extension.extra_compile_args += list(
+ BuildExt.C_OPTIONS[compiler])
+ if compiler in BuildExt.LINK_OPTIONS:
+ for extension in self.extensions:
+ extension.extra_link_args += list(
+ BuildExt.LINK_OPTIONS[compiler])
+ if not check_and_update_cythonization(self.extensions):
+ self.extensions = try_cythonize(self.extensions)
+ try:
+ build_ext.build_ext.build_extensions(self)
+ except Exception as error:
+ formatted_exception = traceback.format_exc()
+ support.diagnose_build_ext_error(self, error, formatted_exception)
+ raise CommandError(
+ "Failed `build_ext` step:\n{}".format(formatted_exception))
+
+
+class Gather(setuptools.Command):
+ """Command to gather project dependencies."""
+
+ description = 'gather dependencies for grpcio'
user_options = [
('test', 't', 'flag indicating to gather test dependencies'),
('install', 'i', 'flag indicating to gather install dependencies')
]
-
- def initialize_options(self):
- self.test = False
- self.install = False
-
- def finalize_options(self):
- # distutils requires this override.
- pass
-
- def run(self):
- if self.install and self.distribution.install_requires:
- self.distribution.fetch_build_eggs(
- self.distribution.install_requires)
- if self.test and self.distribution.tests_require:
- self.distribution.fetch_build_eggs(self.distribution.tests_require)
+
+ def initialize_options(self):
+ self.test = False
+ self.install = False
+
+ def finalize_options(self):
+ # distutils requires this override.
+ pass
+
+ def run(self):
+ if self.install and self.distribution.install_requires:
+ self.distribution.fetch_build_eggs(
+ self.distribution.install_requires)
+ if self.test and self.distribution.tests_require:
+ self.distribution.fetch_build_eggs(self.distribution.tests_require)
class Clean(setuptools.Command):
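The commands.py module above supplies custom setuptools commands (project metadata generation, a Cython-aware build_ext, Sphinx documentation, dependency gathering). A hedged sketch of how such command classes are typically registered via cmdclass; the exact mapping used by grpcio's real setup.py is not shown in this diff, so the command names below are assumptions::

    # Illustrative wiring only; names in cmdclass are assumed, not quoted
    # from grpcio's setup.py.
    import setuptools

    import commands  # the module patched in this diff

    setuptools.setup(
        name='grpcio',
        # packages, ext_modules, install_requires elided for brevity
        cmdclass={
            'build_project_metadata': commands.BuildProjectMetadata,
            'build_py': commands.BuildPy,
            'build_ext': commands.BuildExt,
            'doc': commands.SphinxDocumentation,
            'gather': commands.Gather,
        },
    )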
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/__init__.py b/contrib/libs/grpc/src/python/grpcio/grpc/__init__.py
index abe87458c4..e362fe258b 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/__init__.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/__init__.py
@@ -1,28 +1,28 @@
# Copyright 2015-2016 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""gRPC's Python API."""
-
-import abc
+"""gRPC's Python API."""
+
+import abc
import contextlib
-import enum
+import enum
import logging
-import sys
-import six
-
-from grpc._cython import cygrpc as _cygrpc
+import sys
+import six
+
+from grpc._cython import cygrpc as _cygrpc
from grpc import _compression
-
+
logging.getLogger(__name__).addHandler(logging.NullHandler())
try:
@@ -30,30 +30,30 @@ try:
except ImportError:
__version__ = "dev0"
-############################## Future Interface ###############################
-
-
-class FutureTimeoutError(Exception):
- """Indicates that a method call on a Future timed out."""
-
-
-class FutureCancelledError(Exception):
- """Indicates that the computation underlying a Future was cancelled."""
-
-
-class Future(six.with_metaclass(abc.ABCMeta)):
- """A representation of a computation in another control flow.
-
+############################## Future Interface ###############################
+
+
+class FutureTimeoutError(Exception):
+ """Indicates that a method call on a Future timed out."""
+
+
+class FutureCancelledError(Exception):
+ """Indicates that the computation underlying a Future was cancelled."""
+
+
+class Future(six.with_metaclass(abc.ABCMeta)):
+ """A representation of a computation in another control flow.
+
Computations represented by a Future may be yet to be begun,
may be ongoing, or may have already completed.
"""
-
- @abc.abstractmethod
- def cancel(self):
- """Attempts to cancel the computation.
-
+
+ @abc.abstractmethod
+ def cancel(self):
+ """Attempts to cancel the computation.
+
This method does not block.
-
+
Returns:
bool:
Returns True if the computation was canceled.
@@ -65,14 +65,14 @@ class Future(six.with_metaclass(abc.ABCMeta)):
3. computation is scheduled for execution and it is impossible
to determine its state without blocking.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def cancelled(self):
- """Describes whether the computation was cancelled.
-
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def cancelled(self):
+ """Describes whether the computation was cancelled.
+
This method does not block.
-
+
Returns:
bool:
Returns True if the computation was cancelled before its result became
@@ -83,28 +83,28 @@ class Future(six.with_metaclass(abc.ABCMeta)):
1. computation was not cancelled.
2. computation's result is available.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def running(self):
- """Describes whether the computation is taking place.
-
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def running(self):
+ """Describes whether the computation is taking place.
+
This method does not block.
-
+
Returns:
Returns True if the computation is scheduled for execution or
currently executing.
Returns False if the computation already executed or was cancelled.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def done(self):
- """Describes whether the computation has taken place.
-
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def done(self):
+ """Describes whether the computation has taken place.
+
This method does not block.
-
+
Returns:
bool:
Returns True if the computation already executed or was cancelled.
@@ -112,22 +112,22 @@ class Future(six.with_metaclass(abc.ABCMeta)):
currently executing.
This is exactly opposite of the running() method's result.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def result(self, timeout=None):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def result(self, timeout=None):
"""Returns the result of the computation or raises its exception.
-
+
This method may return immediately or may block.
-
+
Args:
timeout: The length of time in seconds to wait for the computation to
finish or be cancelled. If None, the call will block until the
computations's termination.
-
+
Returns:
The return value of the computation.
-
+
Raises:
FutureTimeoutError: If a timeout value is passed and the computation
does not terminate within the allotted time.
@@ -135,79 +135,79 @@ class Future(six.with_metaclass(abc.ABCMeta)):
Exception: If the computation raised an exception, this call will
raise the same exception.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def exception(self, timeout=None):
- """Return the exception raised by the computation.
-
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def exception(self, timeout=None):
+ """Return the exception raised by the computation.
+
This method may return immediately or may block.
-
+
Args:
timeout: The length of time in seconds to wait for the computation to
terminate or be cancelled. If None, the call will block until the
computations's termination.
-
+
Returns:
The exception raised by the computation, or None if the computation
did not raise an exception.
-
+
Raises:
FutureTimeoutError: If a timeout value is passed and the computation
does not terminate within the allotted time.
FutureCancelledError: If the computation was cancelled.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def traceback(self, timeout=None):
- """Access the traceback of the exception raised by the computation.
-
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def traceback(self, timeout=None):
+ """Access the traceback of the exception raised by the computation.
+
This method may return immediately or may block.
-
+
Args:
timeout: The length of time in seconds to wait for the computation
to terminate or be cancelled. If None, the call will block until
the computation's termination.
-
+
Returns:
The traceback of the exception raised by the computation, or None
if the computation did not raise an exception.
-
+
Raises:
FutureTimeoutError: If a timeout value is passed and the computation
does not terminate within the allotted time.
FutureCancelledError: If the computation was cancelled.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def add_done_callback(self, fn):
- """Adds a function to be called at completion of the computation.
-
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_done_callback(self, fn):
+ """Adds a function to be called at completion of the computation.
+
The callback will be passed this Future object describing the outcome
of the computation. Callbacks will be invoked after the future is
terminated, whether successfully or not.
-
+
If the computation has already completed, the callback will be called
immediately.
-
+
Exceptions raised in the callback will be logged at ERROR level, but
will not terminate any threads of execution.
Args:
fn: A callable taking this Future object as its single parameter.
"""
- raise NotImplementedError()
-
-
-################################ gRPC Enums ##################################
-
-
-@enum.unique
-class ChannelConnectivity(enum.Enum):
- """Mirrors grpc_connectivity_state in the gRPC Core.
-
+ raise NotImplementedError()
+
+
+################################ gRPC Enums ##################################
+
+
+@enum.unique
+class ChannelConnectivity(enum.Enum):
+ """Mirrors grpc_connectivity_state in the gRPC Core.
+
Attributes:
IDLE: The channel is idle.
CONNECTING: The channel is connecting.
@@ -216,16 +216,16 @@ class ChannelConnectivity(enum.Enum):
to recover.
SHUTDOWN: The channel has seen a failure from which it cannot recover.
"""
- IDLE = (_cygrpc.ConnectivityState.idle, 'idle')
- CONNECTING = (_cygrpc.ConnectivityState.connecting, 'connecting')
- READY = (_cygrpc.ConnectivityState.ready, 'ready')
- TRANSIENT_FAILURE = (_cygrpc.ConnectivityState.transient_failure,
- 'transient failure')
- SHUTDOWN = (_cygrpc.ConnectivityState.shutdown, 'shutdown')
-
-
-@enum.unique
-class StatusCode(enum.Enum):
+ IDLE = (_cygrpc.ConnectivityState.idle, 'idle')
+ CONNECTING = (_cygrpc.ConnectivityState.connecting, 'connecting')
+ READY = (_cygrpc.ConnectivityState.ready, 'ready')
+ TRANSIENT_FAILURE = (_cygrpc.ConnectivityState.transient_failure,
+ 'transient failure')
+ SHUTDOWN = (_cygrpc.ConnectivityState.shutdown, 'shutdown')
+
+
+@enum.unique
+class StatusCode(enum.Enum):
"""Mirrors grpc_status_code in the gRPC Core.
Attributes:
@@ -253,29 +253,29 @@ class StatusCode(enum.Enum):
UNAVAILABLE: The service is currently unavailable.
DATA_LOSS: Unrecoverable data loss or corruption.
"""
- OK = (_cygrpc.StatusCode.ok, 'ok')
- CANCELLED = (_cygrpc.StatusCode.cancelled, 'cancelled')
- UNKNOWN = (_cygrpc.StatusCode.unknown, 'unknown')
- INVALID_ARGUMENT = (_cygrpc.StatusCode.invalid_argument, 'invalid argument')
- DEADLINE_EXCEEDED = (_cygrpc.StatusCode.deadline_exceeded,
- 'deadline exceeded')
- NOT_FOUND = (_cygrpc.StatusCode.not_found, 'not found')
- ALREADY_EXISTS = (_cygrpc.StatusCode.already_exists, 'already exists')
- PERMISSION_DENIED = (_cygrpc.StatusCode.permission_denied,
- 'permission denied')
- RESOURCE_EXHAUSTED = (_cygrpc.StatusCode.resource_exhausted,
- 'resource exhausted')
- FAILED_PRECONDITION = (_cygrpc.StatusCode.failed_precondition,
- 'failed precondition')
- ABORTED = (_cygrpc.StatusCode.aborted, 'aborted')
- OUT_OF_RANGE = (_cygrpc.StatusCode.out_of_range, 'out of range')
- UNIMPLEMENTED = (_cygrpc.StatusCode.unimplemented, 'unimplemented')
- INTERNAL = (_cygrpc.StatusCode.internal, 'internal')
- UNAVAILABLE = (_cygrpc.StatusCode.unavailable, 'unavailable')
- DATA_LOSS = (_cygrpc.StatusCode.data_loss, 'data loss')
- UNAUTHENTICATED = (_cygrpc.StatusCode.unauthenticated, 'unauthenticated')
-
-
+ OK = (_cygrpc.StatusCode.ok, 'ok')
+ CANCELLED = (_cygrpc.StatusCode.cancelled, 'cancelled')
+ UNKNOWN = (_cygrpc.StatusCode.unknown, 'unknown')
+ INVALID_ARGUMENT = (_cygrpc.StatusCode.invalid_argument, 'invalid argument')
+ DEADLINE_EXCEEDED = (_cygrpc.StatusCode.deadline_exceeded,
+ 'deadline exceeded')
+ NOT_FOUND = (_cygrpc.StatusCode.not_found, 'not found')
+ ALREADY_EXISTS = (_cygrpc.StatusCode.already_exists, 'already exists')
+ PERMISSION_DENIED = (_cygrpc.StatusCode.permission_denied,
+ 'permission denied')
+ RESOURCE_EXHAUSTED = (_cygrpc.StatusCode.resource_exhausted,
+ 'resource exhausted')
+ FAILED_PRECONDITION = (_cygrpc.StatusCode.failed_precondition,
+ 'failed precondition')
+ ABORTED = (_cygrpc.StatusCode.aborted, 'aborted')
+ OUT_OF_RANGE = (_cygrpc.StatusCode.out_of_range, 'out of range')
+ UNIMPLEMENTED = (_cygrpc.StatusCode.unimplemented, 'unimplemented')
+ INTERNAL = (_cygrpc.StatusCode.internal, 'internal')
+ UNAVAILABLE = (_cygrpc.StatusCode.unavailable, 'unavailable')
+ DATA_LOSS = (_cygrpc.StatusCode.data_loss, 'data loss')
+ UNAUTHENTICATED = (_cygrpc.StatusCode.unauthenticated, 'unauthenticated')
+
+
############################# gRPC Status ################################
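The Future interface and StatusCode enum above make up the asynchronous invocation surface of the public API. A hedged client-side sketch of how they combine; the target address and method path are placeholders, and no serializers are configured, so raw bytes are sent and received::

    # Hedged usage sketch of Future + StatusCode; target and method path
    # are placeholders.
    import grpc

    channel = grpc.insecure_channel('localhost:50051')
    say_hello = channel.unary_unary('/helloworld.Greeter/SayHello')

    def on_done(call_future):
        # The returned object is both a Call and a Future: code() and
        # details() come from Call, result()/exception() from Future.
        if call_future.code() == grpc.StatusCode.OK:
            print('response bytes:', call_future.result())
        else:
            print('RPC failed:', call_future.code(), call_future.details())

    future = say_hello.future(b'raw-request-bytes', timeout=5)
    future.add_done_callback(on_done)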
@@ -292,108 +292,108 @@ class Status(six.with_metaclass(abc.ABCMeta)):
"""
-############################# gRPC Exceptions ################################
-
-
-class RpcError(Exception):
- """Raised by the gRPC library to indicate non-OK-status RPC termination."""
-
-
-############################## Shared Context ################################
-
-
-class RpcContext(six.with_metaclass(abc.ABCMeta)):
- """Provides RPC-related information and action."""
-
- @abc.abstractmethod
- def is_active(self):
+############################# gRPC Exceptions ################################
+
+
+class RpcError(Exception):
+ """Raised by the gRPC library to indicate non-OK-status RPC termination."""
+
+
+############################## Shared Context ################################
+
+
+class RpcContext(six.with_metaclass(abc.ABCMeta)):
+ """Provides RPC-related information and action."""
+
+ @abc.abstractmethod
+ def is_active(self):
"""Describes whether the RPC is active or has terminated.
Returns:
bool:
True if RPC is active, False otherwise.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def time_remaining(self):
- """Describes the length of allowed time remaining for the RPC.
-
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def time_remaining(self):
+ """Describes the length of allowed time remaining for the RPC.
+
Returns:
A nonnegative float indicating the length of allowed time in seconds
remaining for the RPC to complete before it is considered to have
timed out, or None if no deadline was specified for the RPC.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def cancel(self):
- """Cancels the RPC.
-
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def cancel(self):
+ """Cancels the RPC.
+
Idempotent and has no effect if the RPC has already terminated.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def add_callback(self, callback):
- """Registers a callback to be called on RPC termination.
-
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_callback(self, callback):
+ """Registers a callback to be called on RPC termination.
+
Args:
callback: A no-parameter callable to be called on RPC termination.
-
+
Returns:
True if the callback was added and will be called later; False if
the callback was not added and will not be called (because the RPC
already terminated or some other reason).
"""
- raise NotImplementedError()
-
-
-######################### Invocation-Side Context ############################
-
-
-class Call(six.with_metaclass(abc.ABCMeta, RpcContext)):
- """Invocation-side utility object for an RPC."""
-
- @abc.abstractmethod
- def initial_metadata(self):
+ raise NotImplementedError()
+
+
+######################### Invocation-Side Context ############################
+
+
+class Call(six.with_metaclass(abc.ABCMeta, RpcContext)):
+ """Invocation-side utility object for an RPC."""
+
+ @abc.abstractmethod
+ def initial_metadata(self):
"""Accesses the initial metadata sent by the server.
-
+
This method blocks until the value is available.
-
+
Returns:
The initial :term:`metadata`.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def trailing_metadata(self):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def trailing_metadata(self):
"""Accesses the trailing metadata sent by the server.
-
+
This method blocks until the value is available.
-
+
Returns:
The trailing :term:`metadata`.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def code(self):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def code(self):
"""Accesses the status code sent by the server.
-
+
This method blocks until the value is available.
-
+
Returns:
The StatusCode value for the RPC.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def details(self):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def details(self):
"""Accesses the details sent by the server.
-
+
This method blocks until the value is available.
-
+
Returns:
The details string of the RPC.
"""
@@ -418,7 +418,7 @@ class ClientCallDetails(six.with_metaclass(abc.ABCMeta)):
flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip. This is an EXPERIMENTAL option.
- """
+ """
class UnaryUnaryClientInterceptor(six.with_metaclass(abc.ABCMeta)):
@@ -455,9 +455,9 @@ class UnaryUnaryClientInterceptor(six.with_metaclass(abc.ABCMeta)):
Should the event terminate with non-OK status, the returned
Call-Future's exception value will be an RpcError.
"""
- raise NotImplementedError()
-
-
+ raise NotImplementedError()
+
+
class UnaryStreamClientInterceptor(six.with_metaclass(abc.ABCMeta)):
"""Affords intercepting unary-stream invocations.
@@ -567,89 +567,89 @@ class StreamStreamClientInterceptor(six.with_metaclass(abc.ABCMeta)):
raise NotImplementedError()
-############ Authentication & Authorization Interfaces & Classes #############
-
-
-class ChannelCredentials(object):
+############ Authentication & Authorization Interfaces & Classes #############
+
+
+class ChannelCredentials(object):
"""An encapsulation of the data required to create a secure Channel.
-
+
This class has no supported interface - it exists to define the type of its
instances and its instances exist to be passed to other functions. For
example, ssl_channel_credentials returns an instance of this class and
secure_channel requires an instance of this class.
"""
-
- def __init__(self, credentials):
- self._credentials = credentials
-
-
-class CallCredentials(object):
+
+ def __init__(self, credentials):
+ self._credentials = credentials
+
+
+class CallCredentials(object):
"""An encapsulation of the data required to assert an identity over a call.
-
+
A CallCredentials has to be used with secure Channel, otherwise the
metadata will not be transmitted to the server.
A CallCredentials may be composed with ChannelCredentials to always assert
identity for every call over that Channel.
-
+
This class has no supported interface - it exists to define the type of its
instances and its instances exist to be passed to other functions.
"""
-
- def __init__(self, credentials):
- self._credentials = credentials
-
-
-class AuthMetadataContext(six.with_metaclass(abc.ABCMeta)):
- """Provides information to call credentials metadata plugins.
-
+
+ def __init__(self, credentials):
+ self._credentials = credentials
+
+
+class AuthMetadataContext(six.with_metaclass(abc.ABCMeta)):
+ """Provides information to call credentials metadata plugins.
+
Attributes:
service_url: A string URL of the service being called into.
method_name: A string of the fully qualified method name being called.
"""
-
-
-class AuthMetadataPluginCallback(six.with_metaclass(abc.ABCMeta)):
- """Callback object received by a metadata plugin."""
-
- def __call__(self, metadata, error):
+
+
+class AuthMetadataPluginCallback(six.with_metaclass(abc.ABCMeta)):
+ """Callback object received by a metadata plugin."""
+
+ def __call__(self, metadata, error):
"""Passes to the gRPC runtime authentication metadata for an RPC.
-
+
Args:
metadata: The :term:`metadata` used to construct the CallCredentials.
error: An Exception to indicate error or None to indicate success.
"""
- raise NotImplementedError()
-
-
-class AuthMetadataPlugin(six.with_metaclass(abc.ABCMeta)):
- """A specification for custom authentication."""
-
- def __call__(self, context, callback):
- """Implements authentication by passing metadata to a callback.
-
+ raise NotImplementedError()
+
+
+class AuthMetadataPlugin(six.with_metaclass(abc.ABCMeta)):
+ """A specification for custom authentication."""
+
+ def __call__(self, context, callback):
+ """Implements authentication by passing metadata to a callback.
+
This method will be invoked asynchronously in a separate thread.
-
+
Args:
context: An AuthMetadataContext providing information on the RPC that
the plugin is being called to authenticate.
callback: An AuthMetadataPluginCallback to be invoked either
synchronously or asynchronously.
"""
- raise NotImplementedError()
-
-
-class ServerCredentials(object):
+ raise NotImplementedError()
+
+
+class ServerCredentials(object):
"""An encapsulation of the data required to open a secure port on a Server.
-
+
This class has no supported interface - it exists to define the type of its
instances and its instances exist to be passed to other functions.
"""
-
- def __init__(self, credentials):
- self._credentials = credentials
-
-
+
+ def __init__(self, credentials):
+ self._credentials = credentials
+
+
class ServerCertificateConfiguration(object):
"""A certificate configuration for use with an SSL-enabled Server.
@@ -665,13 +665,13 @@ class ServerCertificateConfiguration(object):
self._certificate_configuration = certificate_configuration
-######################## Multi-Callable Interfaces ###########################
-
-
-class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
+######################## Multi-Callable Interfaces ###########################
+
+
+class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
"""Affords invoking a unary-unary RPC from client-side."""
-
- @abc.abstractmethod
+
+ @abc.abstractmethod
def __call__(self,
request,
timeout=None,
@@ -679,8 +679,8 @@ class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
credentials=None,
wait_for_ready=None,
compression=None):
- """Synchronously invokes the underlying RPC.
-
+ """Synchronously invokes the underlying RPC.
+
Args:
request: The request value for the RPC.
timeout: An optional duration of time in seconds to allow
@@ -693,18 +693,18 @@ class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip. This is an EXPERIMENTAL option.
-
+
Returns:
The response value for the RPC.
-
+
Raises:
RpcError: Indicating that the RPC terminated with non-OK status. The
raised RpcError will also be a Call for the RPC affording the RPC's
metadata, status code, and details.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
+ raise NotImplementedError()
+
+ @abc.abstractmethod
def with_call(self,
request,
timeout=None,
@@ -712,8 +712,8 @@ class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
credentials=None,
wait_for_ready=None,
compression=None):
- """Synchronously invokes the underlying RPC.
-
+ """Synchronously invokes the underlying RPC.
+
Args:
request: The request value for the RPC.
timeout: An optional durating of time in seconds to allow for
@@ -726,18 +726,18 @@ class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip. This is an EXPERIMENTAL option.
-
+
Returns:
The response value for the RPC and a Call value for the RPC.
-
+
Raises:
RpcError: Indicating that the RPC terminated with non-OK status. The
raised RpcError will also be a Call for the RPC affording the RPC's
metadata, status code, and details.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
+ raise NotImplementedError()
+
+ @abc.abstractmethod
def future(self,
request,
timeout=None,
@@ -745,8 +745,8 @@ class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
credentials=None,
wait_for_ready=None,
compression=None):
- """Asynchronously invokes the underlying RPC.
-
+ """Asynchronously invokes the underlying RPC.
+
Args:
request: The request value for the RPC.
timeout: An optional duration of time in seconds to allow for
@@ -759,7 +759,7 @@ class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip. This is an EXPERIMENTAL option.
-
+
Returns:
An object that is both a Call for the RPC and a Future.
In the event of RPC completion, the return Call-Future's result
@@ -767,13 +767,13 @@ class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
Should the event terminate with non-OK status,
the returned Call-Future's exception value will be an RpcError.
"""
- raise NotImplementedError()
-
-
-class UnaryStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
+ raise NotImplementedError()
+
+
+class UnaryStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
"""Affords invoking a unary-stream RPC from client-side."""
-
- @abc.abstractmethod
+
+ @abc.abstractmethod
def __call__(self,
request,
timeout=None,
@@ -781,8 +781,8 @@ class UnaryStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
credentials=None,
wait_for_ready=None,
compression=None):
- """Invokes the underlying RPC.
-
+ """Invokes the underlying RPC.
+
Args:
request: The request value for the RPC.
timeout: An optional duration of time in seconds to allow for
@@ -795,29 +795,29 @@ class UnaryStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip. This is an EXPERIMENTAL option.
-
+
Returns:
An object that is both a Call for the RPC and an iterator of
response values. Drawing response values from the returned
Call-iterator may raise RpcError indicating termination of the
RPC with non-OK status.
"""
- raise NotImplementedError()
-
-
-class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
+ raise NotImplementedError()
+
+
+class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
"""Affords invoking a stream-unary RPC from client-side."""
-
- @abc.abstractmethod
- def __call__(self,
- request_iterator,
- timeout=None,
- metadata=None,
+
+ @abc.abstractmethod
+ def __call__(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
credentials=None,
wait_for_ready=None,
compression=None):
- """Synchronously invokes the underlying RPC.
-
+ """Synchronously invokes the underlying RPC.
+
Args:
request_iterator: An iterator that yields request values for
the RPC.
@@ -831,27 +831,27 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip. This is an EXPERIMENTAL option.
-
+
Returns:
The response value for the RPC.
-
+
Raises:
RpcError: Indicating that the RPC terminated with non-OK status. The
raised RpcError will also implement grpc.Call, affording methods
such as metadata, code, and details.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def with_call(self,
- request_iterator,
- timeout=None,
- metadata=None,
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def with_call(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
credentials=None,
wait_for_ready=None,
compression=None):
"""Synchronously invokes the underlying RPC on the client.
-
+
Args:
request_iterator: An iterator that yields request values for
the RPC.
@@ -865,27 +865,27 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip. This is an EXPERIMENTAL option.
-
+
Returns:
The response value for the RPC and a Call object for the RPC.
-
+
Raises:
RpcError: Indicating that the RPC terminated with non-OK status. The
raised RpcError will also be a Call for the RPC affording the RPC's
metadata, status code, and details.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def future(self,
- request_iterator,
- timeout=None,
- metadata=None,
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def future(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
credentials=None,
wait_for_ready=None,
compression=None):
"""Asynchronously invokes the underlying RPC on the client.
-
+
Args:
request_iterator: An iterator that yields request values for the RPC.
timeout: An optional duration of time in seconds to allow for
@@ -898,7 +898,7 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip. This is an EXPERIMENTAL option.
-
+
Returns:
An object that is both a Call for the RPC and a Future.
          In the event of RPC completion, the returned Call-Future's result value
@@ -906,22 +906,22 @@ class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
with non-OK status, the returned Call-Future's exception value will
be an RpcError.
"""
- raise NotImplementedError()
-
-
-class StreamStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
+ raise NotImplementedError()
+
+
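# Sketch of the stream-unary pattern: the client supplies an iterator of
# request values and receives one response plus a Call. The generator,
# method name and serializers below are assumptions for illustration.
import grpc

def request_chunks():
    for chunk in (b'part-1', b'part-2', b'part-3'):
        yield chunk

channel = grpc.insecure_channel('localhost:50051')
upload = channel.stream_unary(
    '/example.Storage/Upload',
    request_serializer=lambda b: b,
    response_deserializer=lambda b: b.decode('utf-8'))
response, call = upload.with_call(request_chunks(), timeout=30.0)
print(response, call.code(), call.trailing_metadata())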
+class StreamStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
"""Affords invoking a stream-stream RPC on client-side."""
-
- @abc.abstractmethod
- def __call__(self,
- request_iterator,
- timeout=None,
- metadata=None,
+
+ @abc.abstractmethod
+ def __call__(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
credentials=None,
wait_for_ready=None,
compression=None):
"""Invokes the underlying RPC on the client.
-
+
Args:
request_iterator: An iterator that yields request values for the RPC.
timeout: An optional duration of time in seconds to allow for
@@ -934,30 +934,30 @@ class StreamStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
flag to enable :term:`wait_for_ready` mechanism.
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip. This is an EXPERIMENTAL option.
-
+
Returns:
An object that is both a Call for the RPC and an iterator of
response values. Drawing response values from the returned
Call-iterator may raise RpcError indicating termination of the
RPC with non-OK status.
"""
- raise NotImplementedError()
-
-
-############################# Channel Interface ##############################
-
-
-class Channel(six.with_metaclass(abc.ABCMeta)):
+ raise NotImplementedError()
+
+
+############################# Channel Interface ##############################
+
+
+class Channel(six.with_metaclass(abc.ABCMeta)):
"""Affords RPC invocation via generic methods on client-side.
-
+
Channel objects implement the Context Manager type, although they need not
support being entered and exited multiple times.
"""
- @abc.abstractmethod
- def subscribe(self, callback, try_to_connect=False):
+ @abc.abstractmethod
+ def subscribe(self, callback, try_to_connect=False):
"""Subscribe to this Channel's connectivity state machine.
-
+
A Channel may be in any of the states described by ChannelConnectivity.
        This method allows the application to monitor the state transitions.
The typical use case is to debug or gain better visibility into gRPC
@@ -973,25 +973,25 @@ class Channel(six.with_metaclass(abc.ABCMeta)):
should attempt to connect immediately. If set to False, gRPC
runtime decides when to connect.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def unsubscribe(self, callback):
+ raise NotImplementedError()
+
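# A small sketch of the subscribe/unsubscribe contract: the callback
# receives ChannelConnectivity values as the channel moves through its
# state machine. The target address is a placeholder.
import grpc

def on_connectivity_change(connectivity):
    print('channel is now', connectivity)
    if connectivity is grpc.ChannelConnectivity.READY:
        print('ready to issue RPCs')

channel = grpc.insecure_channel('localhost:50051')
channel.subscribe(on_connectivity_change, try_to_connect=True)
# ... issue RPCs ...
channel.unsubscribe(on_connectivity_change)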
+ @abc.abstractmethod
+ def unsubscribe(self, callback):
"""Unsubscribes a subscribed callback from this Channel's connectivity.
-
+
Args:
callback: A callable previously registered with this Channel from
having been passed to its "subscribe" method.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def unary_unary(self,
- method,
- request_serializer=None,
- response_deserializer=None):
- """Creates a UnaryUnaryMultiCallable for a unary-unary method.
-
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def unary_unary(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ """Creates a UnaryUnaryMultiCallable for a unary-unary method.
+
Args:
method: The name of the RPC method.
request_serializer: Optional :term:`serializer` for serializing the request
@@ -999,19 +999,19 @@ class Channel(six.with_metaclass(abc.ABCMeta)):
response_deserializer: Optional :term:`deserializer` for deserializing the
response message. Response goes undeserialized in case None
is passed.
-
+
Returns:
A UnaryUnaryMultiCallable value for the named unary-unary method.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def unary_stream(self,
- method,
- request_serializer=None,
- response_deserializer=None):
- """Creates a UnaryStreamMultiCallable for a unary-stream method.
-
+ raise NotImplementedError()
+
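# Illustrative use of Channel.unary_unary as documented above. The method
# name is hypothetical; with protobuf-generated code the serializer would
# typically be Request.SerializeToString and the deserializer
# Response.FromString, but str/bytes lambdas keep the sketch self-contained.
import grpc

channel = grpc.insecure_channel('localhost:50051')
say_hello = channel.unary_unary(
    '/example.Greeter/SayHello',
    request_serializer=lambda s: s.encode('utf-8'),
    response_deserializer=lambda b: b.decode('utf-8'))
reply = say_hello('world', timeout=5.0)                  # response only
reply, call = say_hello.with_call('world', timeout=5.0)  # response plus Call
print(reply, call.code())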
+ @abc.abstractmethod
+ def unary_stream(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ """Creates a UnaryStreamMultiCallable for a unary-stream method.
+
Args:
method: The name of the RPC method.
request_serializer: Optional :term:`serializer` for serializing the request
@@ -1019,19 +1019,19 @@ class Channel(six.with_metaclass(abc.ABCMeta)):
response_deserializer: Optional :term:`deserializer` for deserializing the
response message. Response goes undeserialized in case None is
passed.
-
+
Returns:
          A UnaryStreamMultiCallable value for the named unary-stream method.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def stream_unary(self,
- method,
- request_serializer=None,
- response_deserializer=None):
- """Creates a StreamUnaryMultiCallable for a stream-unary method.
-
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def stream_unary(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ """Creates a StreamUnaryMultiCallable for a stream-unary method.
+
Args:
method: The name of the RPC method.
request_serializer: Optional :term:`serializer` for serializing the request
@@ -1039,19 +1039,19 @@ class Channel(six.with_metaclass(abc.ABCMeta)):
response_deserializer: Optional :term:`deserializer` for deserializing the
response message. Response goes undeserialized in case None is
passed.
-
+
Returns:
A StreamUnaryMultiCallable value for the named stream-unary method.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def stream_stream(self,
- method,
- request_serializer=None,
- response_deserializer=None):
- """Creates a StreamStreamMultiCallable for a stream-stream method.
-
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def stream_stream(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ """Creates a StreamStreamMultiCallable for a stream-stream method.
+
Args:
method: The name of the RPC method.
request_serializer: Optional :term:`serializer` for serializing the request
@@ -1059,16 +1059,16 @@ class Channel(six.with_metaclass(abc.ABCMeta)):
response_deserializer: Optional :term:`deserializer` for deserializing the
response message. Response goes undeserialized in case None
is passed.
-
+
Returns:
A StreamStreamMultiCallable value for the named stream-stream method.
"""
- raise NotImplementedError()
-
+ raise NotImplementedError()
+
@abc.abstractmethod
def close(self):
"""Closes this Channel and releases all resources held by it.
-
+
Closing the Channel will immediately terminate all RPCs active with the
Channel and it is not valid to invoke new RPCs with the Channel.
@@ -1085,32 +1085,32 @@ class Channel(six.with_metaclass(abc.ABCMeta)):
raise NotImplementedError()
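# Because Channel implements the context-manager protocol (see the class
# docstring above), close() is usually invoked implicitly. The target and
# method name are placeholders and a listening server is assumed.
import grpc

with grpc.insecure_channel('localhost:50051') as channel:
    ping = channel.unary_unary('/example.Health/Ping')  # no (de)serializers
    ping(b'', timeout=1.0)                              # raw bytes through
# Exiting the block closes the channel; any RPCs still active on it are
# terminated at that point.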
-########################## Service-Side Context ##############################
-
-
-class ServicerContext(six.with_metaclass(abc.ABCMeta, RpcContext)):
- """A context object passed to method implementations."""
-
- @abc.abstractmethod
- def invocation_metadata(self):
+########################## Service-Side Context ##############################
+
+
+class ServicerContext(six.with_metaclass(abc.ABCMeta, RpcContext)):
+ """A context object passed to method implementations."""
+
+ @abc.abstractmethod
+ def invocation_metadata(self):
"""Accesses the metadata from the sent by the client.
-
+
Returns:
The invocation :term:`metadata`.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def peer(self):
- """Identifies the peer that invoked the RPC being serviced.
-
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def peer(self):
+ """Identifies the peer that invoked the RPC being serviced.
+
Returns:
A string identifying the peer that invoked the RPC being serviced.
The string format is determined by gRPC runtime.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
+ raise NotImplementedError()
+
+ @abc.abstractmethod
def peer_identities(self):
"""Gets one or more peer identity(s).
@@ -1157,21 +1157,21 @@ class ServicerContext(six.with_metaclass(abc.ABCMeta, RpcContext)):
raise NotImplementedError()
@abc.abstractmethod
- def send_initial_metadata(self, initial_metadata):
+ def send_initial_metadata(self, initial_metadata):
"""Sends the initial metadata value to the client.
-
+
This method need not be called by implementations if they have no
metadata to add to what the gRPC runtime will transmit.
-
+
Args:
initial_metadata: The initial :term:`metadata`.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def set_trailing_metadata(self, trailing_metadata):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def set_trailing_metadata(self, trailing_metadata):
"""Sets the trailing metadata for the RPC.
-
+
Sets the trailing metadata to be sent upon completion of the RPC.
If this method is invoked multiple times throughout the lifetime of an
@@ -1180,13 +1180,13 @@ class ServicerContext(six.with_metaclass(abc.ABCMeta, RpcContext)):
This method need not be called by implementations if they have no
metadata to add to what the gRPC runtime will transmit.
-
+
Args:
trailing_metadata: The trailing :term:`metadata`.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
+ raise NotImplementedError()
+
+ @abc.abstractmethod
def abort(self, code, details):
"""Raises an exception to terminate the RPC with a non-OK status.
@@ -1225,33 +1225,33 @@ class ServicerContext(six.with_metaclass(abc.ABCMeta, RpcContext)):
raise NotImplementedError()
@abc.abstractmethod
- def set_code(self, code):
+ def set_code(self, code):
"""Sets the value to be used as status code upon RPC completion.
-
+
This method need not be called by method implementations if they wish
the gRPC runtime to determine the status code of the RPC.
-
+
Args:
code: A StatusCode object to be sent to the client.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def set_details(self, details):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def set_details(self, details):
"""Sets the value to be used as detail string upon RPC completion.
-
+
This method need not be called by method implementations if they have
no details to transmit.
-
+
Args:
details: A UTF-8-encodable string to be sent to the client upon
termination of the RPC.
"""
- raise NotImplementedError()
-
+ raise NotImplementedError()
+
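# A hedged sketch of a unary-unary handler exercising the ServicerContext
# methods documented above. The service shape and raw-bytes messages are
# assumptions; only the context calls mirror this interface.
import grpc

def say_hello(request, context):
    print('peer:', context.peer())
    print('client metadata:', context.invocation_metadata())
    context.send_initial_metadata((('x-handler', 'say_hello'),))
    if not request:
        context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
        context.set_details('request must not be empty')
        return b''
    context.set_trailing_metadata((('x-elapsed-ms', '1'),))
    return b'hello ' + request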
def disable_next_message_compression(self):
"""Disables compression for the next response message.
-
+
This is an EXPERIMENTAL method.
This method will override any compression configuration set during
@@ -1260,12 +1260,12 @@ class ServicerContext(six.with_metaclass(abc.ABCMeta, RpcContext)):
raise NotImplementedError()
-##################### Service-Side Handler Interfaces ########################
-
-
-class RpcMethodHandler(six.with_metaclass(abc.ABCMeta)):
- """An implementation of a single RPC method.
-
+##################### Service-Side Handler Interfaces ########################
+
+
+class RpcMethodHandler(six.with_metaclass(abc.ABCMeta)):
+ """An implementation of a single RPC method.
+
Attributes:
request_streaming: Whether the RPC supports exactly one request message
or any arbitrary number of request messages.
@@ -1297,48 +1297,48 @@ class RpcMethodHandler(six.with_metaclass(abc.ABCMeta)):
Only non-None if request_streaming and response_streaming are both
True.
"""
-
-
-class HandlerCallDetails(six.with_metaclass(abc.ABCMeta)):
- """Describes an RPC that has just arrived for service.
+
+
+class HandlerCallDetails(six.with_metaclass(abc.ABCMeta)):
+ """Describes an RPC that has just arrived for service.
Attributes:
method: The method name of the RPC.
invocation_metadata: The :term:`metadata` sent by the client.
"""
-
-
-class GenericRpcHandler(six.with_metaclass(abc.ABCMeta)):
- """An implementation of arbitrarily many RPC methods."""
-
- @abc.abstractmethod
- def service(self, handler_call_details):
+
+
+class GenericRpcHandler(six.with_metaclass(abc.ABCMeta)):
+ """An implementation of arbitrarily many RPC methods."""
+
+ @abc.abstractmethod
+ def service(self, handler_call_details):
"""Returns the handler for servicing the RPC.
-
+
Args:
handler_call_details: A HandlerCallDetails describing the RPC.
-
+
Returns:
An RpcMethodHandler with which the RPC may be serviced if the
implementation chooses to service this RPC, or None otherwise.
"""
- raise NotImplementedError()
-
-
-class ServiceRpcHandler(six.with_metaclass(abc.ABCMeta, GenericRpcHandler)):
- """An implementation of RPC methods belonging to a service.
-
+ raise NotImplementedError()
+
+
+class ServiceRpcHandler(six.with_metaclass(abc.ABCMeta, GenericRpcHandler)):
+ """An implementation of RPC methods belonging to a service.
+
A service handles RPC methods with structured names of the form
'/Service.Name/Service.Method', where 'Service.Name' is the value
returned by service_name(), and 'Service.Method' is the method
name. A service can have multiple method names, but only a single
service name.
"""
-
- @abc.abstractmethod
- def service_name(self):
+
+ @abc.abstractmethod
+ def service_name(self):
"""Returns this service's name.
-
+
Returns:
The service name.
"""
@@ -1352,7 +1352,7 @@ class ServerInterceptor(six.with_metaclass(abc.ABCMeta)):
"""Affords intercepting incoming RPCs on the service-side.
This is an EXPERIMENTAL API.
- """
+ """
@abc.abstractmethod
def intercept_service(self, continuation, handler_call_details):
@@ -1370,71 +1370,71 @@ class ServerInterceptor(six.with_metaclass(abc.ABCMeta)):
An RpcMethodHandler with which the RPC may be serviced if the
interceptor chooses to service this RPC, or None otherwise.
"""
- raise NotImplementedError()
-
-
-############################# Server Interface ###############################
-
-
-class Server(six.with_metaclass(abc.ABCMeta)):
- """Services RPCs."""
-
- @abc.abstractmethod
- def add_generic_rpc_handlers(self, generic_rpc_handlers):
- """Registers GenericRpcHandlers with this Server.
-
+ raise NotImplementedError()
+
+
+############################# Server Interface ###############################
+
+
+class Server(six.with_metaclass(abc.ABCMeta)):
+ """Services RPCs."""
+
+ @abc.abstractmethod
+ def add_generic_rpc_handlers(self, generic_rpc_handlers):
+ """Registers GenericRpcHandlers with this Server.
+
This method is only safe to call before the server is started.
-
+
Args:
generic_rpc_handlers: An iterable of GenericRpcHandlers that will be
used to service RPCs.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def add_insecure_port(self, address):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_insecure_port(self, address):
"""Opens an insecure port for accepting RPCs.
-
+
This method may only be called before starting the server.
-
+
Args:
address: The address for which to open a port. If the port is 0,
or not specified in the address, then gRPC runtime will choose a port.
-
+
Returns:
An integer port on which server will accept RPC requests.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def add_secure_port(self, address, server_credentials):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_secure_port(self, address, server_credentials):
"""Opens a secure port for accepting RPCs.
-
+
This method may only be called before starting the server.
-
+
Args:
address: The address for which to open a port.
          If the port is 0, or not specified in the address, then gRPC
runtime will choose a port.
server_credentials: A ServerCredentials object.
-
+
Returns:
An integer port on which server will accept RPC requests.
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def start(self):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def start(self):
"""Starts this Server.
-
+
        This method may only be called once (i.e. it is not idempotent).
"""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def stop(self, grace):
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def stop(self, grace):
"""Stops this Server.
-
+
        This method immediately stops service of new RPCs in all cases.
If a grace period is specified, this method returns immediately
@@ -1442,7 +1442,7 @@ class Server(six.with_metaclass(abc.ABCMeta)):
If a grace period is not specified (by passing None for `grace`),
all existing RPCs are aborted immediately and this method
blocks until the last RPC handler terminates.
-
+
This method is idempotent and may be called at any time.
Passing a smaller grace value in a subsequent call will have
the effect of stopping the Server sooner (passing None will
@@ -1450,20 +1450,20 @@ class Server(six.with_metaclass(abc.ABCMeta)):
a larger grace value in a subsequent call *will not* have the
effect of stopping the server later (i.e. the most restrictive
grace value is used).
-
+
Args:
grace: A duration of time in seconds or None.
-
+
Returns:
A threading.Event that will be set when this Server has completely
stopped, i.e. when running RPCs either complete or are aborted and
all handlers have terminated.
"""
- raise NotImplementedError()
-
+ raise NotImplementedError()
+
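# Sketch of the stop() semantics above: with a grace period the call
# returns immediately and yields a threading.Event; passing None instead
# aborts in-flight RPCs and blocks until all handlers terminate.
from concurrent import futures
import grpc

server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
server.add_insecure_port('[::]:0')  # port 0: let the runtime pick one
server.start()
stopped = server.stop(grace=5.0)    # non-blocking, five-second drain window
stopped.wait()                      # set once every RPC handler has finished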
def wait_for_termination(self, timeout=None):
"""Block current thread until the server stops.
-
+
This is an EXPERIMENTAL API.
The wait will not consume computational resources during blocking, and
@@ -1485,111 +1485,111 @@ class Server(six.with_metaclass(abc.ABCMeta)):
raise NotImplementedError()
-################################# Functions ################################
-
-
-def unary_unary_rpc_method_handler(behavior,
- request_deserializer=None,
- response_serializer=None):
- """Creates an RpcMethodHandler for a unary-unary RPC method.
-
+################################# Functions ################################
+
+
+def unary_unary_rpc_method_handler(behavior,
+ request_deserializer=None,
+ response_serializer=None):
+ """Creates an RpcMethodHandler for a unary-unary RPC method.
+
Args:
behavior: The implementation of an RPC that accepts one request
and returns one response.
request_deserializer: An optional :term:`deserializer` for request deserialization.
response_serializer: An optional :term:`serializer` for response serialization.
-
+
Returns:
An RpcMethodHandler object that is typically used by grpc.Server.
"""
from grpc import _utilities # pylint: disable=cyclic-import
- return _utilities.RpcMethodHandler(False, False, request_deserializer,
- response_serializer, behavior, None,
- None, None)
-
-
-def unary_stream_rpc_method_handler(behavior,
- request_deserializer=None,
- response_serializer=None):
- """Creates an RpcMethodHandler for a unary-stream RPC method.
-
+ return _utilities.RpcMethodHandler(False, False, request_deserializer,
+ response_serializer, behavior, None,
+ None, None)
+
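# Minimal sketch of wrapping a behavior with unary_unary_rpc_method_handler.
# The echo behavior and the raw-bytes pass-through (no (de)serializers) are
# illustrative assumptions.
import grpc

def echo(request, context):
    return request  # one request in, one response out

handler = grpc.unary_unary_rpc_method_handler(
    echo,
    request_deserializer=None,   # behavior receives raw bytes
    response_serializer=None)    # behavior returns raw bytes
# `handler` is an RpcMethodHandler; it is typically grouped per service and
# registered on a grpc.Server (see method_handlers_generic_handler below).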
+
+def unary_stream_rpc_method_handler(behavior,
+ request_deserializer=None,
+ response_serializer=None):
+ """Creates an RpcMethodHandler for a unary-stream RPC method.
+
Args:
behavior: The implementation of an RPC that accepts one request
and returns an iterator of response values.
request_deserializer: An optional :term:`deserializer` for request deserialization.
response_serializer: An optional :term:`serializer` for response serialization.
-
+
Returns:
An RpcMethodHandler object that is typically used by grpc.Server.
"""
from grpc import _utilities # pylint: disable=cyclic-import
- return _utilities.RpcMethodHandler(False, True, request_deserializer,
- response_serializer, None, behavior,
- None, None)
-
-
-def stream_unary_rpc_method_handler(behavior,
- request_deserializer=None,
- response_serializer=None):
- """Creates an RpcMethodHandler for a stream-unary RPC method.
-
+ return _utilities.RpcMethodHandler(False, True, request_deserializer,
+ response_serializer, None, behavior,
+ None, None)
+
+
+def stream_unary_rpc_method_handler(behavior,
+ request_deserializer=None,
+ response_serializer=None):
+ """Creates an RpcMethodHandler for a stream-unary RPC method.
+
Args:
behavior: The implementation of an RPC that accepts an iterator of
request values and returns a single response value.
request_deserializer: An optional :term:`deserializer` for request deserialization.
response_serializer: An optional :term:`serializer` for response serialization.
-
+
Returns:
An RpcMethodHandler object that is typically used by grpc.Server.
"""
from grpc import _utilities # pylint: disable=cyclic-import
- return _utilities.RpcMethodHandler(True, False, request_deserializer,
- response_serializer, None, None,
- behavior, None)
-
-
-def stream_stream_rpc_method_handler(behavior,
- request_deserializer=None,
- response_serializer=None):
- """Creates an RpcMethodHandler for a stream-stream RPC method.
-
+ return _utilities.RpcMethodHandler(True, False, request_deserializer,
+ response_serializer, None, None,
+ behavior, None)
+
+
+def stream_stream_rpc_method_handler(behavior,
+ request_deserializer=None,
+ response_serializer=None):
+ """Creates an RpcMethodHandler for a stream-stream RPC method.
+
Args:
behavior: The implementation of an RPC that accepts an iterator of
request values and returns an iterator of response values.
request_deserializer: An optional :term:`deserializer` for request deserialization.
response_serializer: An optional :term:`serializer` for response serialization.
-
+
Returns:
An RpcMethodHandler object that is typically used by grpc.Server.
"""
from grpc import _utilities # pylint: disable=cyclic-import
- return _utilities.RpcMethodHandler(True, True, request_deserializer,
- response_serializer, None, None, None,
- behavior)
-
-
-def method_handlers_generic_handler(service, method_handlers):
+ return _utilities.RpcMethodHandler(True, True, request_deserializer,
+ response_serializer, None, None, None,
+ behavior)
+
+
+def method_handlers_generic_handler(service, method_handlers):
"""Creates a GenericRpcHandler from RpcMethodHandlers.
-
+
Args:
service: The name of the service that is implemented by the
method_handlers.
method_handlers: A dictionary that maps method names to corresponding
RpcMethodHandler.
-
+
Returns:
A GenericRpcHandler. This is typically added to the grpc.Server object
with add_generic_rpc_handlers() before starting the server.
"""
from grpc import _utilities # pylint: disable=cyclic-import
- return _utilities.DictionaryGenericHandler(service, method_handlers)
-
-
-def ssl_channel_credentials(root_certificates=None,
- private_key=None,
- certificate_chain=None):
- """Creates a ChannelCredentials for use with an SSL-enabled Channel.
-
+ return _utilities.DictionaryGenericHandler(service, method_handlers)
+
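# Sketch, under assumed names, of grouping RpcMethodHandlers into a single
# GenericRpcHandler for one service; the result is what
# Server.add_generic_rpc_handlers() expects, as noted in the docstring above.
import grpc

def echo(request, context):
    return request

generic_handler = grpc.method_handlers_generic_handler(
    'example.Echo',  # hypothetical service name
    {'Echo': grpc.unary_unary_rpc_method_handler(echo)})
# Before starting the server:
#     server.add_generic_rpc_handlers((generic_handler,))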
+
+def ssl_channel_credentials(root_certificates=None,
+ private_key=None,
+ certificate_chain=None):
+ """Creates a ChannelCredentials for use with an SSL-enabled Channel.
+
Args:
root_certificates: The PEM-encoded root certificates as a byte string,
or None to retrieve them from a default location chosen by gRPC
@@ -1598,38 +1598,38 @@ def ssl_channel_credentials(root_certificates=None,
private key should be used.
certificate_chain: The PEM-encoded certificate chain as a byte string
to use or None if no certificate chain should be used.
-
+
Returns:
A ChannelCredentials for use with an SSL-enabled Channel.
"""
- return ChannelCredentials(
+ return ChannelCredentials(
_cygrpc.SSLChannelCredentials(root_certificates, private_key,
certificate_chain))
-
-
-def metadata_call_credentials(metadata_plugin, name=None):
- """Construct CallCredentials from an AuthMetadataPlugin.
-
+
+
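# Sketch of building SSL channel credentials and opening a secure channel.
# The certificate path and target are placeholders; omitting
# root_certificates falls back to a default location chosen by gRPC.
import grpc

with open('roots.pem', 'rb') as f:
    root_certificates = f.read()
credentials = grpc.ssl_channel_credentials(
    root_certificates=root_certificates)
channel = grpc.secure_channel('greeter.example.com:443', credentials)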
+def metadata_call_credentials(metadata_plugin, name=None):
+ """Construct CallCredentials from an AuthMetadataPlugin.
+
Args:
metadata_plugin: An AuthMetadataPlugin to use for authentication.
name: An optional name for the plugin.
-
+
Returns:
A CallCredentials.
"""
from grpc import _plugin_wrapping # pylint: disable=cyclic-import
return _plugin_wrapping.metadata_plugin_call_credentials(
metadata_plugin, name)
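# Sketch of an AuthMetadataPlugin fed to metadata_call_credentials. The
# header value and plugin name are assumptions; the plugin must invoke
# callback() exactly once, with either the metadata or an error.
import grpc

class StaticTokenPlugin(grpc.AuthMetadataPlugin):
    """Adds a fixed bearer token to every call (illustrative only)."""

    def __init__(self, token):
        self._token = token

    def __call__(self, context, callback):
        callback((('authorization', 'Bearer ' + self._token),), None)

call_credentials = grpc.metadata_call_credentials(
    StaticTokenPlugin('my-token'), name='static_token_plugin')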
-
-
-def access_token_call_credentials(access_token):
- """Construct CallCredentials from an access token.
-
+
+
+def access_token_call_credentials(access_token):
+ """Construct CallCredentials from an access token.
+
Args:
access_token: A string to place directly in the http request
authorization header, for example
"authorization: Bearer <access_token>".
-
+
Returns:
A CallCredentials.
"""
@@ -1637,46 +1637,46 @@ def access_token_call_credentials(access_token):
from grpc import _plugin_wrapping # pylint: disable=cyclic-import
return _plugin_wrapping.metadata_plugin_call_credentials(
_auth.AccessTokenAuthMetadataPlugin(access_token), None)
-
-
-def composite_call_credentials(*call_credentials):
- """Compose multiple CallCredentials to make a new CallCredentials.
-
+
+
+def composite_call_credentials(*call_credentials):
+ """Compose multiple CallCredentials to make a new CallCredentials.
+
Args:
*call_credentials: At least two CallCredentials objects.
-
+
Returns:
A CallCredentials object composed of the given CallCredentials objects.
"""
- return CallCredentials(
+ return CallCredentials(
_cygrpc.CompositeCallCredentials(
tuple(single_call_credentials._credentials
for single_call_credentials in call_credentials)))
-
-
-def composite_channel_credentials(channel_credentials, *call_credentials):
- """Compose a ChannelCredentials and one or more CallCredentials objects.
-
+
+
+def composite_channel_credentials(channel_credentials, *call_credentials):
+ """Compose a ChannelCredentials and one or more CallCredentials objects.
+
Args:
channel_credentials: A ChannelCredentials object.
*call_credentials: One or more CallCredentials objects.
-
+
Returns:
A ChannelCredentials composed of the given ChannelCredentials and
CallCredentials objects.
"""
- return ChannelCredentials(
+ return ChannelCredentials(
_cygrpc.CompositeChannelCredentials(
tuple(single_call_credentials._credentials
for single_call_credentials in call_credentials),
channel_credentials._credentials))
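# Sketch combining the two credential kinds: channel-level transport
# security plus call credentials attached to every RPC on the channel.
# The target and token are placeholders.
import grpc

channel_credentials = grpc.composite_channel_credentials(
    grpc.ssl_channel_credentials(),
    grpc.access_token_call_credentials('my-access-token'))
channel = grpc.secure_channel('greeter.example.com:443', channel_credentials)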
-
-
-def ssl_server_credentials(private_key_certificate_chain_pairs,
- root_certificates=None,
- require_client_auth=False):
- """Creates a ServerCredentials for use with an SSL-enabled Server.
-
+
+
+def ssl_server_credentials(private_key_certificate_chain_pairs,
+ root_certificates=None,
+ require_client_auth=False):
+ """Creates a ServerCredentials for use with an SSL-enabled Server.
+
Args:
private_key_certificate_chain_pairs: A list of pairs of the form
[PEM-encoded private key, PEM-encoded certificate chain].
@@ -1686,26 +1686,26 @@ def ssl_server_credentials(private_key_certificate_chain_pairs,
require_client_auth: A boolean indicating whether or not to require
clients to be authenticated. May only be True if root_certificates
is not None.
-
+
Returns:
A ServerCredentials for use with an SSL-enabled Server. Typically, this
object is an argument to add_secure_port() method during server setup.
"""
if not private_key_certificate_chain_pairs:
- raise ValueError(
- 'At least one private key-certificate chain pair is required!')
- elif require_client_auth and root_certificates is None:
- raise ValueError(
- 'Illegal to require client auth without providing root certificates!'
- )
- else:
- return ServerCredentials(
- _cygrpc.server_credentials_ssl(root_certificates, [
- _cygrpc.SslPemKeyCertPair(key, pem)
- for key, pem in private_key_certificate_chain_pairs
- ], require_client_auth))
-
-
+ raise ValueError(
+ 'At least one private key-certificate chain pair is required!')
+ elif require_client_auth and root_certificates is None:
+ raise ValueError(
+ 'Illegal to require client auth without providing root certificates!'
+ )
+ else:
+ return ServerCredentials(
+ _cygrpc.server_credentials_ssl(root_certificates, [
+ _cygrpc.SslPemKeyCertPair(key, pem)
+ for key, pem in private_key_certificate_chain_pairs
+ ], require_client_auth))
+
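# Hedged sketch of SSL server credentials wired into add_secure_port. The
# key and certificate file names are placeholders; passing
# require_client_auth=True (with root certificates) would additionally
# demand and verify a client certificate.
from concurrent import futures
import grpc

with open('server.key', 'rb') as f:
    private_key = f.read()
with open('server.crt', 'rb') as f:
    certificate_chain = f.read()

server_credentials = grpc.ssl_server_credentials(
    [(private_key, certificate_chain)])
server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
port = server.add_secure_port('[::]:50051', server_credentials)
server.start()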
+
def ssl_server_certificate_configuration(private_key_certificate_chain_pairs,
root_certificates=None):
"""Creates a ServerCertificateConfiguration for use with a Server.
@@ -1885,26 +1885,26 @@ def compute_engine_channel_credentials(call_credentials):
call_credentials._credentials))
-def channel_ready_future(channel):
+def channel_ready_future(channel):
"""Creates a Future that tracks when a Channel is ready.
-
+
Cancelling the Future does not affect the channel's state machine.
    It merely decouples the Future from the channel's state machine.
-
+
Args:
channel: A Channel object.
-
+
Returns:
A Future object that matures when the channel connectivity is
- ChannelConnectivity.READY.
+ ChannelConnectivity.READY.
"""
from grpc import _utilities # pylint: disable=cyclic-import
- return _utilities.channel_ready_future(channel)
-
-
+ return _utilities.channel_ready_future(channel)
+
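# Sketch of blocking until a channel is READY, as described above; the
# target address and timeout are placeholders.
import grpc

channel = grpc.insecure_channel('localhost:50051')
try:
    grpc.channel_ready_future(channel).result(timeout=10)
except grpc.FutureTimeoutError:
    print('channel did not become ready within 10 seconds')
else:
    print('channel is ready')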
+
def insecure_channel(target, options=None, compression=None):
- """Creates an insecure Channel to a server.
-
+ """Creates an insecure Channel to a server.
+
The returned Channel is thread-safe.
Args:
@@ -1913,18 +1913,18 @@ def insecure_channel(target, options=None, compression=None):
in gRPC Core runtime) to configure the channel.
compression: An optional value indicating the compression method to be
used over the lifetime of the channel. This is an EXPERIMENTAL option.
-
+
Returns:
A Channel.
"""
from grpc import _channel # pylint: disable=cyclic-import
return _channel.Channel(target, () if options is None else options, None,
compression)
-
-
+
+
def secure_channel(target, credentials, options=None, compression=None):
- """Creates a secure Channel to a server.
-
+ """Creates a secure Channel to a server.
+
The returned Channel is thread-safe.
Args:
@@ -1934,7 +1934,7 @@ def secure_channel(target, credentials, options=None, compression=None):
in gRPC Core runtime) to configure the channel.
compression: An optional value indicating the compression method to be
used over the lifetime of the channel. This is an EXPERIMENTAL option.
-
+
Returns:
A Channel.
"""
@@ -1944,10 +1944,10 @@ def secure_channel(target, credentials, options=None, compression=None):
raise ValueError(
"secure_channel cannot be called with insecure credentials." +
" Call insecure_channel instead.")
- return _channel.Channel(target, () if options is None else options,
+ return _channel.Channel(target, () if options is None else options,
credentials._credentials, compression)
-
-
+
+
def intercept_channel(channel, *interceptors):
"""Intercepts a channel through a set of interceptors.
@@ -1982,8 +1982,8 @@ def server(thread_pool,
options=None,
maximum_concurrent_rpcs=None,
compression=None):
- """Creates a Server with which RPCs can be serviced.
-
+ """Creates a Server with which RPCs can be serviced.
+
Args:
thread_pool: A futures.ThreadPoolExecutor to be used by the Server
to execute RPC handlers.
@@ -2002,7 +2002,7 @@ def server(thread_pool,
compression: An element of grpc.compression, e.g.
grpc.compression.Gzip. This compression algorithm will be used for the
lifetime of the server unless overridden. This is an EXPERIMENTAL option.
-
+
Returns:
A Server object.
"""
@@ -2012,8 +2012,8 @@ def server(thread_pool,
() if interceptors is None else interceptors,
() if options is None else options,
maximum_concurrent_rpcs, compression)
-
-
+
+
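# End-to-end sketch of server construction with the parameters documented
# above; the handler, service name, channel option and address are
# assumptions chosen for illustration.
from concurrent import futures
import grpc

def echo(request, context):
    return request

server = grpc.server(
    futures.ThreadPoolExecutor(max_workers=10),
    options=(('grpc.max_receive_message_length', 4 * 1024 * 1024),),
    maximum_concurrent_rpcs=100)
server.add_generic_rpc_handlers((grpc.method_handlers_generic_handler(
    'example.Echo', {'Echo': grpc.unary_unary_rpc_method_handler(echo)}),))
server.add_insecure_port('[::]:50051')
server.start()
server.wait_for_termination()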
@contextlib.contextmanager
def _create_servicer_context(rpc_event, state, request_deserializer):
from grpc import _server # pylint: disable=cyclic-import
@@ -2040,8 +2040,8 @@ class Compression(enum.IntEnum):
from grpc._runtime_protos import protos, services, protos_and_services # pylint: disable=wrong-import-position
-################################### __all__ #################################
-
+################################### __all__ #################################
+
__all__ = (
'FutureTimeoutError',
'FutureCancelledError',
@@ -2104,25 +2104,25 @@ __all__ = (
'services',
'protos_and_services',
)
-
-############################### Extension Shims ################################
-
-# Here to maintain backwards compatibility; avoid using these in new code!
-try:
- import grpc_tools
- sys.modules.update({'grpc.tools': grpc_tools})
-except ImportError:
- pass
-try:
- import grpc_health
- sys.modules.update({'grpc.health': grpc_health})
-except ImportError:
- pass
-try:
- import grpc_reflection
- sys.modules.update({'grpc.reflection': grpc_reflection})
-except ImportError:
- pass
+
+############################### Extension Shims ################################
+
+# Here to maintain backwards compatibility; avoid using these in new code!
+try:
+ import grpc_tools
+ sys.modules.update({'grpc.tools': grpc_tools})
+except ImportError:
+ pass
+try:
+ import grpc_health
+ sys.modules.update({'grpc.health': grpc_health})
+except ImportError:
+ pass
+try:
+ import grpc_reflection
+ sys.modules.update({'grpc.reflection': grpc_reflection})
+except ImportError:
+ pass
# Prevents import order issue in the case of renamed path.
if sys.version_info >= (3, 6) and __name__ == "grpc":
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_auth.py b/contrib/libs/grpc/src/python/grpcio/grpc/_auth.py
index 2d38320aff..55a95ef036 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_auth.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_auth.py
@@ -1,39 +1,39 @@
# Copyright 2016 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""GRPCAuthMetadataPlugins for standard authentication."""
-
-import inspect
-
-import grpc
-
-
-def _sign_request(callback, token, error):
- metadata = (('authorization', 'Bearer {}'.format(token)),)
- callback(metadata, error)
-
-
-class GoogleCallCredentials(grpc.AuthMetadataPlugin):
- """Metadata wrapper for GoogleCredentials from the oauth2client library."""
-
- def __init__(self, credentials):
- self._credentials = credentials
- # Hack to determine if these are JWT creds and we need to pass
- # additional_claims when getting a token
+"""GRPCAuthMetadataPlugins for standard authentication."""
+
+import inspect
+
+import grpc
+
+
+def _sign_request(callback, token, error):
+ metadata = (('authorization', 'Bearer {}'.format(token)),)
+ callback(metadata, error)
+
+
+class GoogleCallCredentials(grpc.AuthMetadataPlugin):
+ """Metadata wrapper for GoogleCredentials from the oauth2client library."""
+
+ def __init__(self, credentials):
+ self._credentials = credentials
+ # Hack to determine if these are JWT creds and we need to pass
+ # additional_claims when getting a token
self._is_jwt = 'additional_claims' in inspect.getargspec( # pylint: disable=deprecated-method
credentials.get_access_token).args
-
- def __call__(self, context, callback):
+
+ def __call__(self, context, callback):
try:
if self._is_jwt:
access_token = self._credentials.get_access_token(
@@ -44,15 +44,15 @@ class GoogleCallCredentials(grpc.AuthMetadataPlugin):
access_token = self._credentials.get_access_token().access_token
except Exception as exception: # pylint: disable=broad-except
_sign_request(callback, None, exception)
- else:
+ else:
_sign_request(callback, access_token, None)
-
-
+
+
class AccessTokenAuthMetadataPlugin(grpc.AuthMetadataPlugin):
- """Metadata wrapper for raw access token credentials."""
-
- def __init__(self, access_token):
- self._access_token = access_token
-
- def __call__(self, context, callback):
- _sign_request(callback, self._access_token, None)
+ """Metadata wrapper for raw access token credentials."""
+
+ def __init__(self, access_token):
+ self._access_token = access_token
+
+ def __call__(self, context, callback):
+ _sign_request(callback, self._access_token, None)
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_channel.py b/contrib/libs/grpc/src/python/grpcio/grpc/_channel.py
index 11921d7883..ddd861ad42 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_channel.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_channel.py
@@ -1,39 +1,39 @@
# Copyright 2016 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Invocation-side implementation of gRPC Python."""
-
+"""Invocation-side implementation of gRPC Python."""
+
import copy
import functools
import logging
import os
-import sys
-import threading
-import time
-
-import grpc
+import sys
+import threading
+import time
+
+import grpc
import grpc.experimental
from grpc import _compression
-from grpc import _common
-from grpc import _grpcio_metadata
-from grpc._cython import cygrpc
-
+from grpc import _common
+from grpc import _grpcio_metadata
+from grpc._cython import cygrpc
+
_LOGGER = logging.getLogger(__name__)
-_USER_AGENT = 'grpc-python/{}'.format(_grpcio_metadata.__version__)
-
-_EMPTY_FLAGS = 0
-
+_USER_AGENT = 'grpc-python/{}'.format(_grpcio_metadata.__version__)
+
+_EMPTY_FLAGS = 0
+
# NOTE(rbellevi): No guarantees are given about the maintenance of this
# environment variable.
_DEFAULT_SINGLE_THREADED_UNARY_STREAM = os.getenv(
@@ -65,15 +65,15 @@ _STREAM_STREAM_INITIAL_DUE = (
cygrpc.OperationType.receive_initial_metadata,
cygrpc.OperationType.receive_status_on_client,
)
-
-_CHANNEL_SUBSCRIPTION_CALLBACK_ERROR_LOG_MESSAGE = (
- 'Exception calling channel subscription callback!')
-
+
+_CHANNEL_SUBSCRIPTION_CALLBACK_ERROR_LOG_MESSAGE = (
+ 'Exception calling channel subscription callback!')
+
_OK_RENDEZVOUS_REPR_FORMAT = ('<{} of RPC that terminated with:\n'
'\tstatus = {}\n'
'\tdetails = "{}"\n'
'>')
-
+
_NON_OK_RENDEZVOUS_REPR_FORMAT = ('<{} of RPC that terminated with:\n'
'\tstatus = {}\n'
'\tdetails = "{}"\n'
@@ -81,92 +81,92 @@ _NON_OK_RENDEZVOUS_REPR_FORMAT = ('<{} of RPC that terminated with:\n'
'>')
-def _deadline(timeout):
+def _deadline(timeout):
return None if timeout is None else time.time() + timeout
-
-
-def _unknown_code_details(unknown_cygrpc_code, details):
- return 'Server sent unknown code {} and details "{}"'.format(
- unknown_cygrpc_code, details)
-
-
-class _RPCState(object):
-
- def __init__(self, due, initial_metadata, trailing_metadata, code, details):
- self.condition = threading.Condition()
- # The cygrpc.OperationType objects representing events due from the RPC's
- # completion queue.
- self.due = set(due)
- self.initial_metadata = initial_metadata
- self.response = None
- self.trailing_metadata = trailing_metadata
- self.code = code
- self.details = details
+
+
+def _unknown_code_details(unknown_cygrpc_code, details):
+ return 'Server sent unknown code {} and details "{}"'.format(
+ unknown_cygrpc_code, details)
+
+
+class _RPCState(object):
+
+ def __init__(self, due, initial_metadata, trailing_metadata, code, details):
+ self.condition = threading.Condition()
+ # The cygrpc.OperationType objects representing events due from the RPC's
+ # completion queue.
+ self.due = set(due)
+ self.initial_metadata = initial_metadata
+ self.response = None
+ self.trailing_metadata = trailing_metadata
+ self.code = code
+ self.details = details
self.debug_error_string = None
- # The semantics of grpc.Future.cancel and grpc.Future.cancelled are
- # slightly wonky, so they have to be tracked separately from the rest of the
- # result of the RPC. This field tracks whether cancellation was requested
- # prior to termination of the RPC.
- self.cancelled = False
- self.callbacks = []
+ # The semantics of grpc.Future.cancel and grpc.Future.cancelled are
+ # slightly wonky, so they have to be tracked separately from the rest of the
+ # result of the RPC. This field tracks whether cancellation was requested
+ # prior to termination of the RPC.
+ self.cancelled = False
+ self.callbacks = []
self.fork_epoch = cygrpc.get_fork_epoch()
-
+
def reset_postfork_child(self):
self.condition = threading.Condition()
+
-
-def _abort(state, code, details):
- if state.code is None:
- state.code = code
- state.details = details
- if state.initial_metadata is None:
+def _abort(state, code, details):
+ if state.code is None:
+ state.code = code
+ state.details = details
+ if state.initial_metadata is None:
state.initial_metadata = ()
state.trailing_metadata = ()
-
-
-def _handle_event(event, state, response_deserializer):
- callbacks = []
- for batch_operation in event.batch_operations:
+
+
+def _handle_event(event, state, response_deserializer):
+ callbacks = []
+ for batch_operation in event.batch_operations:
operation_type = batch_operation.type()
- state.due.remove(operation_type)
- if operation_type == cygrpc.OperationType.receive_initial_metadata:
+ state.due.remove(operation_type)
+ if operation_type == cygrpc.OperationType.receive_initial_metadata:
state.initial_metadata = batch_operation.initial_metadata()
- elif operation_type == cygrpc.OperationType.receive_message:
+ elif operation_type == cygrpc.OperationType.receive_message:
serialized_response = batch_operation.message()
- if serialized_response is not None:
- response = _common.deserialize(serialized_response,
- response_deserializer)
- if response is None:
- details = 'Exception deserializing response!'
- _abort(state, grpc.StatusCode.INTERNAL, details)
- else:
- state.response = response
- elif operation_type == cygrpc.OperationType.receive_status_on_client:
+ if serialized_response is not None:
+ response = _common.deserialize(serialized_response,
+ response_deserializer)
+ if response is None:
+ details = 'Exception deserializing response!'
+ _abort(state, grpc.StatusCode.INTERNAL, details)
+ else:
+ state.response = response
+ elif operation_type == cygrpc.OperationType.receive_status_on_client:
state.trailing_metadata = batch_operation.trailing_metadata()
- if state.code is None:
- code = _common.CYGRPC_STATUS_CODE_TO_STATUS_CODE.get(
+ if state.code is None:
+ code = _common.CYGRPC_STATUS_CODE_TO_STATUS_CODE.get(
batch_operation.code())
- if code is None:
- state.code = grpc.StatusCode.UNKNOWN
- state.details = _unknown_code_details(
+ if code is None:
+ state.code = grpc.StatusCode.UNKNOWN
+ state.details = _unknown_code_details(
code, batch_operation.details())
- else:
- state.code = code
+ else:
+ state.code = code
state.details = batch_operation.details()
state.debug_error_string = batch_operation.error_string()
- callbacks.extend(state.callbacks)
- state.callbacks = None
- return callbacks
-
-
+ callbacks.extend(state.callbacks)
+ state.callbacks = None
+ return callbacks
+
+
def _event_handler(state, response_deserializer):
-
- def handle_event(event):
- with state.condition:
- callbacks = _handle_event(event, state, response_deserializer)
- state.condition.notify_all()
- done = not state.due
- for callback in callbacks:
+
+ def handle_event(event):
+ with state.condition:
+ callbacks = _handle_event(event, state, response_deserializer)
+ state.condition.notify_all()
+ done = not state.due
+ for callback in callbacks:
try:
callback()
except Exception as e: # pylint: disable=broad-except
@@ -175,26 +175,26 @@ def _event_handler(state, response_deserializer):
logging.error('Exception in callback %s: %s',
repr(callback.func), repr(e))
return done and state.fork_epoch >= cygrpc.get_fork_epoch()
-
- return handle_event
-
-
+
+ return handle_event
+
+
#pylint: disable=too-many-statements
def _consume_request_iterator(request_iterator, state, call, request_serializer,
event_handler):
"""Consume a request iterator supplied by the user."""
-
+
def consume_request_iterator(): # pylint: disable=too-many-branches
# Iterate over the request iterator until it is exhausted or an error
# condition is encountered.
- while True:
+ while True:
return_from_user_request_generator_invoked = False
- try:
+ try:
# The thread may die in user-code. Do not block fork for this.
cygrpc.enter_user_request_generator()
- request = next(request_iterator)
- except StopIteration:
- break
+ request = next(request_iterator)
+ except StopIteration:
+ break
except Exception: # pylint: disable=broad-except
cygrpc.return_from_user_request_generator()
return_from_user_request_generator_invoked = True
@@ -204,24 +204,24 @@ def _consume_request_iterator(request_iterator, state, call, request_serializer,
call.cancel(_common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[code],
details)
_abort(state, code, details)
- return
+ return
finally:
if not return_from_user_request_generator_invoked:
cygrpc.return_from_user_request_generator()
- serialized_request = _common.serialize(request, request_serializer)
- with state.condition:
- if state.code is None and not state.cancelled:
- if serialized_request is None:
+ serialized_request = _common.serialize(request, request_serializer)
+ with state.condition:
+ if state.code is None and not state.cancelled:
+ if serialized_request is None:
code = grpc.StatusCode.INTERNAL
- details = 'Exception serializing request!'
+ details = 'Exception serializing request!'
call.cancel(
_common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[code],
details)
_abort(state, code, details)
- return
- else:
+ return
+ else:
operations = (cygrpc.SendMessageOperation(
- serialized_request, _EMPTY_FLAGS),)
+ serialized_request, _EMPTY_FLAGS),)
operating = call.operate(operations, event_handler)
if operating:
state.due.add(cygrpc.OperationType.send_message)
@@ -240,22 +240,22 @@ def _consume_request_iterator(request_iterator, state, call, request_serializer,
state))
if state.code is not None:
return
- else:
- return
- with state.condition:
- if state.code is None:
- operations = (
+ else:
+ return
+ with state.condition:
+ if state.code is None:
+ operations = (
cygrpc.SendCloseFromClientOperation(_EMPTY_FLAGS),)
operating = call.operate(operations, event_handler)
if operating:
state.due.add(cygrpc.OperationType.send_close_from_client)
-
+
consumption_thread = cygrpc.ForkManagedThread(
target=consume_request_iterator)
consumption_thread.setDaemon(True)
- consumption_thread.start()
-
-
+ consumption_thread.start()
+
+
def _rpc_state_string(class_name, rpc_state):
"""Calculates error string for RPC."""
with rpc_state.condition:
@@ -268,7 +268,7 @@ def _rpc_state_string(class_name, rpc_state):
return _NON_OK_RENDEZVOUS_REPR_FORMAT.format(
class_name, rpc_state.code, rpc_state.details,
rpc_state.debug_error_string)
-
+
class _InactiveRpcError(grpc.RpcError, grpc.Call, grpc.Future):
"""An RPC error not tied to the execution of a particular RPC.
@@ -362,13 +362,13 @@ class _Rendezvous(grpc.RpcError, grpc.RpcContext):
possibly None, to represent an RPC with no deadline at all.
"""
- def __init__(self, state, call, response_deserializer, deadline):
- super(_Rendezvous, self).__init__()
- self._state = state
- self._call = call
- self._response_deserializer = response_deserializer
- self._deadline = deadline
-
+ def __init__(self, state, call, response_deserializer, deadline):
+ super(_Rendezvous, self).__init__()
+ self._state = state
+ self._call = call
+ self._response_deserializer = response_deserializer
+ self._deadline = deadline
+
def is_active(self):
"""See grpc.RpcContext.is_active"""
with self._state.condition:
@@ -382,21 +382,21 @@ class _Rendezvous(grpc.RpcError, grpc.RpcContext):
else:
return max(self._deadline - time.time(), 0)
- def cancel(self):
+ def cancel(self):
"""See grpc.RpcContext.cancel"""
- with self._state.condition:
- if self._state.code is None:
+ with self._state.condition:
+ if self._state.code is None:
code = grpc.StatusCode.CANCELLED
details = 'Locally cancelled by application!'
self._call.cancel(
_common.STATUS_CODE_TO_CYGRPC_STATUS_CODE[code], details)
- self._state.cancelled = True
+ self._state.cancelled = True
_abort(self._state, code, details)
- self._state.condition.notify_all()
+ self._state.condition.notify_all()
return True
else:
return False
-
+
def add_callback(self, callback):
"""See grpc.RpcContext.add_callback"""
with self._state.condition:
@@ -687,27 +687,27 @@ class _MultiThreadedRendezvous(_Rendezvous, grpc.Call, grpc.Future): # pylint:
_common.wait(self._state.condition.wait, _done)
return _common.decode(self._state.debug_error_string)
- def cancelled(self):
- with self._state.condition:
- return self._state.cancelled
-
- def running(self):
- with self._state.condition:
- return self._state.code is None
-
- def done(self):
- with self._state.condition:
- return self._state.code is not None
-
+ def cancelled(self):
+ with self._state.condition:
+ return self._state.cancelled
+
+ def running(self):
+ with self._state.condition:
+ return self._state.code is None
+
+ def done(self):
+ with self._state.condition:
+ return self._state.code is not None
+
def _is_complete(self):
return self._state.code is not None
- def result(self, timeout=None):
+ def result(self, timeout=None):
"""Returns the result of the computation or raises its exception.
See grpc.Future.result for the full API contract.
"""
- with self._state.condition:
+ with self._state.condition:
timed_out = _common.wait(self._state.condition.wait,
self._is_complete,
timeout=timeout)
@@ -715,18 +715,18 @@ class _MultiThreadedRendezvous(_Rendezvous, grpc.Call, grpc.Future): # pylint:
raise grpc.FutureTimeoutError()
else:
if self._state.code is grpc.StatusCode.OK:
- return self._state.response
- elif self._state.cancelled:
- raise grpc.FutureCancelledError()
- else:
- raise self
-
- def exception(self, timeout=None):
+ return self._state.response
+ elif self._state.cancelled:
+ raise grpc.FutureCancelledError()
+ else:
+ raise self
+
+ def exception(self, timeout=None):
"""Return the exception raised by the computation.
See grpc.Future.exception for the full API contract.
"""
- with self._state.condition:
+ with self._state.condition:
timed_out = _common.wait(self._state.condition.wait,
self._is_complete,
timeout=timeout)
@@ -734,18 +734,18 @@ class _MultiThreadedRendezvous(_Rendezvous, grpc.Call, grpc.Future): # pylint:
raise grpc.FutureTimeoutError()
else:
if self._state.code is grpc.StatusCode.OK:
- return None
- elif self._state.cancelled:
- raise grpc.FutureCancelledError()
- else:
- return self
-
- def traceback(self, timeout=None):
+ return None
+ elif self._state.cancelled:
+ raise grpc.FutureCancelledError()
+ else:
+ return self
+
+ def traceback(self, timeout=None):
"""Access the traceback of the exception raised by the computation.
See grpc.future.traceback for the full API contract.
"""
- with self._state.condition:
+ with self._state.condition:
timed_out = _common.wait(self._state.condition.wait,
self._is_complete,
timeout=timeout)
@@ -753,44 +753,44 @@ class _MultiThreadedRendezvous(_Rendezvous, grpc.Call, grpc.Future): # pylint:
raise grpc.FutureTimeoutError()
else:
if self._state.code is grpc.StatusCode.OK:
- return None
- elif self._state.cancelled:
- raise grpc.FutureCancelledError()
- else:
- try:
- raise self
- except grpc.RpcError:
- return sys.exc_info()[2]
-
- def add_done_callback(self, fn):
- with self._state.condition:
- if self._state.code is None:
+ return None
+ elif self._state.cancelled:
+ raise grpc.FutureCancelledError()
+ else:
+ try:
+ raise self
+ except grpc.RpcError:
+ return sys.exc_info()[2]
+
+ def add_done_callback(self, fn):
+ with self._state.condition:
+ if self._state.code is None:
self._state.callbacks.append(functools.partial(fn, self))
- return
-
- fn(self)
-
- def _next(self):
- with self._state.condition:
- if self._state.code is None:
+ return
+
+ fn(self)
+
+ def _next(self):
+ with self._state.condition:
+ if self._state.code is None:
event_handler = _event_handler(self._state,
- self._response_deserializer)
+ self._response_deserializer)
operating = self._call.operate(
(cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),),
- event_handler)
+ event_handler)
if operating:
self._state.due.add(cygrpc.OperationType.receive_message)
- elif self._state.code is grpc.StatusCode.OK:
- raise StopIteration()
- else:
- raise self
-
+ elif self._state.code is grpc.StatusCode.OK:
+ raise StopIteration()
+ else:
+ raise self
+
def _response_ready():
return (
self._state.response is not None or
(cygrpc.OperationType.receive_message not in self._state.due
and self._state.code is not None))
-
+
_common.wait(self._state.condition.wait, _response_ready)
if self._state.response is not None:
response = self._state.response
@@ -801,31 +801,31 @@ class _MultiThreadedRendezvous(_Rendezvous, grpc.Call, grpc.Future): # pylint:
raise StopIteration()
elif self._state.code is not None:
raise self
-
-
-def _start_unary_request(request, timeout, request_serializer):
+
+
+def _start_unary_request(request, timeout, request_serializer):
deadline = _deadline(timeout)
- serialized_request = _common.serialize(request, request_serializer)
- if serialized_request is None:
+ serialized_request = _common.serialize(request, request_serializer)
+ if serialized_request is None:
state = _RPCState((), (), (), grpc.StatusCode.INTERNAL,
- 'Exception serializing request!')
+ 'Exception serializing request!')
error = _InactiveRpcError(state)
return deadline, None, error
- else:
+ else:
return deadline, serialized_request, None
-
-
+
+
def _end_unary_response_blocking(state, call, with_call, deadline):
- if state.code is grpc.StatusCode.OK:
- if with_call:
+ if state.code is grpc.StatusCode.OK:
+ if with_call:
rendezvous = _MultiThreadedRendezvous(state, call, None, deadline)
- return state.response, rendezvous
- else:
- return state.response
- else:
+ return state.response, rendezvous
+ else:
+ return state.response
+ else:
raise _InactiveRpcError(state)
-
-
+
+
def _stream_unary_invocation_operationses(metadata, initial_metadata_flags):
return (
(
@@ -859,18 +859,18 @@ def _determine_deadline(user_deadline):
return min(parent_deadline, user_deadline)
-class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
-
+class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
+
# pylint: disable=too-many-arguments
- def __init__(self, channel, managed_call, method, request_serializer,
- response_deserializer):
- self._channel = channel
- self._managed_call = managed_call
- self._method = method
- self._request_serializer = request_serializer
- self._response_deserializer = response_deserializer
+ def __init__(self, channel, managed_call, method, request_serializer,
+ response_deserializer):
+ self._channel = channel
+ self._managed_call = managed_call
+ self._method = method
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
self._context = cygrpc.build_census_context()
-
+
def _prepare(self, request, timeout, metadata, wait_for_ready, compression):
deadline, serialized_request, rendezvous = _start_unary_request(
request, timeout, self._request_serializer)
@@ -878,11 +878,11 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
wait_for_ready)
augmented_metadata = _compression.augment_metadata(
metadata, compression)
- if serialized_request is None:
+ if serialized_request is None:
return None, None, None, rendezvous
- else:
- state = _RPCState(_UNARY_UNARY_INITIAL_DUE, None, None, None, None)
- operations = (
+ else:
+ state = _RPCState(_UNARY_UNARY_INITIAL_DUE, None, None, None, None)
+ operations = (
cygrpc.SendInitialMetadataOperation(augmented_metadata,
initial_metadata_flags),
cygrpc.SendMessageOperation(serialized_request, _EMPTY_FLAGS),
@@ -892,14 +892,14 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
cygrpc.ReceiveStatusOnClientOperation(_EMPTY_FLAGS),
)
return state, operations, deadline, None
-
+
def _blocking(self, request, timeout, metadata, credentials, wait_for_ready,
compression):
state, operations, deadline, rendezvous = self._prepare(
request, timeout, metadata, wait_for_ready, compression)
if state is None:
raise rendezvous # pylint: disable-msg=raising-bad-type
- else:
+ else:
call = self._channel.segregated_call(
cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
self._method, None, _determine_deadline(deadline), metadata,
@@ -910,7 +910,7 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
event = call.next_event()
_handle_event(event, state, self._response_deserializer)
return state, call
-
+
def __call__(self,
request,
timeout=None,
@@ -921,7 +921,7 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
state, call, = self._blocking(request, timeout, metadata, credentials,
wait_for_ready, compression)
return _end_unary_response_blocking(state, call, False, None)
-
+
def with_call(self,
request,
timeout=None,
@@ -932,7 +932,7 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
state, call, = self._blocking(request, timeout, metadata, credentials,
wait_for_ready, compression)
return _end_unary_response_blocking(state, call, True, None)
-
+
def future(self,
request,
timeout=None,
@@ -944,7 +944,7 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
request, timeout, metadata, wait_for_ready, compression)
if state is None:
raise rendezvous # pylint: disable-msg=raising-bad-type
- else:
+ else:
event_handler = _event_handler(state, self._response_deserializer)
call = self._managed_call(
cygrpc.PropagationConstants.GRPC_PROPAGATE_DEFAULTS,
@@ -954,8 +954,8 @@ class _UnaryUnaryMultiCallable(grpc.UnaryUnaryMultiCallable):
return _MultiThreadedRendezvous(state, call,
self._response_deserializer,
deadline)
-
-
+
+
class _SingleThreadedUnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
# pylint: disable=too-many-arguments
@@ -1006,18 +1006,18 @@ class _SingleThreadedUnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
self._response_deserializer, deadline)
-class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
-
+class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
+
# pylint: disable=too-many-arguments
- def __init__(self, channel, managed_call, method, request_serializer,
- response_deserializer):
- self._channel = channel
- self._managed_call = managed_call
- self._method = method
- self._request_serializer = request_serializer
- self._response_deserializer = response_deserializer
+ def __init__(self, channel, managed_call, method, request_serializer,
+ response_deserializer):
+ self._channel = channel
+ self._managed_call = managed_call
+ self._method = method
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
self._context = cygrpc.build_census_context()
-
+
def __call__( # pylint: disable=too-many-locals
self,
request,
@@ -1030,12 +1030,12 @@ class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
request, timeout, self._request_serializer)
initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
wait_for_ready)
- if serialized_request is None:
+ if serialized_request is None:
raise rendezvous # pylint: disable-msg=raising-bad-type
- else:
+ else:
augmented_metadata = _compression.augment_metadata(
metadata, compression)
- state = _RPCState(_UNARY_STREAM_INITIAL_DUE, None, None, None, None)
+ state = _RPCState(_UNARY_STREAM_INITIAL_DUE, None, None, None, None)
operationses = (
(
cygrpc.SendInitialMetadataOperation(augmented_metadata,
@@ -1057,24 +1057,24 @@ class _UnaryStreamMultiCallable(grpc.UnaryStreamMultiCallable):
return _MultiThreadedRendezvous(state, call,
self._response_deserializer,
deadline)
-
-
-class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):
-
+
+
+class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):
+
# pylint: disable=too-many-arguments
- def __init__(self, channel, managed_call, method, request_serializer,
- response_deserializer):
- self._channel = channel
- self._managed_call = managed_call
- self._method = method
- self._request_serializer = request_serializer
- self._response_deserializer = response_deserializer
+ def __init__(self, channel, managed_call, method, request_serializer,
+ response_deserializer):
+ self._channel = channel
+ self._managed_call = managed_call
+ self._method = method
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
self._context = cygrpc.build_census_context()
-
+
def _blocking(self, request_iterator, timeout, metadata, credentials,
wait_for_ready, compression):
deadline = _deadline(timeout)
- state = _RPCState(_STREAM_UNARY_INITIAL_DUE, None, None, None, None)
+ state = _RPCState(_STREAM_UNARY_INITIAL_DUE, None, None, None, None)
initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
wait_for_ready)
augmented_metadata = _compression.augment_metadata(
@@ -1087,46 +1087,46 @@ class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):
augmented_metadata, initial_metadata_flags), self._context)
_consume_request_iterator(request_iterator, state, call,
self._request_serializer, None)
- while True:
+ while True:
event = call.next_event()
- with state.condition:
- _handle_event(event, state, self._response_deserializer)
- state.condition.notify_all()
- if not state.due:
- break
+ with state.condition:
+ _handle_event(event, state, self._response_deserializer)
+ state.condition.notify_all()
+ if not state.due:
+ break
return state, call
-
- def __call__(self,
- request_iterator,
- timeout=None,
- metadata=None,
+
+ def __call__(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
credentials=None,
wait_for_ready=None,
compression=None):
state, call, = self._blocking(request_iterator, timeout, metadata,
credentials, wait_for_ready, compression)
return _end_unary_response_blocking(state, call, False, None)
-
- def with_call(self,
- request_iterator,
- timeout=None,
- metadata=None,
+
+ def with_call(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
credentials=None,
wait_for_ready=None,
compression=None):
state, call, = self._blocking(request_iterator, timeout, metadata,
credentials, wait_for_ready, compression)
return _end_unary_response_blocking(state, call, True, None)
-
- def future(self,
- request_iterator,
- timeout=None,
- metadata=None,
+
+ def future(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
credentials=None,
wait_for_ready=None,
compression=None):
deadline = _deadline(timeout)
- state = _RPCState(_STREAM_UNARY_INITIAL_DUE, None, None, None, None)
+ state = _RPCState(_STREAM_UNARY_INITIAL_DUE, None, None, None, None)
event_handler = _event_handler(state, self._response_deserializer)
initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
wait_for_ready)
@@ -1143,29 +1143,29 @@ class _StreamUnaryMultiCallable(grpc.StreamUnaryMultiCallable):
self._request_serializer, event_handler)
return _MultiThreadedRendezvous(state, call,
self._response_deserializer, deadline)
-
-
-class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable):
-
+
+
+class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable):
+
# pylint: disable=too-many-arguments
- def __init__(self, channel, managed_call, method, request_serializer,
- response_deserializer):
- self._channel = channel
- self._managed_call = managed_call
- self._method = method
- self._request_serializer = request_serializer
- self._response_deserializer = response_deserializer
+ def __init__(self, channel, managed_call, method, request_serializer,
+ response_deserializer):
+ self._channel = channel
+ self._managed_call = managed_call
+ self._method = method
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
self._context = cygrpc.build_census_context()
-
- def __call__(self,
- request_iterator,
- timeout=None,
- metadata=None,
+
+ def __call__(self,
+ request_iterator,
+ timeout=None,
+ metadata=None,
credentials=None,
wait_for_ready=None,
compression=None):
deadline = _deadline(timeout)
- state = _RPCState(_STREAM_STREAM_INITIAL_DUE, None, None, None, None)
+ state = _RPCState(_STREAM_STREAM_INITIAL_DUE, None, None, None, None)
initial_metadata_flags = _InitialMetadataFlags().with_wait_for_ready(
wait_for_ready)
augmented_metadata = _compression.augment_metadata(
@@ -1188,8 +1188,8 @@ class _StreamStreamMultiCallable(grpc.StreamStreamMultiCallable):
self._request_serializer, event_handler)
return _MultiThreadedRendezvous(state, call,
self._response_deserializer, deadline)
-
-
+
+
class _InitialMetadataFlags(int):
"""Stores immutable initial metadata flags"""
@@ -1208,17 +1208,17 @@ class _InitialMetadataFlags(int):
return self
-class _ChannelCallState(object):
-
- def __init__(self, channel):
- self.lock = threading.Lock()
- self.channel = channel
+class _ChannelCallState(object):
+
+ def __init__(self, channel):
+ self.lock = threading.Lock()
+ self.channel = channel
self.managed_calls = 0
self.threading = False
-
+
def reset_postfork_child(self):
self.managed_calls = 0
-
+
def __del__(self):
try:
self.channel.close(cygrpc.StatusCode.cancelled,
@@ -1227,33 +1227,33 @@ class _ChannelCallState(object):
pass
-def _run_channel_spin_thread(state):
-
- def channel_spin():
- while True:
+def _run_channel_spin_thread(state):
+
+ def channel_spin():
+ while True:
cygrpc.block_if_fork_in_progress(state)
event = state.channel.next_call_event()
if event.completion_type == cygrpc.CompletionType.queue_timeout:
continue
call_completed = event.tag(event)
if call_completed:
- with state.lock:
+ with state.lock:
state.managed_calls -= 1
if state.managed_calls == 0:
- return
-
+ return
+
channel_spin_thread = cygrpc.ForkManagedThread(target=channel_spin)
channel_spin_thread.setDaemon(True)
- channel_spin_thread.start()
-
-
-def _channel_managed_call_management(state):
-
+ channel_spin_thread.start()
+
+
+def _channel_managed_call_management(state):
+
# pylint: disable=too-many-arguments
def create(flags, method, host, deadline, metadata, credentials,
operationses, event_handler, context):
"""Creates a cygrpc.IntegratedCall.
-
+
Args:
flags: An integer bitfield of call flags.
method: The RPC method.
@@ -1284,60 +1284,60 @@ def _channel_managed_call_management(state):
else:
state.managed_calls += 1
return call
-
- return create
-
-
-class _ChannelConnectivityState(object):
-
- def __init__(self, channel):
+
+ return create
+
+
+class _ChannelConnectivityState(object):
+
+ def __init__(self, channel):
self.lock = threading.RLock()
- self.channel = channel
- self.polling = False
- self.connectivity = None
- self.try_to_connect = False
- self.callbacks_and_connectivities = []
- self.delivering = False
-
+ self.channel = channel
+ self.polling = False
+ self.connectivity = None
+ self.try_to_connect = False
+ self.callbacks_and_connectivities = []
+ self.delivering = False
+
def reset_postfork_child(self):
self.polling = False
self.connectivity = None
self.try_to_connect = False
self.callbacks_and_connectivities = []
self.delivering = False
-
-
-def _deliveries(state):
- callbacks_needing_update = []
- for callback_and_connectivity in state.callbacks_and_connectivities:
- callback, callback_connectivity, = callback_and_connectivity
- if callback_connectivity is not state.connectivity:
- callbacks_needing_update.append(callback)
- callback_and_connectivity[1] = state.connectivity
- return callbacks_needing_update
-
-
-def _deliver(state, initial_connectivity, initial_callbacks):
- connectivity = initial_connectivity
- callbacks = initial_callbacks
- while True:
- for callback in callbacks:
+
+
+def _deliveries(state):
+ callbacks_needing_update = []
+ for callback_and_connectivity in state.callbacks_and_connectivities:
+ callback, callback_connectivity, = callback_and_connectivity
+ if callback_connectivity is not state.connectivity:
+ callbacks_needing_update.append(callback)
+ callback_and_connectivity[1] = state.connectivity
+ return callbacks_needing_update
+
+
+def _deliver(state, initial_connectivity, initial_callbacks):
+ connectivity = initial_connectivity
+ callbacks = initial_callbacks
+ while True:
+ for callback in callbacks:
cygrpc.block_if_fork_in_progress(state)
try:
callback(connectivity)
except Exception: # pylint: disable=broad-except
_LOGGER.exception(
_CHANNEL_SUBSCRIPTION_CALLBACK_ERROR_LOG_MESSAGE)
- with state.lock:
- callbacks = _deliveries(state)
- if callbacks:
- connectivity = state.connectivity
- else:
- state.delivering = False
- return
-
-
-def _spawn_delivery(state, callbacks):
+ with state.lock:
+ callbacks = _deliveries(state)
+ if callbacks:
+ connectivity = state.connectivity
+ else:
+ state.delivering = False
+ return
+
+
+def _spawn_delivery(state, callbacks):
delivering_thread = cygrpc.ForkManagedThread(target=_deliver,
args=(
state,
@@ -1345,85 +1345,85 @@ def _spawn_delivery(state, callbacks):
callbacks,
))
delivering_thread.setDaemon(True)
- delivering_thread.start()
- state.delivering = True
-
-
-# NOTE(https://github.com/grpc/grpc/issues/3064): We'd rather not poll.
-def _poll_connectivity(state, channel, initial_try_to_connect):
- try_to_connect = initial_try_to_connect
- connectivity = channel.check_connectivity_state(try_to_connect)
- with state.lock:
- state.connectivity = (
+ delivering_thread.start()
+ state.delivering = True
+
+
+# NOTE(https://github.com/grpc/grpc/issues/3064): We'd rather not poll.
+def _poll_connectivity(state, channel, initial_try_to_connect):
+ try_to_connect = initial_try_to_connect
+ connectivity = channel.check_connectivity_state(try_to_connect)
+ with state.lock:
+ state.connectivity = (
_common.
CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[connectivity])
callbacks = tuple(
callback for callback, unused_but_known_to_be_none_connectivity in
state.callbacks_and_connectivities)
- for callback_and_connectivity in state.callbacks_and_connectivities:
- callback_and_connectivity[1] = state.connectivity
- if callbacks:
- _spawn_delivery(state, callbacks)
- while True:
+ for callback_and_connectivity in state.callbacks_and_connectivities:
+ callback_and_connectivity[1] = state.connectivity
+ if callbacks:
+ _spawn_delivery(state, callbacks)
+ while True:
event = channel.watch_connectivity_state(connectivity,
time.time() + 0.2)
cygrpc.block_if_fork_in_progress(state)
- with state.lock:
- if not state.callbacks_and_connectivities and not state.try_to_connect:
- state.polling = False
- state.connectivity = None
- break
- try_to_connect = state.try_to_connect
- state.try_to_connect = False
- if event.success or try_to_connect:
- connectivity = channel.check_connectivity_state(try_to_connect)
- with state.lock:
- state.connectivity = (
- _common.CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[
- connectivity])
- if not state.delivering:
+ with state.lock:
+ if not state.callbacks_and_connectivities and not state.try_to_connect:
+ state.polling = False
+ state.connectivity = None
+ break
+ try_to_connect = state.try_to_connect
+ state.try_to_connect = False
+ if event.success or try_to_connect:
+ connectivity = channel.check_connectivity_state(try_to_connect)
+ with state.lock:
+ state.connectivity = (
+ _common.CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY[
+ connectivity])
+ if not state.delivering:
callbacks = _deliveries(state)
- if callbacks:
- _spawn_delivery(state, callbacks)
-
-
-def _subscribe(state, callback, try_to_connect):
- with state.lock:
- if not state.callbacks_and_connectivities and not state.polling:
+ if callbacks:
+ _spawn_delivery(state, callbacks)
+
+
+def _subscribe(state, callback, try_to_connect):
+ with state.lock:
+ if not state.callbacks_and_connectivities and not state.polling:
polling_thread = cygrpc.ForkManagedThread(
- target=_poll_connectivity,
- args=(state, state.channel, bool(try_to_connect)))
+ target=_poll_connectivity,
+ args=(state, state.channel, bool(try_to_connect)))
polling_thread.setDaemon(True)
- polling_thread.start()
- state.polling = True
- state.callbacks_and_connectivities.append([callback, None])
- elif not state.delivering and state.connectivity is not None:
- _spawn_delivery(state, (callback,))
- state.try_to_connect |= bool(try_to_connect)
- state.callbacks_and_connectivities.append(
- [callback, state.connectivity])
- else:
- state.try_to_connect |= bool(try_to_connect)
- state.callbacks_and_connectivities.append([callback, None])
-
-
-def _unsubscribe(state, callback):
- with state.lock:
+ polling_thread.start()
+ state.polling = True
+ state.callbacks_and_connectivities.append([callback, None])
+ elif not state.delivering and state.connectivity is not None:
+ _spawn_delivery(state, (callback,))
+ state.try_to_connect |= bool(try_to_connect)
+ state.callbacks_and_connectivities.append(
+ [callback, state.connectivity])
+ else:
+ state.try_to_connect |= bool(try_to_connect)
+ state.callbacks_and_connectivities.append([callback, None])
+
+
+def _unsubscribe(state, callback):
+ with state.lock:
for index, (subscribed_callback, unused_connectivity) in enumerate(
state.callbacks_and_connectivities):
- if callback == subscribed_callback:
- state.callbacks_and_connectivities.pop(index)
- break
-
-
+ if callback == subscribed_callback:
+ state.callbacks_and_connectivities.pop(index)
+ break
+
+
def _augment_options(base_options, compression):
compression_option = _compression.create_channel_option(compression)
return tuple(base_options) + compression_option + ((
cygrpc.ChannelArgKey.primary_user_agent_string,
_USER_AGENT,
),)
-
-
+
+
def _separate_channel_options(options):
"""Separates core channel options from Python channel options."""
core_options = []
@@ -1436,12 +1436,12 @@ def _separate_channel_options(options):
return python_options, core_options
-class Channel(grpc.Channel):
- """A cygrpc.Channel-backed implementation of grpc.Channel."""
-
+class Channel(grpc.Channel):
+ """A cygrpc.Channel-backed implementation of grpc.Channel."""
+
def __init__(self, target, options, credentials, compression):
- """Constructor.
-
+ """Constructor.
+
Args:
target: The target to which to connect.
options: Configuration options for the channel.
@@ -1452,37 +1452,37 @@ class Channel(grpc.Channel):
python_options, core_options = _separate_channel_options(options)
self._single_threaded_unary_stream = _DEFAULT_SINGLE_THREADED_UNARY_STREAM
self._process_python_options(python_options)
- self._channel = cygrpc.Channel(
+ self._channel = cygrpc.Channel(
_common.encode(target), _augment_options(core_options, compression),
credentials)
- self._call_state = _ChannelCallState(self._channel)
- self._connectivity_state = _ChannelConnectivityState(self._channel)
+ self._call_state = _ChannelCallState(self._channel)
+ self._connectivity_state = _ChannelConnectivityState(self._channel)
cygrpc.fork_register_channel(self)
-
+
def _process_python_options(self, python_options):
"""Sets channel attributes according to python-only channel options."""
for pair in python_options:
if pair[0] == grpc.experimental.ChannelOptions.SingleThreadedUnaryStream:
self._single_threaded_unary_stream = True
- def subscribe(self, callback, try_to_connect=None):
- _subscribe(self._connectivity_state, callback, try_to_connect)
-
- def unsubscribe(self, callback):
- _unsubscribe(self._connectivity_state, callback)
-
- def unary_unary(self,
- method,
- request_serializer=None,
- response_deserializer=None):
- return _UnaryUnaryMultiCallable(
+ def subscribe(self, callback, try_to_connect=None):
+ _subscribe(self._connectivity_state, callback, try_to_connect)
+
+ def unsubscribe(self, callback):
+ _unsubscribe(self._connectivity_state, callback)
+
+ def unary_unary(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ return _UnaryUnaryMultiCallable(
self._channel, _channel_managed_call_management(self._call_state),
- _common.encode(method), request_serializer, response_deserializer)
-
- def unary_stream(self,
- method,
- request_serializer=None,
- response_deserializer=None):
+ _common.encode(method), request_serializer, response_deserializer)
+
+ def unary_stream(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
# NOTE(rbellevi): Benchmarks have shown that running a unary-stream RPC
# on a single Python thread results in an appreciable speed-up. However,
# due to slight differences in capability, the multi-threaded variant
@@ -1497,23 +1497,23 @@ class Channel(grpc.Channel):
_channel_managed_call_management(self._call_state),
_common.encode(method), request_serializer,
response_deserializer)
-
- def stream_unary(self,
- method,
- request_serializer=None,
- response_deserializer=None):
- return _StreamUnaryMultiCallable(
+
+ def stream_unary(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ return _StreamUnaryMultiCallable(
self._channel, _channel_managed_call_management(self._call_state),
- _common.encode(method), request_serializer, response_deserializer)
-
- def stream_stream(self,
- method,
- request_serializer=None,
- response_deserializer=None):
- return _StreamStreamMultiCallable(
+ _common.encode(method), request_serializer, response_deserializer)
+
+ def stream_stream(self,
+ method,
+ request_serializer=None,
+ response_deserializer=None):
+ return _StreamStreamMultiCallable(
self._channel, _channel_managed_call_management(self._call_state),
- _common.encode(method), request_serializer, response_deserializer)
-
+ _common.encode(method), request_serializer, response_deserializer)
+
def _unsubscribe_all(self):
state = self._connectivity_state
if state:
@@ -1540,7 +1540,7 @@ class Channel(grpc.Channel):
def close(self):
self._close()
- def __del__(self):
+ def __del__(self):
# TODO(https://github.com/grpc/grpc/issues/12531): Several releases
# after 1.12 (1.16 or thereabouts?) add a "self._channel.close" call
# here (or more likely, call self._close() here). We don't do this today
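
The multicallables and connectivity helpers restored in this file sit directly
behind the public grpc.Channel surface. A minimal sketch of how they are reached
from user code, assuming a hypothetical endpoint, a hypothetical method path and
placeholder serializers (real applications would use generated protobuf stubs):

    import grpc

    channel = grpc.insecure_channel('localhost:50051')           # hypothetical endpoint

    # Channel.unary_unary() returns a _UnaryUnaryMultiCallable as defined above.
    say_hello = channel.unary_unary(
        '/example.Greeter/SayHello',                              # hypothetical method path
        request_serializer=lambda text: text.encode('utf-8'),     # stand-ins for protobuf
        response_deserializer=lambda data: data.decode('utf-8'))

    try:
        # __call__ -> _blocking() -> _end_unary_response_blocking(state, call, False, None)
        reply = say_hello('hello', timeout=1.0)
        # with_call() also returns the call; future() returns a _MultiThreadedRendezvous.
        reply, call = say_hello.with_call('hello', timeout=1.0)
        pending = say_hello.future('hello', timeout=1.0)
    except grpc.RpcError as rpc_error:                            # _InactiveRpcError is a grpc.RpcError
        print(rpc_error.code())

    channel.close()
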
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_common.py b/contrib/libs/grpc/src/python/grpcio/grpc/_common.py
index 128124c325..ddf4512541 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_common.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_common.py
@@ -1,105 +1,105 @@
# Copyright 2016 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Shared implementation."""
-
-import logging
+"""Shared implementation."""
+
+import logging
import time
-import six
-
-import grpc
-from grpc._cython import cygrpc
-
+import six
+
+import grpc
+from grpc._cython import cygrpc
+
_LOGGER = logging.getLogger(__name__)
-CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY = {
- cygrpc.ConnectivityState.idle:
+CYGRPC_CONNECTIVITY_STATE_TO_CHANNEL_CONNECTIVITY = {
+ cygrpc.ConnectivityState.idle:
grpc.ChannelConnectivity.IDLE,
- cygrpc.ConnectivityState.connecting:
+ cygrpc.ConnectivityState.connecting:
grpc.ChannelConnectivity.CONNECTING,
- cygrpc.ConnectivityState.ready:
+ cygrpc.ConnectivityState.ready:
grpc.ChannelConnectivity.READY,
- cygrpc.ConnectivityState.transient_failure:
+ cygrpc.ConnectivityState.transient_failure:
grpc.ChannelConnectivity.TRANSIENT_FAILURE,
- cygrpc.ConnectivityState.shutdown:
+ cygrpc.ConnectivityState.shutdown:
grpc.ChannelConnectivity.SHUTDOWN,
-}
-
-CYGRPC_STATUS_CODE_TO_STATUS_CODE = {
- cygrpc.StatusCode.ok: grpc.StatusCode.OK,
- cygrpc.StatusCode.cancelled: grpc.StatusCode.CANCELLED,
- cygrpc.StatusCode.unknown: grpc.StatusCode.UNKNOWN,
- cygrpc.StatusCode.invalid_argument: grpc.StatusCode.INVALID_ARGUMENT,
- cygrpc.StatusCode.deadline_exceeded: grpc.StatusCode.DEADLINE_EXCEEDED,
- cygrpc.StatusCode.not_found: grpc.StatusCode.NOT_FOUND,
- cygrpc.StatusCode.already_exists: grpc.StatusCode.ALREADY_EXISTS,
- cygrpc.StatusCode.permission_denied: grpc.StatusCode.PERMISSION_DENIED,
- cygrpc.StatusCode.unauthenticated: grpc.StatusCode.UNAUTHENTICATED,
- cygrpc.StatusCode.resource_exhausted: grpc.StatusCode.RESOURCE_EXHAUSTED,
- cygrpc.StatusCode.failed_precondition: grpc.StatusCode.FAILED_PRECONDITION,
- cygrpc.StatusCode.aborted: grpc.StatusCode.ABORTED,
- cygrpc.StatusCode.out_of_range: grpc.StatusCode.OUT_OF_RANGE,
- cygrpc.StatusCode.unimplemented: grpc.StatusCode.UNIMPLEMENTED,
- cygrpc.StatusCode.internal: grpc.StatusCode.INTERNAL,
- cygrpc.StatusCode.unavailable: grpc.StatusCode.UNAVAILABLE,
- cygrpc.StatusCode.data_loss: grpc.StatusCode.DATA_LOSS,
-}
-STATUS_CODE_TO_CYGRPC_STATUS_CODE = {
+}
+
+CYGRPC_STATUS_CODE_TO_STATUS_CODE = {
+ cygrpc.StatusCode.ok: grpc.StatusCode.OK,
+ cygrpc.StatusCode.cancelled: grpc.StatusCode.CANCELLED,
+ cygrpc.StatusCode.unknown: grpc.StatusCode.UNKNOWN,
+ cygrpc.StatusCode.invalid_argument: grpc.StatusCode.INVALID_ARGUMENT,
+ cygrpc.StatusCode.deadline_exceeded: grpc.StatusCode.DEADLINE_EXCEEDED,
+ cygrpc.StatusCode.not_found: grpc.StatusCode.NOT_FOUND,
+ cygrpc.StatusCode.already_exists: grpc.StatusCode.ALREADY_EXISTS,
+ cygrpc.StatusCode.permission_denied: grpc.StatusCode.PERMISSION_DENIED,
+ cygrpc.StatusCode.unauthenticated: grpc.StatusCode.UNAUTHENTICATED,
+ cygrpc.StatusCode.resource_exhausted: grpc.StatusCode.RESOURCE_EXHAUSTED,
+ cygrpc.StatusCode.failed_precondition: grpc.StatusCode.FAILED_PRECONDITION,
+ cygrpc.StatusCode.aborted: grpc.StatusCode.ABORTED,
+ cygrpc.StatusCode.out_of_range: grpc.StatusCode.OUT_OF_RANGE,
+ cygrpc.StatusCode.unimplemented: grpc.StatusCode.UNIMPLEMENTED,
+ cygrpc.StatusCode.internal: grpc.StatusCode.INTERNAL,
+ cygrpc.StatusCode.unavailable: grpc.StatusCode.UNAVAILABLE,
+ cygrpc.StatusCode.data_loss: grpc.StatusCode.DATA_LOSS,
+}
+STATUS_CODE_TO_CYGRPC_STATUS_CODE = {
grpc_code: cygrpc_code for cygrpc_code, grpc_code in six.iteritems(
- CYGRPC_STATUS_CODE_TO_STATUS_CODE)
-}
-
+ CYGRPC_STATUS_CODE_TO_STATUS_CODE)
+}
+
MAXIMUM_WAIT_TIMEOUT = 0.1
-
+
_ERROR_MESSAGE_PORT_BINDING_FAILED = 'Failed to bind to address %s; set ' \
'GRPC_VERBOSITY=debug environment variable to see detailed error message.'
-def encode(s):
- if isinstance(s, bytes):
- return s
- else:
+def encode(s):
+ if isinstance(s, bytes):
+ return s
+ else:
return s.encode('utf8')
-
-
-def decode(b):
+
+
+def decode(b):
if isinstance(b, bytes):
return b.decode('utf-8', 'replace')
return b
-
-
-def _transform(message, transformer, exception_message):
- if transformer is None:
- return message
- else:
- try:
- return transformer(message)
- except Exception: # pylint: disable=broad-except
+
+
+def _transform(message, transformer, exception_message):
+ if transformer is None:
+ return message
+ else:
+ try:
+ return transformer(message)
+ except Exception: # pylint: disable=broad-except
_LOGGER.exception(exception_message)
- return None
-
-
-def serialize(message, serializer):
- return _transform(message, serializer, 'Exception serializing message!')
-
-
-def deserialize(serialized_message, deserializer):
- return _transform(serialized_message, deserializer,
- 'Exception deserializing message!')
-
-
-def fully_qualified_method(group, method):
- return '/{}/{}'.format(group, method)
+ return None
+
+
+def serialize(message, serializer):
+ return _transform(message, serializer, 'Exception serializing message!')
+
+
+def deserialize(serialized_message, deserializer):
+ return _transform(serialized_message, deserializer,
+ 'Exception deserializing message!')
+
+
+def fully_qualified_method(group, method):
+ return '/{}/{}'.format(group, method)
def _wait_once(wait_fn, timeout, spin_cb):
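
The helpers above are small and pure, so their contracts can be shown directly.
Illustrative only (grpc._common is a private module, not public API):

    from grpc import _common

    assert _common.encode('SayHello') == b'SayHello'           # str -> UTF-8 bytes
    assert _common.encode(b'SayHello') == b'SayHello'          # bytes pass through unchanged

    assert _common.decode(b'r\xc3\xa9ponse') == 'réponse'      # bytes -> str, errors replaced
    assert _common.decode('already text') == 'already text'    # non-bytes pass through

    def broken_serializer(message):
        raise ValueError('boom')

    # _transform() logs the exception and returns None; _channel.py above then
    # reports StatusCode.INTERNAL ('Exception serializing request!').
    assert _common.serialize('msg', broken_serializer) is None
    assert _common.serialize('msg', None) == 'msg'             # no serializer: pass-through

    assert _common.fully_qualified_method('example.Greeter', 'SayHello') == \
        '/example.Greeter/SayHello'
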
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/README.rst b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/README.rst
index c0e66734e8..f39a14397e 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/README.rst
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/README.rst
@@ -1,52 +1,52 @@
-GRPC Python Cython layer
-========================
-
-Package for the GRPC Python Cython layer.
-
-What is Cython?
----------------
-
-Cython is both a superset of the Python language with extensions for dealing
-with C types and a tool that transpiles this superset into C code. It provides
-convenient means of statically typing expressions and of converting Python
-strings to pointers (among other niceties), thus dramatically smoothing the
-Python/C interop by allowing fluid use of APIs in both from the same source.
-See the wonderful `Cython website`_.
-
-Why Cython?
------------
-
-- **Python 2 and 3 support**
- Cython generated C code has precompiler macros to target both Python 2 and
- Python 3 C APIs, even while acting as a superset of just the Python 2
- language (e.g. using ``basestring``).
-- **Significantly less semantic noise**
- A lot of CPython code is just glue, especially human-error-prone
- ``Py_INCREF``-ing and ``Py_DECREF``-ing around error handlers and such.
- Cython takes care of that automagically.
-- **Possible PyPy support**
- One of the major developments in Cython over the past few years was the
- addition of support for PyPy. We might soon be able to provide such support
- ourselves through our use of Cython.
-- **Less Python glue code**
- There existed several adapter layers in and around the original CPython code
- to smooth the surface exposed to Python due to how much trouble it was to
- make such a smooth surface via the CPython API alone. Cython makes writing
- such a surface incredibly easy, so these adapter layers may be removed.
-
-Implications for Users
-----------------------
-
-Nothing additional will be required for users. PyPI packages will contain
-Cython generated C code and thus not necessitate a Cython installation.
-
-Implications for GRPC Developers
---------------------------------
-
-A typical edit-compile-debug cycle now requires Cython. We install Cython in
-the ``virtualenv`` generated for the Python tests in this repository, so
-initial test runs may take an extra 2+ minutes to complete. Subsequent test
-runs won't reinstall ``Cython`` (unless required versions change and the
-``virtualenv`` doesn't have installed versions that satisfy the change).
-
-.. _`Cython website`: http://cython.org/
+GRPC Python Cython layer
+========================
+
+Package for the GRPC Python Cython layer.
+
+What is Cython?
+---------------
+
+Cython is both a superset of the Python language with extensions for dealing
+with C types and a tool that transpiles this superset into C code. It provides
+convenient means of statically typing expressions and of converting Python
+strings to pointers (among other niceties), thus dramatically smoothing the
+Python/C interop by allowing fluid use of APIs in both from the same source.
+See the wonderful `Cython website`_.
+
+Why Cython?
+-----------
+
+- **Python 2 and 3 support**
+ Cython generated C code has precompiler macros to target both Python 2 and
+ Python 3 C APIs, even while acting as a superset of just the Python 2
+ language (e.g. using ``basestring``).
+- **Significantly less semantic noise**
+ A lot of CPython code is just glue, especially human-error-prone
+ ``Py_INCREF``-ing and ``Py_DECREF``-ing around error handlers and such.
+ Cython takes care of that automagically.
+- **Possible PyPy support**
+ One of the major developments in Cython over the past few years was the
+ addition of support for PyPy. We might soon be able to provide such support
+ ourselves through our use of Cython.
+- **Less Python glue code**
+ There existed several adapter layers in and around the original CPython code
+ to smooth the surface exposed to Python due to how much trouble it was to
+ make such a smooth surface via the CPython API alone. Cython makes writing
+ such a surface incredibly easy, so these adapter layers may be removed.
+
+Implications for Users
+----------------------
+
+Nothing additional will be required for users. PyPI packages will contain
+Cython generated C code and thus not necessitate a Cython installation.
+
+Implications for GRPC Developers
+--------------------------------
+
+A typical edit-compile-debug cycle now requires Cython. We install Cython in
+the ``virtualenv`` generated for the Python tests in this repository, so
+initial test runs may take an extra 2+ minutes to complete. Subsequent test
+runs won't reinstall ``Cython`` (unless required versions change and the
+``virtualenv`` doesn't have installed versions that satisfy the change).
+
+.. _`Cython website`: http://cython.org/
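
A hedged illustration of "Implications for Users" above: because the Cython layer
ships as a prebuilt extension, importing it from Python looks like any other
import (only names that already appear in the surrounding diffs are used here):

    from grpc._cython import cygrpc

    # Enums defined in the Cython layer and consumed by grpc._common above.
    print(cygrpc.StatusCode.ok, cygrpc.ConnectivityState.idle)
    # cdef classes generated from the .pyx.pxi sources shown below.
    print(cygrpc.CompletionQueue, cygrpc.Channel)
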
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/__init__.py b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/__init__.py
index 5fb4f3c3cf..1841020f80 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/__init__.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/__init__.py
@@ -1,11 +1,11 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/__init__.py b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/__init__.py
index 5fb4f3c3cf..1841020f80 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/__init__.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/__init__.py
@@ -1,11 +1,11 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/call.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/call.pxd.pxi
index 8babeb4536..2c03670aba 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/call.pxd.pxi
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/call.pxd.pxi
@@ -1,20 +1,20 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-
-cdef class Call:
-
- cdef grpc_call *c_call
- cdef list references
-
+
+
+cdef class Call:
+
+ cdef grpc_call *c_call
+ cdef list references
+
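
The declarations above give the C-level layout of the Call wrapper: a raw
grpc_call pointer plus a Python list named references. Judging from the matching
.pyx.pxi below (cancel() appends the details bytes before handing a <char *> to
core), the list exists to keep Python buffers alive while C still points into
them. A pure-Python analogy of that keep-alive pattern, not gRPC code:

    class WrapperLike:
        def __init__(self):
            self.references = []          # mirrors `cdef list references` above

        def cancel(self, details):
            data = details.encode('utf8')
            self.references.append(data)  # keep the bytes alive while C holds a pointer into them
            return data                   # stand-in for grpc_call_cancel_with_status(...)
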
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/call.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/call.pyx.pxi
index bdd155bea9..c5fcf4e37a 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/call.pyx.pxi
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/call.pyx.pxi
@@ -1,97 +1,97 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-
-cdef class Call:
-
- def __cinit__(self):
- # Create an *empty* call
+
+
+cdef class Call:
+
+ def __cinit__(self):
+ # Create an *empty* call
fork_handlers_and_grpc_init()
- self.c_call = NULL
- self.references = []
-
- def _start_batch(self, operations, tag, retain_self):
- if not self.is_valid:
- raise ValueError("invalid call object cannot be used from Python")
+ self.c_call = NULL
+ self.references = []
+
+ def _start_batch(self, operations, tag, retain_self):
+ if not self.is_valid:
+ raise ValueError("invalid call object cannot be used from Python")
cdef _BatchOperationTag batch_operation_tag = _BatchOperationTag(
tag, operations, self if retain_self else None)
batch_operation_tag.prepare()
cpython.Py_INCREF(batch_operation_tag)
cdef grpc_call_error error
- with nogil:
+ with nogil:
error = grpc_call_start_batch(
self.c_call, batch_operation_tag.c_ops, batch_operation_tag.c_nops,
<cpython.PyObject *>batch_operation_tag, NULL)
return error
-
- def start_client_batch(self, operations, tag):
- # We don't reference this call in the operations tag because
- # it should be cancelled when it goes out of scope
- return self._start_batch(operations, tag, False)
-
- def start_server_batch(self, operations, tag):
- return self._start_batch(operations, tag, True)
-
- def cancel(
- self, grpc_status_code error_code=GRPC_STATUS__DO_NOT_USE,
- details=None):
- details = str_to_bytes(details)
- if not self.is_valid:
- raise ValueError("invalid call object cannot be used from Python")
- if (details is None) != (error_code == GRPC_STATUS__DO_NOT_USE):
- raise ValueError("if error_code is specified, so must details "
- "(and vice-versa)")
- cdef grpc_call_error result
- cdef char *c_details = NULL
- if error_code != GRPC_STATUS__DO_NOT_USE:
- self.references.append(details)
- c_details = details
- with nogil:
- result = grpc_call_cancel_with_status(
- self.c_call, error_code, c_details, NULL)
- return result
- else:
- with nogil:
- result = grpc_call_cancel(self.c_call, NULL)
- return result
-
+
+ def start_client_batch(self, operations, tag):
+ # We don't reference this call in the operations tag because
+ # it should be cancelled when it goes out of scope
+ return self._start_batch(operations, tag, False)
+
+ def start_server_batch(self, operations, tag):
+ return self._start_batch(operations, tag, True)
+
+ def cancel(
+ self, grpc_status_code error_code=GRPC_STATUS__DO_NOT_USE,
+ details=None):
+ details = str_to_bytes(details)
+ if not self.is_valid:
+ raise ValueError("invalid call object cannot be used from Python")
+ if (details is None) != (error_code == GRPC_STATUS__DO_NOT_USE):
+ raise ValueError("if error_code is specified, so must details "
+ "(and vice-versa)")
+ cdef grpc_call_error result
+ cdef char *c_details = NULL
+ if error_code != GRPC_STATUS__DO_NOT_USE:
+ self.references.append(details)
+ c_details = details
+ with nogil:
+ result = grpc_call_cancel_with_status(
+ self.c_call, error_code, c_details, NULL)
+ return result
+ else:
+ with nogil:
+ result = grpc_call_cancel(self.c_call, NULL)
+ return result
+
def set_credentials(self, CallCredentials call_credentials not None):
cdef grpc_call_credentials *c_call_credentials = call_credentials.c()
cdef grpc_call_error call_error = grpc_call_set_credentials(
self.c_call, c_call_credentials)
grpc_call_credentials_release(c_call_credentials)
return call_error
-
- def peer(self):
- cdef char *peer = NULL
- with nogil:
- peer = grpc_call_get_peer(self.c_call)
- result = <bytes>peer
- with nogil:
- gpr_free(peer)
- return result
-
- def __dealloc__(self):
+
+ def peer(self):
+ cdef char *peer = NULL
+ with nogil:
+ peer = grpc_call_get_peer(self.c_call)
+ result = <bytes>peer
+ with nogil:
+ gpr_free(peer)
+ return result
+
+ def __dealloc__(self):
with nogil:
if self.c_call != NULL:
grpc_call_unref(self.c_call)
grpc_shutdown_blocking()
-
- # The object *should* always be valid from Python. Used for debugging.
- @property
- def is_valid(self):
- return self.c_call != NULL
-
+
+ # The object *should* always be valid from Python. Used for debugging.
+ @property
+ def is_valid(self):
+ return self.c_call != NULL
+
def _custom_op_on_c_call(self, int op):
return _custom_op_on_c_call(op, self.c_call)
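
Purely for illustration (cygrpc.Call is internal and is normally only constructed
by the channel code above), the "empty call" guard can be observed directly:

    from grpc._cython import cygrpc

    call = cygrpc.Call()          # __cinit__ creates an *empty* call (c_call == NULL)
    assert not call.is_valid      # the debugging property defined above

    try:
        call.cancel()             # entry points re-check is_valid before touching c_call
    except ValueError as error:
        print(error)              # "invalid call object cannot be used from Python"
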
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/channel.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/channel.pxd.pxi
index eb27f2df7a..1c38d29bee 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/channel.pxd.pxi
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/channel.pxd.pxi
@@ -1,18 +1,18 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-
+
+
cdef _check_call_error_no_metadata(c_call_error)
@@ -66,8 +66,8 @@ cdef class SegregatedCall:
cdef grpc_completion_queue *_c_completion_queue
-cdef class Channel:
-
+cdef class Channel:
+
cdef _ChannelState _state
# TODO(https://github.com/grpc/grpc/issues/15662): Eliminate this.
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/channel.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/channel.pyx.pxi
index 74c7f6c140..1de48a3690 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/channel.pyx.pxi
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/channel.pyx.pxi
@@ -1,18 +1,18 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-
+
+
_INTERNAL_CALL_ERROR_MESSAGE_FORMAT = (
'Internal gRPC call error %d. ' +
'Please report to https://github.com/grpc/grpc/issues')
@@ -438,8 +438,8 @@ cdef _calls_drained(_ChannelState state):
return not (state.integrated_call_states or state.segregated_call_states or
state.connectivity_due)
-cdef class Channel:
-
+cdef class Channel:
+
def __cinit__(
self, bytes target, object arguments,
ChannelCredentials channel_credentials):
@@ -452,15 +452,15 @@ cdef class Channel:
grpc_completion_queue_create_for_next(NULL))
self._arguments = arguments
cdef _ChannelArgs channel_args = _ChannelArgs(arguments)
- if channel_credentials is None:
+ if channel_credentials is None:
self._state.c_channel = grpc_insecure_channel_create(
<char *>target, channel_args.c_args(), NULL)
- else:
+ else:
c_channel_credentials = channel_credentials.c()
self._state.c_channel = grpc_secure_channel_create(
c_channel_credentials, <char *>target, channel_args.c_args(), NULL)
grpc_channel_credentials_release(c_channel_credentials)
-
+
def target(self):
cdef char *c_target
with self._state.condition:
@@ -468,7 +468,7 @@ cdef class Channel:
target = <bytes>c_target
gpr_free(c_target)
return target
-
+
def integrated_call(
self, int flags, method, host, object deadline, object metadata,
CallCredentials credentials, operationses_and_tags,
@@ -499,18 +499,18 @@ cdef class Channel:
self._state, flags, method, host, deadline, metadata, credentials,
operationses_and_tags, context)
- def check_connectivity_state(self, bint try_to_connect):
+ def check_connectivity_state(self, bint try_to_connect):
with self._state.condition:
if self._state.open:
return grpc_channel_check_connectivity_state(
self._state.c_channel, try_to_connect)
else:
raise ValueError('Cannot invoke RPC: %s' % self._state.closed_reason)
-
- def watch_connectivity_state(
+
+ def watch_connectivity_state(
self, grpc_connectivity_state last_observed_state, object deadline):
return _watch_connectivity_state(self._state, last_observed_state, deadline)
-
+
def close(self, code, details):
_close(self, code, details, False)
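
check_connectivity_state() and watch_connectivity_state() above are what the
_poll_connectivity() loop in _channel.py calls with a roughly 0.2 second deadline.
A minimal sketch of the same machinery driven through the public API, assuming a
hypothetical endpoint:

    import time
    import grpc

    def on_state_change(connectivity):
        print('channel is now', connectivity)             # grpc.ChannelConnectivity values

    channel = grpc.insecure_channel('localhost:50051')    # hypothetical endpoint
    channel.subscribe(on_state_change, try_to_connect=True)
    time.sleep(1)                                         # let the polling thread report a state or two
    channel.unsubscribe(on_state_change)
    channel.close()
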
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/completion_queue.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/completion_queue.pxd.pxi
index 0307f74cbe..303d5b3bd1 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/completion_queue.pxd.pxi
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/completion_queue.pxd.pxi
@@ -1,28 +1,28 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-
+
+
cdef grpc_event _next(grpc_completion_queue *c_completion_queue, deadline) except *
cdef _interpret_event(grpc_event c_event)
-cdef class CompletionQueue:
-
- cdef grpc_completion_queue *c_completion_queue
- cdef bint is_shutting_down
- cdef bint is_shutdown
-
+cdef class CompletionQueue:
+
+ cdef grpc_completion_queue *c_completion_queue
+ cdef bint is_shutting_down
+ cdef bint is_shutdown
+
cdef _interpret_event(self, grpc_event c_event)
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/completion_queue.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/completion_queue.pyx.pxi
index a47403ac51..816e88b69c 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/completion_queue.pyx.pxi
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/completion_queue.pyx.pxi
@@ -1,21 +1,21 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-
-cdef int _INTERRUPT_CHECK_PERIOD_MS = 200
-
-
+
+
+cdef int _INTERRUPT_CHECK_PERIOD_MS = 200
+
+
cdef grpc_event _next(grpc_completion_queue *c_completion_queue, deadline) except *:
cdef gpr_timespec c_increment
cdef gpr_timespec c_timeout
@@ -64,8 +64,8 @@ cdef _latent_event(grpc_completion_queue *c_completion_queue, object deadline):
return _interpret_event(c_event)
-cdef class CompletionQueue:
-
+cdef class CompletionQueue:
+
def __cinit__(self, shutdown_cq=False):
cdef grpc_completion_queue_attributes c_attrs
fork_handlers_and_grpc_init()
@@ -77,42 +77,42 @@ cdef class CompletionQueue:
grpc_completion_queue_factory_lookup(&c_attrs), &c_attrs, NULL);
else:
self.c_completion_queue = grpc_completion_queue_create_for_next(NULL)
- self.is_shutting_down = False
- self.is_shutdown = False
-
+ self.is_shutting_down = False
+ self.is_shutdown = False
+
cdef _interpret_event(self, grpc_event c_event):
unused_tag, event = _interpret_event(c_event)
if event.completion_type == GRPC_QUEUE_SHUTDOWN:
- self.is_shutdown = True
+ self.is_shutdown = True
return event
-
+
# We name this 'poll' to avoid problems with CPython's expectations for
# 'special' methods (like next and __next__).
def poll(self, deadline=None):
return self._interpret_event(_next(self.c_completion_queue, deadline))
-
- def shutdown(self):
- with nogil:
- grpc_completion_queue_shutdown(self.c_completion_queue)
- self.is_shutting_down = True
-
- def clear(self):
- if not self.is_shutting_down:
- raise ValueError('queue must be shutting down to be cleared')
- while self.poll().type != GRPC_QUEUE_SHUTDOWN:
- pass
-
- def __dealloc__(self):
- cdef gpr_timespec c_deadline
- c_deadline = gpr_inf_future(GPR_CLOCK_REALTIME)
- if self.c_completion_queue != NULL:
- # Ensure shutdown
- if not self.is_shutting_down:
- grpc_completion_queue_shutdown(self.c_completion_queue)
- # Pump the queue (All outstanding calls should have been cancelled)
- while not self.is_shutdown:
- event = grpc_completion_queue_next(
- self.c_completion_queue, c_deadline, NULL)
- self._interpret_event(event)
- grpc_completion_queue_destroy(self.c_completion_queue)
+
+ def shutdown(self):
+ with nogil:
+ grpc_completion_queue_shutdown(self.c_completion_queue)
+ self.is_shutting_down = True
+
+ def clear(self):
+ if not self.is_shutting_down:
+ raise ValueError('queue must be shutting down to be cleared')
+ while self.poll().type != GRPC_QUEUE_SHUTDOWN:
+ pass
+
+ def __dealloc__(self):
+ cdef gpr_timespec c_deadline
+ c_deadline = gpr_inf_future(GPR_CLOCK_REALTIME)
+ if self.c_completion_queue != NULL:
+ # Ensure shutdown
+ if not self.is_shutting_down:
+ grpc_completion_queue_shutdown(self.c_completion_queue)
+ # Pump the queue (All outstanding calls should have been cancelled)
+ while not self.is_shutdown:
+ event = grpc_completion_queue_next(
+ self.c_completion_queue, c_deadline, NULL)
+ self._interpret_event(event)
+ grpc_completion_queue_destroy(self.c_completion_queue)
grpc_shutdown_blocking()
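
Illustrative only (CompletionQueue is internal): clear() insists that shutdown()
has already been called, and __dealloc__ above drains the queue the same way
before destroying it.

    from grpc._cython import cygrpc

    cq = cygrpc.CompletionQueue()
    try:
        cq.clear()                # not shutting down yet
    except ValueError as error:
        print(error)              # 'queue must be shutting down to be cleared'

    cq.shutdown()                 # grpc_completion_queue_shutdown under nogil
    event = cq.poll()             # drains to the queue-shutdown completion event
    print(event.completion_type)
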
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/credentials.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/credentials.pxd.pxi
index ddaedb30bd..8c71da02a8 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/credentials.pxd.pxi
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/credentials.pxd.pxi
@@ -1,20 +1,20 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-
+
+
cdef class CallCredentials:
-
+
cdef grpc_call_credentials *c(self) except *
# TODO(https://github.com/grpc/grpc/issues/12531): remove.
@@ -49,44 +49,44 @@ cdef class CompositeCallCredentials(CallCredentials):
cdef grpc_call_credentials *c(self) except *
-cdef class ChannelCredentials:
-
+cdef class ChannelCredentials:
+
cdef grpc_channel_credentials *c(self) except *
-
+
cdef class SSLSessionCacheLRU:
cdef grpc_ssl_session_cache *_cache
cdef class SSLChannelCredentials(ChannelCredentials):
-
+
cdef readonly object _pem_root_certificates
cdef readonly object _private_key
cdef readonly object _certificate_chain
-
+
cdef grpc_channel_credentials *c(self) except *
-
-
+
+
cdef class CompositeChannelCredentials(ChannelCredentials):
-
+
cdef readonly tuple _call_credentialses
cdef readonly ChannelCredentials _channel_credentials
-
+
cdef grpc_channel_credentials *c(self) except *
-
-
+
+
cdef class ServerCertificateConfig:
-
+
cdef grpc_ssl_server_certificate_config *c_cert_config
cdef const char *c_pem_root_certs
cdef grpc_ssl_pem_key_cert_pair *c_ssl_pem_key_cert_pairs
cdef size_t c_ssl_pem_key_cert_pairs_count
cdef list references
-
-
+
+
cdef class ServerCredentials:
-
+
cdef grpc_server_credentials *c_credentials
cdef grpc_ssl_pem_key_cert_pair *c_ssl_pem_key_cert_pairs
cdef size_t c_ssl_pem_key_cert_pairs_count
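
The credential types declared above are not built by hand; the public grpc
constructors wrap them. A hedged sketch (no real certificates or tokens, and the
endpoint is hypothetical):

    import grpc

    channel_creds = grpc.ssl_channel_credentials()                  # -> SSLChannelCredentials
    call_creds = grpc.access_token_call_credentials('fake-token')   # -> a plugin CallCredentials
    composite = grpc.composite_channel_credentials(channel_creds, call_creds)
                                                                    # -> CompositeChannelCredentials

    channel = grpc.secure_channel('localhost:50051', composite)     # hypothetical endpoint
    channel.close()
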
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/credentials.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/credentials.pyx.pxi
index c75579cc04..4a2e1d324b 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/credentials.pyx.pxi
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/credentials.pyx.pxi
@@ -1,18 +1,18 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-
+
+
def _spawn_callback_in_thread(cb_func, args):
t = ForkManagedThread(target=cb_func, args=args)
t.setDaemon(True)
@@ -29,11 +29,11 @@ def _spawn_callback_async(callback, args):
cdef class CallCredentials:
-
+
cdef grpc_call_credentials *c(self) except *:
raise NotImplementedError()
-
-
+
+
cdef int _get_metadata(void *state,
grpc_auth_metadata_context context,
grpc_credentials_plugin_metadata_cb cb,
@@ -54,19 +54,19 @@ cdef int _get_metadata(void *state,
args = context.service_url, context.method_name, callback,
_spawn_callback_async(<object>state, args)
return 0 # Asynchronous return
-
-
+
+
cdef void _destroy(void *state) except * with gil:
cpython.Py_DECREF(<object>state)
grpc_shutdown_blocking()
-
-
+
+
cdef class MetadataPluginCallCredentials(CallCredentials):
-
+
def __cinit__(self, metadata_plugin, name):
self._metadata_plugin = metadata_plugin
self._name = name
-
+
cdef grpc_call_credentials *c(self) except *:
cdef grpc_metadata_credentials_plugin c_metadata_plugin
c_metadata_plugin.get_metadata = _get_metadata
@@ -78,8 +78,8 @@ cdef class MetadataPluginCallCredentials(CallCredentials):
# TODO(yihuazhang): Expose min_security_level via the Python API so that
# applications can decide what minimum security level their plugins require.
return grpc_metadata_credentials_create_from_plugin(c_metadata_plugin, GRPC_PRIVACY_AND_INTEGRITY, NULL)
-
-
+
+
cdef grpc_call_credentials *_composition(call_credentialses):
call_credentials_iterator = iter(call_credentialses)
cdef CallCredentials composition = next(call_credentials_iterator)
@@ -95,23 +95,23 @@ cdef grpc_call_credentials *_composition(call_credentialses):
grpc_call_credentials_release(c_additional_call_credentials)
c_composition = c_next_composition
return c_composition
-
-
+
+
cdef class CompositeCallCredentials(CallCredentials):
-
+
def __cinit__(self, call_credentialses):
self._call_credentialses = call_credentialses
-
+
cdef grpc_call_credentials *c(self) except *:
return _composition(self._call_credentialses)
-
-
+
+
cdef class ChannelCredentials:
-
+
cdef grpc_channel_credentials *c(self) except *:
raise NotImplementedError()
-
-
+
+
cdef class SSLSessionCacheLRU:
def __cinit__(self, capacity):
@@ -128,14 +128,14 @@ cdef class SSLSessionCacheLRU:
cdef class SSLChannelCredentials(ChannelCredentials):
-
+
def __cinit__(self, pem_root_certificates, private_key, certificate_chain):
if pem_root_certificates is not None and not isinstance(pem_root_certificates, bytes):
raise TypeError('expected certificate to be bytes, got %s' % (type(pem_root_certificates)))
self._pem_root_certificates = pem_root_certificates
self._private_key = private_key
self._certificate_chain = certificate_chain
-
+
cdef grpc_channel_credentials *c(self) except *:
cdef const char *c_pem_root_certificates
cdef grpc_ssl_pem_key_cert_pair c_pem_key_certificate_pair
@@ -157,14 +157,14 @@ cdef class SSLChannelCredentials(ChannelCredentials):
c_pem_key_certificate_pair.certificate_chain = NULL
return grpc_ssl_credentials_create(
c_pem_root_certificates, &c_pem_key_certificate_pair, NULL, NULL)
-
-
+
+
cdef class CompositeChannelCredentials(ChannelCredentials):
-
+
def __cinit__(self, call_credentialses, channel_credentials):
self._call_credentialses = call_credentialses
self._channel_credentials = channel_credentials
-
+
cdef grpc_channel_credentials *c(self) except *:
cdef grpc_channel_credentials *c_channel_credentials
c_channel_credentials = self._channel_credentials.c()
@@ -176,25 +176,25 @@ cdef class CompositeChannelCredentials(ChannelCredentials):
grpc_channel_credentials_release(c_channel_credentials)
grpc_call_credentials_release(c_call_credentials_composition)
return c_composition
-
-
+
+
cdef class ServerCertificateConfig:
-
+
def __cinit__(self):
fork_handlers_and_grpc_init()
self.c_cert_config = NULL
self.c_pem_root_certs = NULL
self.c_ssl_pem_key_cert_pairs = NULL
self.references = []
-
+
def __dealloc__(self):
grpc_ssl_server_certificate_config_destroy(self.c_cert_config)
gpr_free(self.c_ssl_pem_key_cert_pairs)
grpc_shutdown_blocking()
-
-
+
+
cdef class ServerCredentials:
-
+
def __cinit__(self):
fork_handlers_and_grpc_init()
self.c_credentials = NULL
@@ -202,12 +202,12 @@ cdef class ServerCredentials:
self.initial_cert_config = None
self.cert_config_fetcher = None
self.initial_cert_config_fetched = False
-
+
def __dealloc__(self):
if self.c_credentials != NULL:
grpc_server_credentials_release(self.c_credentials)
grpc_shutdown_blocking()
-
+
cdef const char* _get_c_pem_root_certs(pem_root_certs):
if pem_root_certs is None:
return NULL
@@ -222,7 +222,7 @@ cdef grpc_ssl_pem_key_cert_pair* _create_c_ssl_pem_key_cert_pairs(pem_key_cert_p
"SslPemKeyCertPair")
cdef size_t c_ssl_pem_key_cert_pairs_count = len(pem_key_cert_pairs)
cdef grpc_ssl_pem_key_cert_pair* c_ssl_pem_key_cert_pairs = NULL
- with nogil:
+ with nogil:
c_ssl_pem_key_cert_pairs = (
<grpc_ssl_pem_key_cert_pair *>gpr_malloc(
sizeof(grpc_ssl_pem_key_cert_pair) * c_ssl_pem_key_cert_pairs_count))
@@ -230,16 +230,16 @@ cdef grpc_ssl_pem_key_cert_pair* _create_c_ssl_pem_key_cert_pairs(pem_key_cert_p
c_ssl_pem_key_cert_pairs[i] = (
(<SslPemKeyCertPair>pem_key_cert_pairs[i]).c_pair)
return c_ssl_pem_key_cert_pairs
-
-def server_credentials_ssl(pem_root_certs, pem_key_cert_pairs,
- bint force_client_auth):
- pem_root_certs = str_to_bytes(pem_root_certs)
- pem_key_cert_pairs = list(pem_key_cert_pairs)
- cdef ServerCredentials credentials = ServerCredentials()
+
+def server_credentials_ssl(pem_root_certs, pem_key_cert_pairs,
+ bint force_client_auth):
+ pem_root_certs = str_to_bytes(pem_root_certs)
+ pem_key_cert_pairs = list(pem_key_cert_pairs)
+ cdef ServerCredentials credentials = ServerCredentials()
credentials.references.append(pem_root_certs)
- credentials.references.append(pem_key_cert_pairs)
+ credentials.references.append(pem_key_cert_pairs)
cdef const char * c_pem_root_certs = _get_c_pem_root_certs(pem_root_certs)
- credentials.c_ssl_pem_key_cert_pairs_count = len(pem_key_cert_pairs)
+ credentials.c_ssl_pem_key_cert_pairs_count = len(pem_key_cert_pairs)
credentials.c_ssl_pem_key_cert_pairs = _create_c_ssl_pem_key_cert_pairs(pem_key_cert_pairs)
cdef grpc_ssl_server_certificate_config *c_cert_config = NULL
c_cert_config = grpc_ssl_server_certificate_config_create(
@@ -254,8 +254,8 @@ def server_credentials_ssl(pem_root_certs, pem_key_cert_pairs,
c_cert_config)
# C-core assumes ownership of c_options
credentials.c_credentials = grpc_ssl_server_credentials_create_with_options(c_options)
- return credentials
-
+ return credentials
+
def server_certificate_config_ssl(pem_root_certs, pem_key_cert_pairs):
pem_root_certs = str_to_bytes(pem_root_certs)
pem_key_cert_pairs = list(pem_key_cert_pairs)
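
For orientation, the Cython classes in the hunks above (MetadataPluginCallCredentials, CompositeChannelCredentials, server_credentials_ssl) sit underneath the public grpcio credentials API. A minimal client-side sketch, using only documented grpc functions; the bearer token and target address are illustrative assumptions:

import grpc

class _TokenAuth(grpc.AuthMetadataPlugin):
    # Invoked once per RPC; the callback takes a tuple of (key, value)
    # metadata pairs and an error (None on success).
    def __init__(self, token):
        self._token = token

    def __call__(self, context, callback):
        callback((('authorization', 'Bearer ' + self._token),), None)

channel_credentials = grpc.ssl_channel_credentials()  # default/system roots
call_credentials = grpc.metadata_call_credentials(_TokenAuth('example-token'))
composite = grpc.composite_channel_credentials(channel_credentials,
                                               call_credentials)
channel = grpc.secure_channel('greeter.example.com:443', composite)

Per the code above, the plugin is invoked on a daemon thread created by _spawn_callback_async and _get_metadata returns 0 ("Asynchronous return"), so the C core is not blocked while the metadata is produced.
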
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc.pxi
index 54eb7fdffc..ed2aa95766 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc.pxi
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc.pxi
@@ -1,19 +1,19 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-cimport libc.time
-
+
+cimport libc.time
+
ctypedef ssize_t intptr_t
ctypedef size_t uintptr_t
ctypedef signed char int8_t
@@ -24,205 +24,205 @@ ctypedef unsigned char uint8_t
ctypedef unsigned short uint16_t
ctypedef unsigned int uint32_t
ctypedef unsigned long long uint64_t
-
-
-cdef extern from "grpc/support/alloc.h":
-
- void *gpr_malloc(size_t size) nogil
+
+
+cdef extern from "grpc/support/alloc.h":
+
+ void *gpr_malloc(size_t size) nogil
void *gpr_zalloc(size_t size) nogil
- void gpr_free(void *ptr) nogil
- void *gpr_realloc(void *p, size_t size) nogil
-
-
-cdef extern from "grpc/byte_buffer_reader.h":
-
- struct grpc_byte_buffer_reader:
- # We don't care about the internals
- pass
-
-
+ void gpr_free(void *ptr) nogil
+ void *gpr_realloc(void *p, size_t size) nogil
+
+
+cdef extern from "grpc/byte_buffer_reader.h":
+
+ struct grpc_byte_buffer_reader:
+ # We don't care about the internals
+ pass
+
+
cdef extern from "grpc/impl/codegen/grpc_types.h":
ctypedef struct grpc_experimental_completion_queue_functor:
void (*functor_run)(grpc_experimental_completion_queue_functor*, int);
-cdef extern from "grpc/grpc.h":
-
- ctypedef struct grpc_slice:
- # don't worry about writing out the members of grpc_slice; we never access
- # them directly.
- pass
-
- grpc_slice grpc_slice_ref(grpc_slice s) nogil
- void grpc_slice_unref(grpc_slice s) nogil
+cdef extern from "grpc/grpc.h":
+
+ ctypedef struct grpc_slice:
+ # don't worry about writing out the members of grpc_slice; we never access
+ # them directly.
+ pass
+
+ grpc_slice grpc_slice_ref(grpc_slice s) nogil
+ void grpc_slice_unref(grpc_slice s) nogil
grpc_slice grpc_empty_slice() nogil
- grpc_slice grpc_slice_new(void *p, size_t len, void (*destroy)(void *)) nogil
- grpc_slice grpc_slice_new_with_len(
- void *p, size_t len, void (*destroy)(void *, size_t)) nogil
- grpc_slice grpc_slice_malloc(size_t length) nogil
- grpc_slice grpc_slice_from_copied_string(const char *source) nogil
- grpc_slice grpc_slice_from_copied_buffer(const char *source, size_t len) nogil
+ grpc_slice grpc_slice_new(void *p, size_t len, void (*destroy)(void *)) nogil
+ grpc_slice grpc_slice_new_with_len(
+ void *p, size_t len, void (*destroy)(void *, size_t)) nogil
+ grpc_slice grpc_slice_malloc(size_t length) nogil
+ grpc_slice grpc_slice_from_copied_string(const char *source) nogil
+ grpc_slice grpc_slice_from_copied_buffer(const char *source, size_t len) nogil
grpc_slice grpc_slice_copy(grpc_slice s) nogil
-
- # Declare functions for function-like macros (because Cython)...
- void *grpc_slice_start_ptr "GRPC_SLICE_START_PTR" (grpc_slice s) nogil
- size_t grpc_slice_length "GRPC_SLICE_LENGTH" (grpc_slice s) nogil
-
+
+ # Declare functions for function-like macros (because Cython)...
+ void *grpc_slice_start_ptr "GRPC_SLICE_START_PTR" (grpc_slice s) nogil
+ size_t grpc_slice_length "GRPC_SLICE_LENGTH" (grpc_slice s) nogil
+
const int GPR_MS_PER_SEC
const int GPR_US_PER_SEC
const int GPR_NS_PER_SEC
- ctypedef enum gpr_clock_type:
- GPR_CLOCK_MONOTONIC
- GPR_CLOCK_REALTIME
- GPR_CLOCK_PRECISE
- GPR_TIMESPAN
-
- ctypedef struct gpr_timespec:
- int64_t seconds "tv_sec"
- int32_t nanoseconds "tv_nsec"
- gpr_clock_type clock_type
-
- gpr_timespec gpr_time_0(gpr_clock_type type) nogil
- gpr_timespec gpr_inf_future(gpr_clock_type type) nogil
- gpr_timespec gpr_inf_past(gpr_clock_type type) nogil
-
- gpr_timespec gpr_now(gpr_clock_type clock) nogil
-
- gpr_timespec gpr_convert_clock_type(gpr_timespec t,
- gpr_clock_type target_clock) nogil
-
- gpr_timespec gpr_time_from_millis(int64_t ms, gpr_clock_type type) nogil
+ ctypedef enum gpr_clock_type:
+ GPR_CLOCK_MONOTONIC
+ GPR_CLOCK_REALTIME
+ GPR_CLOCK_PRECISE
+ GPR_TIMESPAN
+
+ ctypedef struct gpr_timespec:
+ int64_t seconds "tv_sec"
+ int32_t nanoseconds "tv_nsec"
+ gpr_clock_type clock_type
+
+ gpr_timespec gpr_time_0(gpr_clock_type type) nogil
+ gpr_timespec gpr_inf_future(gpr_clock_type type) nogil
+ gpr_timespec gpr_inf_past(gpr_clock_type type) nogil
+
+ gpr_timespec gpr_now(gpr_clock_type clock) nogil
+
+ gpr_timespec gpr_convert_clock_type(gpr_timespec t,
+ gpr_clock_type target_clock) nogil
+
+ gpr_timespec gpr_time_from_millis(int64_t ms, gpr_clock_type type) nogil
gpr_timespec gpr_time_from_nanos(int64_t ns, gpr_clock_type type) nogil
double gpr_timespec_to_micros(gpr_timespec t) nogil
-
- gpr_timespec gpr_time_add(gpr_timespec a, gpr_timespec b) nogil
-
- int gpr_time_cmp(gpr_timespec a, gpr_timespec b) nogil
-
- ctypedef struct grpc_byte_buffer:
- # We don't care about the internals.
- pass
-
- grpc_byte_buffer *grpc_raw_byte_buffer_create(grpc_slice *slices,
- size_t nslices) nogil
- size_t grpc_byte_buffer_length(grpc_byte_buffer *bb) nogil
- void grpc_byte_buffer_destroy(grpc_byte_buffer *byte_buffer) nogil
-
- int grpc_byte_buffer_reader_init(grpc_byte_buffer_reader *reader,
- grpc_byte_buffer *buffer) nogil
- int grpc_byte_buffer_reader_next(grpc_byte_buffer_reader *reader,
- grpc_slice *slice) nogil
- void grpc_byte_buffer_reader_destroy(grpc_byte_buffer_reader *reader) nogil
-
- ctypedef enum grpc_status_code:
- GRPC_STATUS_OK
- GRPC_STATUS_CANCELLED
- GRPC_STATUS_UNKNOWN
- GRPC_STATUS_INVALID_ARGUMENT
- GRPC_STATUS_DEADLINE_EXCEEDED
- GRPC_STATUS_NOT_FOUND
- GRPC_STATUS_ALREADY_EXISTS
- GRPC_STATUS_PERMISSION_DENIED
- GRPC_STATUS_UNAUTHENTICATED
- GRPC_STATUS_RESOURCE_EXHAUSTED
- GRPC_STATUS_FAILED_PRECONDITION
- GRPC_STATUS_ABORTED
- GRPC_STATUS_OUT_OF_RANGE
- GRPC_STATUS_UNIMPLEMENTED
- GRPC_STATUS_INTERNAL
- GRPC_STATUS_UNAVAILABLE
- GRPC_STATUS_DATA_LOSS
- GRPC_STATUS__DO_NOT_USE
-
- const char *GRPC_ARG_ENABLE_CENSUS
- const char *GRPC_ARG_MAX_CONCURRENT_STREAMS
- const char *GRPC_ARG_MAX_RECEIVE_MESSAGE_LENGTH
- const char *GRPC_ARG_MAX_SEND_MESSAGE_LENGTH
- const char *GRPC_ARG_HTTP2_INITIAL_SEQUENCE_NUMBER
- const char *GRPC_ARG_DEFAULT_AUTHORITY
- const char *GRPC_ARG_PRIMARY_USER_AGENT_STRING
- const char *GRPC_ARG_SECONDARY_USER_AGENT_STRING
- const char *GRPC_SSL_TARGET_NAME_OVERRIDE_ARG
+
+ gpr_timespec gpr_time_add(gpr_timespec a, gpr_timespec b) nogil
+
+ int gpr_time_cmp(gpr_timespec a, gpr_timespec b) nogil
+
+ ctypedef struct grpc_byte_buffer:
+ # We don't care about the internals.
+ pass
+
+ grpc_byte_buffer *grpc_raw_byte_buffer_create(grpc_slice *slices,
+ size_t nslices) nogil
+ size_t grpc_byte_buffer_length(grpc_byte_buffer *bb) nogil
+ void grpc_byte_buffer_destroy(grpc_byte_buffer *byte_buffer) nogil
+
+ int grpc_byte_buffer_reader_init(grpc_byte_buffer_reader *reader,
+ grpc_byte_buffer *buffer) nogil
+ int grpc_byte_buffer_reader_next(grpc_byte_buffer_reader *reader,
+ grpc_slice *slice) nogil
+ void grpc_byte_buffer_reader_destroy(grpc_byte_buffer_reader *reader) nogil
+
+ ctypedef enum grpc_status_code:
+ GRPC_STATUS_OK
+ GRPC_STATUS_CANCELLED
+ GRPC_STATUS_UNKNOWN
+ GRPC_STATUS_INVALID_ARGUMENT
+ GRPC_STATUS_DEADLINE_EXCEEDED
+ GRPC_STATUS_NOT_FOUND
+ GRPC_STATUS_ALREADY_EXISTS
+ GRPC_STATUS_PERMISSION_DENIED
+ GRPC_STATUS_UNAUTHENTICATED
+ GRPC_STATUS_RESOURCE_EXHAUSTED
+ GRPC_STATUS_FAILED_PRECONDITION
+ GRPC_STATUS_ABORTED
+ GRPC_STATUS_OUT_OF_RANGE
+ GRPC_STATUS_UNIMPLEMENTED
+ GRPC_STATUS_INTERNAL
+ GRPC_STATUS_UNAVAILABLE
+ GRPC_STATUS_DATA_LOSS
+ GRPC_STATUS__DO_NOT_USE
+
+ const char *GRPC_ARG_ENABLE_CENSUS
+ const char *GRPC_ARG_MAX_CONCURRENT_STREAMS
+ const char *GRPC_ARG_MAX_RECEIVE_MESSAGE_LENGTH
+ const char *GRPC_ARG_MAX_SEND_MESSAGE_LENGTH
+ const char *GRPC_ARG_HTTP2_INITIAL_SEQUENCE_NUMBER
+ const char *GRPC_ARG_DEFAULT_AUTHORITY
+ const char *GRPC_ARG_PRIMARY_USER_AGENT_STRING
+ const char *GRPC_ARG_SECONDARY_USER_AGENT_STRING
+ const char *GRPC_SSL_TARGET_NAME_OVERRIDE_ARG
const char *GRPC_SSL_SESSION_CACHE_ARG
const char *_GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM \
"GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM"
- const char *GRPC_COMPRESSION_CHANNEL_DEFAULT_LEVEL
- const char *GRPC_COMPRESSION_CHANNEL_ENABLED_ALGORITHMS_BITSET
-
- const int GRPC_WRITE_BUFFER_HINT
- const int GRPC_WRITE_NO_COMPRESS
- const int GRPC_WRITE_USED_MASK
-
+ const char *GRPC_COMPRESSION_CHANNEL_DEFAULT_LEVEL
+ const char *GRPC_COMPRESSION_CHANNEL_ENABLED_ALGORITHMS_BITSET
+
+ const int GRPC_WRITE_BUFFER_HINT
+ const int GRPC_WRITE_NO_COMPRESS
+ const int GRPC_WRITE_USED_MASK
+
const int GRPC_INITIAL_METADATA_WAIT_FOR_READY
const int GRPC_INITIAL_METADATA_WAIT_FOR_READY_EXPLICITLY_SET
const int GRPC_INITIAL_METADATA_USED_MASK
- const int GRPC_MAX_COMPLETION_QUEUE_PLUCKERS
-
- ctypedef struct grpc_completion_queue:
- # We don't care about the internals (and in fact don't know them)
- pass
-
- ctypedef struct grpc_channel:
- # We don't care about the internals (and in fact don't know them)
- pass
-
- ctypedef struct grpc_server:
- # We don't care about the internals (and in fact don't know them)
- pass
-
- ctypedef struct grpc_call:
- # We don't care about the internals (and in fact don't know them)
- pass
-
- ctypedef enum grpc_arg_type:
- GRPC_ARG_STRING
- GRPC_ARG_INTEGER
- GRPC_ARG_POINTER
-
- ctypedef struct grpc_arg_pointer_vtable:
- void *(*copy)(void *)
+ const int GRPC_MAX_COMPLETION_QUEUE_PLUCKERS
+
+ ctypedef struct grpc_completion_queue:
+ # We don't care about the internals (and in fact don't know them)
+ pass
+
+ ctypedef struct grpc_channel:
+ # We don't care about the internals (and in fact don't know them)
+ pass
+
+ ctypedef struct grpc_server:
+ # We don't care about the internals (and in fact don't know them)
+ pass
+
+ ctypedef struct grpc_call:
+ # We don't care about the internals (and in fact don't know them)
+ pass
+
+ ctypedef enum grpc_arg_type:
+ GRPC_ARG_STRING
+ GRPC_ARG_INTEGER
+ GRPC_ARG_POINTER
+
+ ctypedef struct grpc_arg_pointer_vtable:
+ void *(*copy)(void *)
void (*destroy)(void *)
- int (*cmp)(void *, void *)
-
- ctypedef struct grpc_arg_value_pointer:
- void *address "p"
- grpc_arg_pointer_vtable *vtable
-
- union grpc_arg_value:
- char *string
- int integer
- grpc_arg_value_pointer pointer
-
- ctypedef struct grpc_arg:
- grpc_arg_type type
- char *key
- grpc_arg_value value
-
- ctypedef struct grpc_channel_args:
- size_t arguments_length "num_args"
- grpc_arg *arguments "args"
-
+ int (*cmp)(void *, void *)
+
+ ctypedef struct grpc_arg_value_pointer:
+ void *address "p"
+ grpc_arg_pointer_vtable *vtable
+
+ union grpc_arg_value:
+ char *string
+ int integer
+ grpc_arg_value_pointer pointer
+
+ ctypedef struct grpc_arg:
+ grpc_arg_type type
+ char *key
+ grpc_arg_value value
+
+ ctypedef struct grpc_channel_args:
+ size_t arguments_length "num_args"
+ grpc_arg *arguments "args"
+
ctypedef enum grpc_stream_compression_level:
GRPC_STREAM_COMPRESS_LEVEL_NONE
GRPC_STREAM_COMPRESS_LEVEL_LOW
GRPC_STREAM_COMPRESS_LEVEL_MED
GRPC_STREAM_COMPRESS_LEVEL_HIGH
- ctypedef enum grpc_call_error:
- GRPC_CALL_OK
- GRPC_CALL_ERROR
- GRPC_CALL_ERROR_NOT_ON_SERVER
- GRPC_CALL_ERROR_NOT_ON_CLIENT
- GRPC_CALL_ERROR_ALREADY_ACCEPTED
- GRPC_CALL_ERROR_ALREADY_INVOKED
- GRPC_CALL_ERROR_NOT_INVOKED
- GRPC_CALL_ERROR_ALREADY_FINISHED
- GRPC_CALL_ERROR_TOO_MANY_OPERATIONS
- GRPC_CALL_ERROR_INVALID_FLAGS
- GRPC_CALL_ERROR_INVALID_METADATA
-
+ ctypedef enum grpc_call_error:
+ GRPC_CALL_OK
+ GRPC_CALL_ERROR
+ GRPC_CALL_ERROR_NOT_ON_SERVER
+ GRPC_CALL_ERROR_NOT_ON_CLIENT
+ GRPC_CALL_ERROR_ALREADY_ACCEPTED
+ GRPC_CALL_ERROR_ALREADY_INVOKED
+ GRPC_CALL_ERROR_NOT_INVOKED
+ GRPC_CALL_ERROR_ALREADY_FINISHED
+ GRPC_CALL_ERROR_TOO_MANY_OPERATIONS
+ GRPC_CALL_ERROR_INVALID_FLAGS
+ GRPC_CALL_ERROR_INVALID_METADATA
+
ctypedef enum grpc_cq_completion_type:
GRPC_CQ_NEXT
GRPC_CQ_PLUCK
@@ -237,107 +237,107 @@ cdef extern from "grpc/grpc.h":
grpc_cq_completion_type cq_completion_type
grpc_cq_polling_type cq_polling_type
- ctypedef enum grpc_connectivity_state:
- GRPC_CHANNEL_IDLE
- GRPC_CHANNEL_CONNECTING
- GRPC_CHANNEL_READY
- GRPC_CHANNEL_TRANSIENT_FAILURE
- GRPC_CHANNEL_SHUTDOWN
-
- ctypedef struct grpc_metadata:
+ ctypedef enum grpc_connectivity_state:
+ GRPC_CHANNEL_IDLE
+ GRPC_CHANNEL_CONNECTING
+ GRPC_CHANNEL_READY
+ GRPC_CHANNEL_TRANSIENT_FAILURE
+ GRPC_CHANNEL_SHUTDOWN
+
+ ctypedef struct grpc_metadata:
grpc_slice key
grpc_slice value
- # ignore the 'internal_data.obfuscated' fields.
-
- ctypedef enum grpc_completion_type:
- GRPC_QUEUE_SHUTDOWN
- GRPC_QUEUE_TIMEOUT
- GRPC_OP_COMPLETE
-
- ctypedef struct grpc_event:
- grpc_completion_type type
- int success
- void *tag
-
- ctypedef struct grpc_metadata_array:
- size_t count
- size_t capacity
- grpc_metadata *metadata
-
- void grpc_metadata_array_init(grpc_metadata_array *array) nogil
- void grpc_metadata_array_destroy(grpc_metadata_array *array) nogil
-
- ctypedef struct grpc_call_details:
+ # ignore the 'internal_data.obfuscated' fields.
+
+ ctypedef enum grpc_completion_type:
+ GRPC_QUEUE_SHUTDOWN
+ GRPC_QUEUE_TIMEOUT
+ GRPC_OP_COMPLETE
+
+ ctypedef struct grpc_event:
+ grpc_completion_type type
+ int success
+ void *tag
+
+ ctypedef struct grpc_metadata_array:
+ size_t count
+ size_t capacity
+ grpc_metadata *metadata
+
+ void grpc_metadata_array_init(grpc_metadata_array *array) nogil
+ void grpc_metadata_array_destroy(grpc_metadata_array *array) nogil
+
+ ctypedef struct grpc_call_details:
grpc_slice method
grpc_slice host
- gpr_timespec deadline
-
- void grpc_call_details_init(grpc_call_details *details) nogil
- void grpc_call_details_destroy(grpc_call_details *details) nogil
-
- ctypedef enum grpc_op_type:
- GRPC_OP_SEND_INITIAL_METADATA
- GRPC_OP_SEND_MESSAGE
- GRPC_OP_SEND_CLOSE_FROM_CLIENT
- GRPC_OP_SEND_STATUS_FROM_SERVER
- GRPC_OP_RECV_INITIAL_METADATA
- GRPC_OP_RECV_MESSAGE
- GRPC_OP_RECV_STATUS_ON_CLIENT
- GRPC_OP_RECV_CLOSE_ON_SERVER
-
+ gpr_timespec deadline
+
+ void grpc_call_details_init(grpc_call_details *details) nogil
+ void grpc_call_details_destroy(grpc_call_details *details) nogil
+
+ ctypedef enum grpc_op_type:
+ GRPC_OP_SEND_INITIAL_METADATA
+ GRPC_OP_SEND_MESSAGE
+ GRPC_OP_SEND_CLOSE_FROM_CLIENT
+ GRPC_OP_SEND_STATUS_FROM_SERVER
+ GRPC_OP_RECV_INITIAL_METADATA
+ GRPC_OP_RECV_MESSAGE
+ GRPC_OP_RECV_STATUS_ON_CLIENT
+ GRPC_OP_RECV_CLOSE_ON_SERVER
+
ctypedef struct grpc_op_send_initial_metadata_maybe_compression_level:
uint8_t is_set
grpc_compression_level level
- ctypedef struct grpc_op_data_send_initial_metadata:
- size_t count
- grpc_metadata *metadata
+ ctypedef struct grpc_op_data_send_initial_metadata:
+ size_t count
+ grpc_metadata *metadata
grpc_op_send_initial_metadata_maybe_compression_level maybe_compression_level
-
- ctypedef struct grpc_op_data_send_status_from_server:
- size_t trailing_metadata_count
- grpc_metadata *trailing_metadata
- grpc_status_code status
+
+ ctypedef struct grpc_op_data_send_status_from_server:
+ size_t trailing_metadata_count
+ grpc_metadata *trailing_metadata
+ grpc_status_code status
grpc_slice *status_details
-
- ctypedef struct grpc_op_data_recv_status_on_client:
- grpc_metadata_array *trailing_metadata
- grpc_status_code *status
+
+ ctypedef struct grpc_op_data_recv_status_on_client:
+ grpc_metadata_array *trailing_metadata
+ grpc_status_code *status
grpc_slice *status_details
char** error_string
-
- ctypedef struct grpc_op_data_recv_close_on_server:
- int *cancelled
-
- ctypedef struct grpc_op_data_send_message:
- grpc_byte_buffer *send_message
-
- ctypedef struct grpc_op_data_receive_message:
- grpc_byte_buffer **receive_message "recv_message"
-
- ctypedef struct grpc_op_data_receive_initial_metadata:
- grpc_metadata_array *receive_initial_metadata "recv_initial_metadata"
-
- union grpc_op_data:
- grpc_op_data_send_initial_metadata send_initial_metadata
- grpc_op_data_send_message send_message
- grpc_op_data_send_status_from_server send_status_from_server
- grpc_op_data_receive_initial_metadata receive_initial_metadata "recv_initial_metadata"
- grpc_op_data_receive_message receive_message "recv_message"
- grpc_op_data_recv_status_on_client receive_status_on_client "recv_status_on_client"
- grpc_op_data_recv_close_on_server receive_close_on_server "recv_close_on_server"
-
- ctypedef struct grpc_op:
- grpc_op_type type "op"
- uint32_t flags
+
+ ctypedef struct grpc_op_data_recv_close_on_server:
+ int *cancelled
+
+ ctypedef struct grpc_op_data_send_message:
+ grpc_byte_buffer *send_message
+
+ ctypedef struct grpc_op_data_receive_message:
+ grpc_byte_buffer **receive_message "recv_message"
+
+ ctypedef struct grpc_op_data_receive_initial_metadata:
+ grpc_metadata_array *receive_initial_metadata "recv_initial_metadata"
+
+ union grpc_op_data:
+ grpc_op_data_send_initial_metadata send_initial_metadata
+ grpc_op_data_send_message send_message
+ grpc_op_data_send_status_from_server send_status_from_server
+ grpc_op_data_receive_initial_metadata receive_initial_metadata "recv_initial_metadata"
+ grpc_op_data_receive_message receive_message "recv_message"
+ grpc_op_data_recv_status_on_client receive_status_on_client "recv_status_on_client"
+ grpc_op_data_recv_close_on_server receive_close_on_server "recv_close_on_server"
+
+ ctypedef struct grpc_op:
+ grpc_op_type type "op"
+ uint32_t flags
void * reserved
- grpc_op_data data
-
+ grpc_op_data data
+
void grpc_dont_init_openssl() nogil
- void grpc_init() nogil
+ void grpc_init() nogil
void grpc_shutdown_blocking() nogil
int grpc_is_initialized() nogil
-
+
ctypedef struct grpc_completion_queue_factory:
pass
@@ -348,63 +348,63 @@ cdef extern from "grpc/grpc.h":
const grpc_completion_queue_attributes* attr, void* reserved) nogil
grpc_completion_queue *grpc_completion_queue_create_for_next(void *reserved) nogil
- grpc_event grpc_completion_queue_next(grpc_completion_queue *cq,
- gpr_timespec deadline,
- void *reserved) nogil
- grpc_event grpc_completion_queue_pluck(grpc_completion_queue *cq, void *tag,
- gpr_timespec deadline,
- void *reserved) nogil
- void grpc_completion_queue_shutdown(grpc_completion_queue *cq) nogil
- void grpc_completion_queue_destroy(grpc_completion_queue *cq) nogil
-
+ grpc_event grpc_completion_queue_next(grpc_completion_queue *cq,
+ gpr_timespec deadline,
+ void *reserved) nogil
+ grpc_event grpc_completion_queue_pluck(grpc_completion_queue *cq, void *tag,
+ gpr_timespec deadline,
+ void *reserved) nogil
+ void grpc_completion_queue_shutdown(grpc_completion_queue *cq) nogil
+ void grpc_completion_queue_destroy(grpc_completion_queue *cq) nogil
+
grpc_completion_queue *grpc_completion_queue_create_for_callback(
grpc_experimental_completion_queue_functor* shutdown_callback,
void *reserved) nogil
- grpc_call_error grpc_call_start_batch(
- grpc_call *call, const grpc_op *ops, size_t nops, void *tag,
- void *reserved) nogil
- grpc_call_error grpc_call_cancel(grpc_call *call, void *reserved) nogil
- grpc_call_error grpc_call_cancel_with_status(grpc_call *call,
- grpc_status_code status,
- const char *description,
- void *reserved) nogil
- char *grpc_call_get_peer(grpc_call *call) nogil
+ grpc_call_error grpc_call_start_batch(
+ grpc_call *call, const grpc_op *ops, size_t nops, void *tag,
+ void *reserved) nogil
+ grpc_call_error grpc_call_cancel(grpc_call *call, void *reserved) nogil
+ grpc_call_error grpc_call_cancel_with_status(grpc_call *call,
+ grpc_status_code status,
+ const char *description,
+ void *reserved) nogil
+ char *grpc_call_get_peer(grpc_call *call) nogil
void grpc_call_unref(grpc_call *call) nogil
-
- grpc_channel *grpc_insecure_channel_create(const char *target,
- const grpc_channel_args *args,
- void *reserved) nogil
- grpc_call *grpc_channel_create_call(
+
+ grpc_channel *grpc_insecure_channel_create(const char *target,
+ const grpc_channel_args *args,
+ void *reserved) nogil
+ grpc_call *grpc_channel_create_call(
grpc_channel *channel, grpc_call *parent_call, uint32_t propagation_mask,
grpc_completion_queue *completion_queue, grpc_slice method,
const grpc_slice *host, gpr_timespec deadline, void *reserved) nogil
- grpc_connectivity_state grpc_channel_check_connectivity_state(
- grpc_channel *channel, int try_to_connect) nogil
- void grpc_channel_watch_connectivity_state(
- grpc_channel *channel, grpc_connectivity_state last_observed_state,
- gpr_timespec deadline, grpc_completion_queue *cq, void *tag) nogil
- char *grpc_channel_get_target(grpc_channel *channel) nogil
- void grpc_channel_destroy(grpc_channel *channel) nogil
-
- grpc_server *grpc_server_create(
- const grpc_channel_args *args, void *reserved) nogil
- grpc_call_error grpc_server_request_call(
- grpc_server *server, grpc_call **call, grpc_call_details *details,
- grpc_metadata_array *request_metadata, grpc_completion_queue
- *cq_bound_to_call, grpc_completion_queue *cq_for_notification, void
- *tag_new) nogil
- void grpc_server_register_completion_queue(grpc_server *server,
- grpc_completion_queue *cq,
- void *reserved) nogil
- int grpc_server_add_insecure_http2_port(
- grpc_server *server, const char *addr) nogil
- void grpc_server_start(grpc_server *server) nogil
- void grpc_server_shutdown_and_notify(
- grpc_server *server, grpc_completion_queue *cq, void *tag) nogil
- void grpc_server_cancel_all_calls(grpc_server *server) nogil
- void grpc_server_destroy(grpc_server *server) nogil
-
+ grpc_connectivity_state grpc_channel_check_connectivity_state(
+ grpc_channel *channel, int try_to_connect) nogil
+ void grpc_channel_watch_connectivity_state(
+ grpc_channel *channel, grpc_connectivity_state last_observed_state,
+ gpr_timespec deadline, grpc_completion_queue *cq, void *tag) nogil
+ char *grpc_channel_get_target(grpc_channel *channel) nogil
+ void grpc_channel_destroy(grpc_channel *channel) nogil
+
+ grpc_server *grpc_server_create(
+ const grpc_channel_args *args, void *reserved) nogil
+ grpc_call_error grpc_server_request_call(
+ grpc_server *server, grpc_call **call, grpc_call_details *details,
+ grpc_metadata_array *request_metadata, grpc_completion_queue
+ *cq_bound_to_call, grpc_completion_queue *cq_for_notification, void
+ *tag_new) nogil
+ void grpc_server_register_completion_queue(grpc_server *server,
+ grpc_completion_queue *cq,
+ void *reserved) nogil
+ int grpc_server_add_insecure_http2_port(
+ grpc_server *server, const char *addr) nogil
+ void grpc_server_start(grpc_server *server) nogil
+ void grpc_server_shutdown_and_notify(
+ grpc_server *server, grpc_completion_queue *cq, void *tag) nogil
+ void grpc_server_cancel_all_calls(grpc_server *server) nogil
+ void grpc_server_destroy(grpc_server *server) nogil
+
char* grpc_channelz_get_top_channels(intptr_t start_channel_id)
char* grpc_channelz_get_servers(intptr_t start_server_id)
char* grpc_channelz_get_server(intptr_t server_id)
@@ -414,26 +414,26 @@ cdef extern from "grpc/grpc.h":
char* grpc_channelz_get_channel(intptr_t channel_id)
char* grpc_channelz_get_subchannel(intptr_t subchannel_id)
char* grpc_channelz_get_socket(intptr_t socket_id)
+
-
-cdef extern from "grpc/grpc_security.h":
-
+cdef extern from "grpc/grpc_security.h":
+
# Declare this as an enum, this is the only way to make it a const in
# cython
enum: GRPC_METADATA_CREDENTIALS_PLUGIN_SYNC_MAX
- ctypedef enum grpc_ssl_roots_override_result:
- GRPC_SSL_ROOTS_OVERRIDE_OK
- GRPC_SSL_ROOTS_OVERRIDE_FAILED_PERMANENTLY
- GRPC_SSL_ROOTS_OVERRIDE_FAILED
-
- ctypedef enum grpc_ssl_client_certificate_request_type:
- GRPC_SSL_DONT_REQUEST_CLIENT_CERTIFICATE,
- GRPC_SSL_REQUEST_CLIENT_CERTIFICATE_BUT_DONT_VERIFY
- GRPC_SSL_REQUEST_CLIENT_CERTIFICATE_AND_VERIFY
- GRPC_SSL_REQUEST_AND_REQUIRE_CLIENT_CERTIFICATE_BUT_DONT_VERIFY
- GRPC_SSL_REQUEST_AND_REQUIRE_CLIENT_CERTIFICATE_AND_VERIFY
-
+ ctypedef enum grpc_ssl_roots_override_result:
+ GRPC_SSL_ROOTS_OVERRIDE_OK
+ GRPC_SSL_ROOTS_OVERRIDE_FAILED_PERMANENTLY
+ GRPC_SSL_ROOTS_OVERRIDE_FAILED
+
+ ctypedef enum grpc_ssl_client_certificate_request_type:
+ GRPC_SSL_DONT_REQUEST_CLIENT_CERTIFICATE,
+ GRPC_SSL_REQUEST_CLIENT_CERTIFICATE_BUT_DONT_VERIFY
+ GRPC_SSL_REQUEST_CLIENT_CERTIFICATE_AND_VERIFY
+ GRPC_SSL_REQUEST_AND_REQUIRE_CLIENT_CERTIFICATE_BUT_DONT_VERIFY
+ GRPC_SSL_REQUEST_AND_REQUIRE_CLIENT_CERTIFICATE_AND_VERIFY
+
ctypedef enum grpc_security_level:
GRPC_SECURITY_MIN
GRPC_SECURITY_NONE = GRPC_SECURITY_MIN
@@ -477,18 +477,18 @@ cdef extern from "grpc/grpc_security.h":
grpc_server_credentials *grpc_ssl_server_credentials_create_with_options(
grpc_ssl_server_credentials_options *options)
- ctypedef struct grpc_ssl_pem_key_cert_pair:
- const char *private_key
- const char *certificate_chain "cert_chain"
-
- ctypedef struct grpc_channel_credentials:
- # We don't care about the internals (and in fact don't know them)
- pass
-
- ctypedef struct grpc_call_credentials:
- # We don't care about the internals (and in fact don't know them)
- pass
-
+ ctypedef struct grpc_ssl_pem_key_cert_pair:
+ const char *private_key
+ const char *certificate_chain "cert_chain"
+
+ ctypedef struct grpc_channel_credentials:
+ # We don't care about the internals (and in fact don't know them)
+ pass
+
+ ctypedef struct grpc_call_credentials:
+ # We don't care about the internals (and in fact don't know them)
+ pass
+
ctypedef struct grpc_ssl_session_cache:
# We don't care about the internals (and in fact don't know them)
pass
@@ -497,84 +497,84 @@ cdef extern from "grpc/grpc_security.h":
# We don't care about the internals (and in fact don't know them)
pass
- ctypedef void (*grpc_ssl_roots_override_callback)(char **pem_root_certs)
-
+ ctypedef void (*grpc_ssl_roots_override_callback)(char **pem_root_certs)
+
grpc_ssl_session_cache *grpc_ssl_session_cache_create_lru(size_t capacity)
void grpc_ssl_session_cache_destroy(grpc_ssl_session_cache* cache)
- void grpc_set_ssl_roots_override_callback(
- grpc_ssl_roots_override_callback cb) nogil
-
+ void grpc_set_ssl_roots_override_callback(
+ grpc_ssl_roots_override_callback cb) nogil
+
grpc_channel_credentials *grpc_google_default_credentials_create(grpc_call_credentials* call_credentials) nogil
- grpc_channel_credentials *grpc_ssl_credentials_create(
- const char *pem_root_certs, grpc_ssl_pem_key_cert_pair *pem_key_cert_pair,
+ grpc_channel_credentials *grpc_ssl_credentials_create(
+ const char *pem_root_certs, grpc_ssl_pem_key_cert_pair *pem_key_cert_pair,
verify_peer_options *verify_options, void *reserved) nogil
- grpc_channel_credentials *grpc_composite_channel_credentials_create(
- grpc_channel_credentials *creds1, grpc_call_credentials *creds2,
- void *reserved) nogil
- void grpc_channel_credentials_release(grpc_channel_credentials *creds) nogil
-
- grpc_call_credentials *grpc_composite_call_credentials_create(
- grpc_call_credentials *creds1, grpc_call_credentials *creds2,
- void *reserved) nogil
- grpc_call_credentials *grpc_google_compute_engine_credentials_create(
- void *reserved) nogil
- grpc_call_credentials *grpc_service_account_jwt_access_credentials_create(
- const char *json_key,
- gpr_timespec token_lifetime, void *reserved) nogil
- grpc_call_credentials *grpc_google_refresh_token_credentials_create(
- const char *json_refresh_token, void *reserved) nogil
- grpc_call_credentials *grpc_google_iam_credentials_create(
- const char *authorization_token, const char *authority_selector,
- void *reserved) nogil
- void grpc_call_credentials_release(grpc_call_credentials *creds) nogil
-
- grpc_channel *grpc_secure_channel_create(
- grpc_channel_credentials *creds, const char *target,
- const grpc_channel_args *args, void *reserved) nogil
-
- ctypedef struct grpc_server_credentials:
- # We don't care about the internals (and in fact don't know them)
- pass
-
- void grpc_server_credentials_release(grpc_server_credentials *creds) nogil
-
- int grpc_server_add_secure_http2_port(grpc_server *server, const char *addr,
- grpc_server_credentials *creds) nogil
-
- grpc_call_error grpc_call_set_credentials(grpc_call *call,
- grpc_call_credentials *creds) nogil
-
- ctypedef struct grpc_auth_context:
- # We don't care about the internals (and in fact don't know them)
- pass
-
- ctypedef struct grpc_auth_metadata_context:
- const char *service_url
- const char *method_name
- const grpc_auth_context *channel_auth_context
-
- ctypedef void (*grpc_credentials_plugin_metadata_cb)(
- void *user_data, const grpc_metadata *creds_md, size_t num_creds_md,
- grpc_status_code status, const char *error_details)
-
- ctypedef struct grpc_metadata_credentials_plugin:
+ grpc_channel_credentials *grpc_composite_channel_credentials_create(
+ grpc_channel_credentials *creds1, grpc_call_credentials *creds2,
+ void *reserved) nogil
+ void grpc_channel_credentials_release(grpc_channel_credentials *creds) nogil
+
+ grpc_call_credentials *grpc_composite_call_credentials_create(
+ grpc_call_credentials *creds1, grpc_call_credentials *creds2,
+ void *reserved) nogil
+ grpc_call_credentials *grpc_google_compute_engine_credentials_create(
+ void *reserved) nogil
+ grpc_call_credentials *grpc_service_account_jwt_access_credentials_create(
+ const char *json_key,
+ gpr_timespec token_lifetime, void *reserved) nogil
+ grpc_call_credentials *grpc_google_refresh_token_credentials_create(
+ const char *json_refresh_token, void *reserved) nogil
+ grpc_call_credentials *grpc_google_iam_credentials_create(
+ const char *authorization_token, const char *authority_selector,
+ void *reserved) nogil
+ void grpc_call_credentials_release(grpc_call_credentials *creds) nogil
+
+ grpc_channel *grpc_secure_channel_create(
+ grpc_channel_credentials *creds, const char *target,
+ const grpc_channel_args *args, void *reserved) nogil
+
+ ctypedef struct grpc_server_credentials:
+ # We don't care about the internals (and in fact don't know them)
+ pass
+
+ void grpc_server_credentials_release(grpc_server_credentials *creds) nogil
+
+ int grpc_server_add_secure_http2_port(grpc_server *server, const char *addr,
+ grpc_server_credentials *creds) nogil
+
+ grpc_call_error grpc_call_set_credentials(grpc_call *call,
+ grpc_call_credentials *creds) nogil
+
+ ctypedef struct grpc_auth_context:
+ # We don't care about the internals (and in fact don't know them)
+ pass
+
+ ctypedef struct grpc_auth_metadata_context:
+ const char *service_url
+ const char *method_name
+ const grpc_auth_context *channel_auth_context
+
+ ctypedef void (*grpc_credentials_plugin_metadata_cb)(
+ void *user_data, const grpc_metadata *creds_md, size_t num_creds_md,
+ grpc_status_code status, const char *error_details)
+
+ ctypedef struct grpc_metadata_credentials_plugin:
int (*get_metadata)(
- void *state, grpc_auth_metadata_context context,
+ void *state, grpc_auth_metadata_context context,
grpc_credentials_plugin_metadata_cb cb, void *user_data,
grpc_metadata creds_md[GRPC_METADATA_CREDENTIALS_PLUGIN_SYNC_MAX],
size_t *num_creds_md, grpc_status_code *status,
const char **error_details) except *
void (*destroy)(void *state) except *
- void *state
- const char *type
-
- grpc_call_credentials *grpc_metadata_credentials_create_from_plugin(
+ void *state
+ const char *type
+
+ grpc_call_credentials *grpc_metadata_credentials_create_from_plugin(
grpc_metadata_credentials_plugin plugin, grpc_security_level min_security_level, void *reserved) nogil
-
+
ctypedef struct grpc_auth_property_iterator:
pass
-
+
ctypedef struct grpc_auth_property:
char *name
char *value
@@ -623,41 +623,41 @@ cdef extern from "grpc/grpc_security.h":
-cdef extern from "grpc/compression.h":
-
- ctypedef enum grpc_compression_algorithm:
- GRPC_COMPRESS_NONE
- GRPC_COMPRESS_DEFLATE
- GRPC_COMPRESS_GZIP
+cdef extern from "grpc/compression.h":
+
+ ctypedef enum grpc_compression_algorithm:
+ GRPC_COMPRESS_NONE
+ GRPC_COMPRESS_DEFLATE
+ GRPC_COMPRESS_GZIP
GRPC_COMPRESS_STREAM_GZIP
- GRPC_COMPRESS_ALGORITHMS_COUNT
-
- ctypedef enum grpc_compression_level:
- GRPC_COMPRESS_LEVEL_NONE
- GRPC_COMPRESS_LEVEL_LOW
- GRPC_COMPRESS_LEVEL_MED
- GRPC_COMPRESS_LEVEL_HIGH
- GRPC_COMPRESS_LEVEL_COUNT
-
- ctypedef struct grpc_compression_options:
- uint32_t enabled_algorithms_bitset
-
- int grpc_compression_algorithm_parse(
+ GRPC_COMPRESS_ALGORITHMS_COUNT
+
+ ctypedef enum grpc_compression_level:
+ GRPC_COMPRESS_LEVEL_NONE
+ GRPC_COMPRESS_LEVEL_LOW
+ GRPC_COMPRESS_LEVEL_MED
+ GRPC_COMPRESS_LEVEL_HIGH
+ GRPC_COMPRESS_LEVEL_COUNT
+
+ ctypedef struct grpc_compression_options:
+ uint32_t enabled_algorithms_bitset
+
+ int grpc_compression_algorithm_parse(
grpc_slice value, grpc_compression_algorithm *algorithm) nogil
- int grpc_compression_algorithm_name(grpc_compression_algorithm algorithm,
+ int grpc_compression_algorithm_name(grpc_compression_algorithm algorithm,
const char **name) nogil
- grpc_compression_algorithm grpc_compression_algorithm_for_level(
- grpc_compression_level level, uint32_t accepted_encodings) nogil
- void grpc_compression_options_init(grpc_compression_options *opts) nogil
- void grpc_compression_options_enable_algorithm(
- grpc_compression_options *opts,
- grpc_compression_algorithm algorithm) nogil
- void grpc_compression_options_disable_algorithm(
- grpc_compression_options *opts,
- grpc_compression_algorithm algorithm) nogil
- int grpc_compression_options_is_algorithm_enabled(
- const grpc_compression_options *opts,
- grpc_compression_algorithm algorithm) nogil
+ grpc_compression_algorithm grpc_compression_algorithm_for_level(
+ grpc_compression_level level, uint32_t accepted_encodings) nogil
+ void grpc_compression_options_init(grpc_compression_options *opts) nogil
+ void grpc_compression_options_enable_algorithm(
+ grpc_compression_options *opts,
+ grpc_compression_algorithm algorithm) nogil
+ void grpc_compression_options_disable_algorithm(
+ grpc_compression_options *opts,
+ grpc_compression_algorithm algorithm) nogil
+ int grpc_compression_options_is_algorithm_enabled(
+ const grpc_compression_options *opts,
+ grpc_compression_algorithm algorithm) nogil
cdef extern from "grpc/impl/codegen/compression_types.h":
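
The GRPC_ARG_* keys declared in this header block surface in Python as string-keyed channel options. A short sketch, with an assumed target and assumed message-size limits:

import grpc

options = [
    ('grpc.max_receive_message_length', 16 * 1024 * 1024),
    ('grpc.max_send_message_length', 16 * 1024 * 1024),
    ('grpc.ssl_target_name_override', 'greeter.example.com'),
]
channel = grpc.secure_channel('127.0.0.1:50051',
                              grpc.ssl_channel_credentials(),
                              options=options)
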
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc_string.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc_string.pyx.pxi
index 5c1e0679a9..a3b20f472c 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc_string.pyx.pxi
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/grpc_string.pyx.pxi
@@ -1,27 +1,27 @@
# Copyright 2016 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-
+
+
# This function will ascii encode unicode string inputs if necessary.
-# In Python3, unicode strings are the default str type.
-cdef bytes str_to_bytes(object s):
- if s is None or isinstance(s, bytes):
- return s
- elif isinstance(s, unicode):
- return s.encode('ascii')
- else:
- raise TypeError('Expected bytes, str, or unicode, not {}'.format(type(s)))
+# In Python3, unicode strings are the default str type.
+cdef bytes str_to_bytes(object s):
+ if s is None or isinstance(s, bytes):
+ return s
+ elif isinstance(s, unicode):
+ return s.encode('ascii')
+ else:
+ raise TypeError('Expected bytes, str, or unicode, not {}'.format(type(s)))
# TODO(https://github.com/grpc/grpc/issues/13782): It would be nice for us if
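
For readers less familiar with Cython, a pure-Python restatement of the helper above ("unicode" in the .pxi is the Python 2 spelling; on Python 3 it is str):

def str_to_bytes(s):
    # Pass bytes (and None) through unchanged, ASCII-encode text strings,
    # and reject every other type.
    if s is None or isinstance(s, bytes):
        return s
    elif isinstance(s, str):
        return s.encode('ascii')
    else:
        raise TypeError('Expected bytes, str, or unicode, not {}'.format(type(s)))
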
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/records.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/records.pxd.pxi
index 35e1bdb0ae..1660c9c2b8 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/records.pxd.pxi
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/records.pxd.pxi
@@ -1,34 +1,34 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-
+
+
cdef bytes _slice_bytes(grpc_slice slice)
cdef grpc_slice _copy_slice(grpc_slice slice) nogil
cdef grpc_slice _slice_from_bytes(bytes value) nogil
-
-
-cdef class CallDetails:
-
- cdef grpc_call_details c_details
-
-
-cdef class SslPemKeyCertPair:
-
- cdef grpc_ssl_pem_key_cert_pair c_pair
- cdef readonly object private_key, certificate_chain
-
-
-cdef class CompressionOptions:
-
- cdef grpc_compression_options c_options
+
+
+cdef class CallDetails:
+
+ cdef grpc_call_details c_details
+
+
+cdef class SslPemKeyCertPair:
+
+ cdef grpc_ssl_pem_key_cert_pair c_pair
+ cdef readonly object private_key, certificate_chain
+
+
+cdef class CompressionOptions:
+
+ cdef grpc_compression_options c_options
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/records.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/records.pyx.pxi
index 308d677695..fc08110851 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/records.pyx.pxi
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/records.pyx.pxi
@@ -1,18 +1,18 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-
+
+
cdef bytes _slice_bytes(grpc_slice slice):
cdef void *start = grpc_slice_start_ptr(slice)
cdef size_t length = grpc_slice_length(slice)
@@ -32,166 +32,166 @@ cdef grpc_slice _slice_from_bytes(bytes value) nogil:
return grpc_slice_from_copied_buffer(value_ptr, length)
-class ConnectivityState:
- idle = GRPC_CHANNEL_IDLE
- connecting = GRPC_CHANNEL_CONNECTING
- ready = GRPC_CHANNEL_READY
- transient_failure = GRPC_CHANNEL_TRANSIENT_FAILURE
- shutdown = GRPC_CHANNEL_SHUTDOWN
-
-
-class ChannelArgKey:
- enable_census = GRPC_ARG_ENABLE_CENSUS
- max_concurrent_streams = GRPC_ARG_MAX_CONCURRENT_STREAMS
- max_receive_message_length = GRPC_ARG_MAX_RECEIVE_MESSAGE_LENGTH
- max_send_message_length = GRPC_ARG_MAX_SEND_MESSAGE_LENGTH
- http2_initial_sequence_number = GRPC_ARG_HTTP2_INITIAL_SEQUENCE_NUMBER
- default_authority = GRPC_ARG_DEFAULT_AUTHORITY
- primary_user_agent_string = GRPC_ARG_PRIMARY_USER_AGENT_STRING
- secondary_user_agent_string = GRPC_ARG_SECONDARY_USER_AGENT_STRING
+class ConnectivityState:
+ idle = GRPC_CHANNEL_IDLE
+ connecting = GRPC_CHANNEL_CONNECTING
+ ready = GRPC_CHANNEL_READY
+ transient_failure = GRPC_CHANNEL_TRANSIENT_FAILURE
+ shutdown = GRPC_CHANNEL_SHUTDOWN
+
+
+class ChannelArgKey:
+ enable_census = GRPC_ARG_ENABLE_CENSUS
+ max_concurrent_streams = GRPC_ARG_MAX_CONCURRENT_STREAMS
+ max_receive_message_length = GRPC_ARG_MAX_RECEIVE_MESSAGE_LENGTH
+ max_send_message_length = GRPC_ARG_MAX_SEND_MESSAGE_LENGTH
+ http2_initial_sequence_number = GRPC_ARG_HTTP2_INITIAL_SEQUENCE_NUMBER
+ default_authority = GRPC_ARG_DEFAULT_AUTHORITY
+ primary_user_agent_string = GRPC_ARG_PRIMARY_USER_AGENT_STRING
+ secondary_user_agent_string = GRPC_ARG_SECONDARY_USER_AGENT_STRING
ssl_session_cache = GRPC_SSL_SESSION_CACHE_ARG
- ssl_target_name_override = GRPC_SSL_TARGET_NAME_OVERRIDE_ARG
-
-
-class WriteFlag:
- buffer_hint = GRPC_WRITE_BUFFER_HINT
- no_compress = GRPC_WRITE_NO_COMPRESS
-
-
-class StatusCode:
- ok = GRPC_STATUS_OK
- cancelled = GRPC_STATUS_CANCELLED
- unknown = GRPC_STATUS_UNKNOWN
- invalid_argument = GRPC_STATUS_INVALID_ARGUMENT
- deadline_exceeded = GRPC_STATUS_DEADLINE_EXCEEDED
- not_found = GRPC_STATUS_NOT_FOUND
- already_exists = GRPC_STATUS_ALREADY_EXISTS
- permission_denied = GRPC_STATUS_PERMISSION_DENIED
- unauthenticated = GRPC_STATUS_UNAUTHENTICATED
- resource_exhausted = GRPC_STATUS_RESOURCE_EXHAUSTED
- failed_precondition = GRPC_STATUS_FAILED_PRECONDITION
- aborted = GRPC_STATUS_ABORTED
- out_of_range = GRPC_STATUS_OUT_OF_RANGE
- unimplemented = GRPC_STATUS_UNIMPLEMENTED
- internal = GRPC_STATUS_INTERNAL
- unavailable = GRPC_STATUS_UNAVAILABLE
- data_loss = GRPC_STATUS_DATA_LOSS
-
-
-class CallError:
- ok = GRPC_CALL_OK
- error = GRPC_CALL_ERROR
- not_on_server = GRPC_CALL_ERROR_NOT_ON_SERVER
- not_on_client = GRPC_CALL_ERROR_NOT_ON_CLIENT
- already_accepted = GRPC_CALL_ERROR_ALREADY_ACCEPTED
- already_invoked = GRPC_CALL_ERROR_ALREADY_INVOKED
- not_invoked = GRPC_CALL_ERROR_NOT_INVOKED
- already_finished = GRPC_CALL_ERROR_ALREADY_FINISHED
- too_many_operations = GRPC_CALL_ERROR_TOO_MANY_OPERATIONS
- invalid_flags = GRPC_CALL_ERROR_INVALID_FLAGS
- invalid_metadata = GRPC_CALL_ERROR_INVALID_METADATA
-
-
-class CompletionType:
- queue_shutdown = GRPC_QUEUE_SHUTDOWN
- queue_timeout = GRPC_QUEUE_TIMEOUT
- operation_complete = GRPC_OP_COMPLETE
-
-
-class OperationType:
- send_initial_metadata = GRPC_OP_SEND_INITIAL_METADATA
- send_message = GRPC_OP_SEND_MESSAGE
- send_close_from_client = GRPC_OP_SEND_CLOSE_FROM_CLIENT
- send_status_from_server = GRPC_OP_SEND_STATUS_FROM_SERVER
- receive_initial_metadata = GRPC_OP_RECV_INITIAL_METADATA
- receive_message = GRPC_OP_RECV_MESSAGE
- receive_status_on_client = GRPC_OP_RECV_STATUS_ON_CLIENT
- receive_close_on_server = GRPC_OP_RECV_CLOSE_ON_SERVER
-
+ ssl_target_name_override = GRPC_SSL_TARGET_NAME_OVERRIDE_ARG
+
+
+class WriteFlag:
+ buffer_hint = GRPC_WRITE_BUFFER_HINT
+ no_compress = GRPC_WRITE_NO_COMPRESS
+
+
+class StatusCode:
+ ok = GRPC_STATUS_OK
+ cancelled = GRPC_STATUS_CANCELLED
+ unknown = GRPC_STATUS_UNKNOWN
+ invalid_argument = GRPC_STATUS_INVALID_ARGUMENT
+ deadline_exceeded = GRPC_STATUS_DEADLINE_EXCEEDED
+ not_found = GRPC_STATUS_NOT_FOUND
+ already_exists = GRPC_STATUS_ALREADY_EXISTS
+ permission_denied = GRPC_STATUS_PERMISSION_DENIED
+ unauthenticated = GRPC_STATUS_UNAUTHENTICATED
+ resource_exhausted = GRPC_STATUS_RESOURCE_EXHAUSTED
+ failed_precondition = GRPC_STATUS_FAILED_PRECONDITION
+ aborted = GRPC_STATUS_ABORTED
+ out_of_range = GRPC_STATUS_OUT_OF_RANGE
+ unimplemented = GRPC_STATUS_UNIMPLEMENTED
+ internal = GRPC_STATUS_INTERNAL
+ unavailable = GRPC_STATUS_UNAVAILABLE
+ data_loss = GRPC_STATUS_DATA_LOSS
+
+
+class CallError:
+ ok = GRPC_CALL_OK
+ error = GRPC_CALL_ERROR
+ not_on_server = GRPC_CALL_ERROR_NOT_ON_SERVER
+ not_on_client = GRPC_CALL_ERROR_NOT_ON_CLIENT
+ already_accepted = GRPC_CALL_ERROR_ALREADY_ACCEPTED
+ already_invoked = GRPC_CALL_ERROR_ALREADY_INVOKED
+ not_invoked = GRPC_CALL_ERROR_NOT_INVOKED
+ already_finished = GRPC_CALL_ERROR_ALREADY_FINISHED
+ too_many_operations = GRPC_CALL_ERROR_TOO_MANY_OPERATIONS
+ invalid_flags = GRPC_CALL_ERROR_INVALID_FLAGS
+ invalid_metadata = GRPC_CALL_ERROR_INVALID_METADATA
+
+
+class CompletionType:
+ queue_shutdown = GRPC_QUEUE_SHUTDOWN
+ queue_timeout = GRPC_QUEUE_TIMEOUT
+ operation_complete = GRPC_OP_COMPLETE
+
+
+class OperationType:
+ send_initial_metadata = GRPC_OP_SEND_INITIAL_METADATA
+ send_message = GRPC_OP_SEND_MESSAGE
+ send_close_from_client = GRPC_OP_SEND_CLOSE_FROM_CLIENT
+ send_status_from_server = GRPC_OP_SEND_STATUS_FROM_SERVER
+ receive_initial_metadata = GRPC_OP_RECV_INITIAL_METADATA
+ receive_message = GRPC_OP_RECV_MESSAGE
+ receive_status_on_client = GRPC_OP_RECV_STATUS_ON_CLIENT
+ receive_close_on_server = GRPC_OP_RECV_CLOSE_ON_SERVER
+
GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM= (
_GRPC_COMPRESSION_CHANNEL_DEFAULT_ALGORITHM)
-
+
GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY = (
_GRPC_COMPRESSION_REQUEST_ALGORITHM_MD_KEY)
-class CompressionAlgorithm:
- none = GRPC_COMPRESS_NONE
- deflate = GRPC_COMPRESS_DEFLATE
- gzip = GRPC_COMPRESS_GZIP
-
-
-class CompressionLevel:
- none = GRPC_COMPRESS_LEVEL_NONE
- low = GRPC_COMPRESS_LEVEL_LOW
- medium = GRPC_COMPRESS_LEVEL_MED
- high = GRPC_COMPRESS_LEVEL_HIGH
-
-
-cdef class CallDetails:
-
- def __cinit__(self):
+class CompressionAlgorithm:
+ none = GRPC_COMPRESS_NONE
+ deflate = GRPC_COMPRESS_DEFLATE
+ gzip = GRPC_COMPRESS_GZIP
+
+
+class CompressionLevel:
+ none = GRPC_COMPRESS_LEVEL_NONE
+ low = GRPC_COMPRESS_LEVEL_LOW
+ medium = GRPC_COMPRESS_LEVEL_MED
+ high = GRPC_COMPRESS_LEVEL_HIGH
+
+
+cdef class CallDetails:
+
+ def __cinit__(self):
fork_handlers_and_grpc_init()
- with nogil:
- grpc_call_details_init(&self.c_details)
-
- def __dealloc__(self):
- with nogil:
- grpc_call_details_destroy(&self.c_details)
+ with nogil:
+ grpc_call_details_init(&self.c_details)
+
+ def __dealloc__(self):
+ with nogil:
+ grpc_call_details_destroy(&self.c_details)
grpc_shutdown_blocking()
-
- @property
- def method(self):
+
+ @property
+ def method(self):
return _slice_bytes(self.c_details.method)
-
- @property
- def host(self):
+
+ @property
+ def host(self):
return _slice_bytes(self.c_details.host)
-
- @property
- def deadline(self):
+
+ @property
+ def deadline(self):
return _time_from_timespec(self.c_details.deadline)
-
-
-cdef class SslPemKeyCertPair:
-
- def __cinit__(self, bytes private_key, bytes certificate_chain):
- self.private_key = private_key
- self.certificate_chain = certificate_chain
- self.c_pair.private_key = self.private_key
- self.c_pair.certificate_chain = self.certificate_chain
-
-
-cdef class CompressionOptions:
-
- def __cinit__(self):
- with nogil:
- grpc_compression_options_init(&self.c_options)
-
- def enable_algorithm(self, grpc_compression_algorithm algorithm):
- with nogil:
- grpc_compression_options_enable_algorithm(&self.c_options, algorithm)
-
- def disable_algorithm(self, grpc_compression_algorithm algorithm):
- with nogil:
- grpc_compression_options_disable_algorithm(&self.c_options, algorithm)
-
- def is_algorithm_enabled(self, grpc_compression_algorithm algorithm):
- cdef int result
- with nogil:
- result = grpc_compression_options_is_algorithm_enabled(
- &self.c_options, algorithm)
- return result
-
- def to_channel_arg(self):
+
+
+cdef class SslPemKeyCertPair:
+
+ def __cinit__(self, bytes private_key, bytes certificate_chain):
+ self.private_key = private_key
+ self.certificate_chain = certificate_chain
+ self.c_pair.private_key = self.private_key
+ self.c_pair.certificate_chain = self.certificate_chain
+
+
+cdef class CompressionOptions:
+
+ def __cinit__(self):
+ with nogil:
+ grpc_compression_options_init(&self.c_options)
+
+ def enable_algorithm(self, grpc_compression_algorithm algorithm):
+ with nogil:
+ grpc_compression_options_enable_algorithm(&self.c_options, algorithm)
+
+ def disable_algorithm(self, grpc_compression_algorithm algorithm):
+ with nogil:
+ grpc_compression_options_disable_algorithm(&self.c_options, algorithm)
+
+ def is_algorithm_enabled(self, grpc_compression_algorithm algorithm):
+ cdef int result
+ with nogil:
+ result = grpc_compression_options_is_algorithm_enabled(
+ &self.c_options, algorithm)
+ return result
+
+ def to_channel_arg(self):
return (
GRPC_COMPRESSION_CHANNEL_ENABLED_ALGORITHMS_BITSET,
self.c_options.enabled_algorithms_bitset,
)
-
-
-def compression_algorithm_name(grpc_compression_algorithm algorithm):
+
+
+def compression_algorithm_name(grpc_compression_algorithm algorithm):
cdef const char* name
- with nogil:
- grpc_compression_algorithm_name(algorithm, &name)
- # Let Cython do the right thing with string casting
- return name
+ with nogil:
+ grpc_compression_algorithm_name(algorithm, &name)
+ # Let Cython do the right thing with string casting
+ return name
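
The CompressionAlgorithm and CompressionLevel records above correspond to grpc.Compression in grpcio releases that expose it; the target address below is an illustrative assumption:

import grpc

# Compress every message on this channel with gzip; individual calls on a
# generated stub can likewise pass compression=grpc.Compression.Deflate.
channel = grpc.insecure_channel('localhost:50051',
                                compression=grpc.Compression.Gzip)
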
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/security.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/security.pxd.pxi
index e6e79536bb..6afe747833 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/security.pxd.pxi
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/security.pxd.pxi
@@ -1,17 +1,17 @@
# Copyright 2016 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-
-cdef grpc_ssl_roots_override_result ssl_roots_override_callback(
+
+
+cdef grpc_ssl_roots_override_result ssl_roots_override_callback(
char **pem_root_certs) nogil
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/security.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/security.pyx.pxi
index 9cc3fd5a21..c11b9ab165 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/security.pyx.pxi
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/security.pyx.pxi
@@ -1,20 +1,20 @@
# Copyright 2016 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-from libc.string cimport memcpy
-
-cdef grpc_ssl_roots_override_result ssl_roots_override_callback(
+
+from libc.string cimport memcpy
+
+cdef grpc_ssl_roots_override_result ssl_roots_override_callback(
char **pem_root_certs) nogil:
with gil:
temporary_pem_root_certs = ''
@@ -24,7 +24,7 @@ cdef grpc_ssl_roots_override_result ssl_roots_override_callback(
len(temporary_pem_root_certs))
pem_root_certs[0][len(temporary_pem_root_certs)] = '\0'
- return GRPC_SSL_ROOTS_OVERRIDE_OK
+ return GRPC_SSL_ROOTS_OVERRIDE_OK
def peer_identities(Call call):
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/server.pxd.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/server.pxd.pxi
index b89ed99d97..ccdc14bf2c 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/server.pxd.pxi
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/server.pxd.pxi
@@ -1,29 +1,29 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+
+cdef class Server:
+
+ cdef grpc_server *c_server
-cdef class Server:
-
- cdef grpc_server *c_server
-
- cdef bint is_started # start has been called
- cdef bint is_shutting_down # shutdown has been called
- cdef bint is_shutdown # notification of complete shutdown received
- # used at dealloc when user forgets to shutdown
- cdef CompletionQueue backup_shutdown_queue
+ cdef bint is_started # start has been called
+ cdef bint is_shutting_down # shutdown has been called
+ cdef bint is_shutdown # notification of complete shutdown received
+ # used at dealloc when user forgets to shutdown
+ cdef CompletionQueue backup_shutdown_queue
# TODO(https://github.com/grpc/grpc/issues/15662): Elide this.
- cdef list references
- cdef list registered_completion_queues
-
- cdef _c_shutdown(self, CompletionQueue queue, tag)
- cdef notify_shutdown_complete(self)
+ cdef list references
+ cdef list registered_completion_queues
+
+ cdef _c_shutdown(self, CompletionQueue queue, tag)
+ cdef notify_shutdown_complete(self)
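The declarations above spell out the wrapper's lifecycle: is_started, is_shutting_down and is_shutdown only ever move forward, and backup_shutdown_queue exists so deallocation can still drive a shutdown when the user never requested one. As an illustrative re-statement in plain Python (not code from this tree), the implied state machine looks roughly like this:

from enum import Enum, auto

class ServerLifecycle(Enum):
    CREATED = auto()        # no flag set yet
    STARTED = auto()        # is_started
    SHUTTING_DOWN = auto()  # is_shutting_down
    SHUTDOWN = auto()       # is_shutdown

# Transitions mirror the checks in server.pyx.pxi below: start() requires
# CREATED, shutdown() requires STARTED, and only the shutdown-complete
# notification (a tag emerging from a completion queue) reaches SHUTDOWN.
_TRANSITIONS = {
    (ServerLifecycle.CREATED, 'start'): ServerLifecycle.STARTED,
    (ServerLifecycle.STARTED, 'shutdown'): ServerLifecycle.SHUTTING_DOWN,
    (ServerLifecycle.SHUTTING_DOWN, 'shutdown_complete'): ServerLifecycle.SHUTDOWN,
}

def advance(stage, event):
    return _TRANSITIONS.get((stage, event), stage)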
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/server.pyx.pxi b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/server.pyx.pxi
index eff95c4f29..adfc6ac088 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/server.pyx.pxi
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/_cygrpc/server.pyx.pxi
@@ -1,39 +1,39 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-
-cdef class Server:
-
+
+
+cdef class Server:
+
def __cinit__(self, object arguments):
fork_handlers_and_grpc_init()
- self.references = []
- self.registered_completion_queues = []
- self.is_started = False
- self.is_shutting_down = False
- self.is_shutdown = False
+ self.references = []
+ self.registered_completion_queues = []
+ self.is_started = False
+ self.is_shutting_down = False
+ self.is_shutdown = False
self.c_server = NULL
cdef _ChannelArgs channel_args = _ChannelArgs(arguments)
self.c_server = grpc_server_create(channel_args.c_args(), NULL)
self.references.append(arguments)
-
- def request_call(
- self, CompletionQueue call_queue not None,
- CompletionQueue server_queue not None, tag):
- if not self.is_started or self.is_shutting_down:
- raise ValueError("server must be started and not shutting down")
- if server_queue not in self.registered_completion_queues:
- raise ValueError("server_queue must be a registered completion queue")
+
+ def request_call(
+ self, CompletionQueue call_queue not None,
+ CompletionQueue server_queue not None, tag):
+ if not self.is_started or self.is_shutting_down:
+ raise ValueError("server must be started and not shutting down")
+ if server_queue not in self.registered_completion_queues:
+ raise ValueError("server_queue must be a registered completion queue")
cdef _RequestCallTag request_call_tag = _RequestCallTag(tag)
request_call_tag.prepare()
cpython.Py_INCREF(request_call_tag)
@@ -43,16 +43,16 @@ cdef class Server:
&request_call_tag.c_invocation_metadata,
call_queue.c_completion_queue, server_queue.c_completion_queue,
<cpython.PyObject *>request_call_tag)
-
- def register_completion_queue(
- self, CompletionQueue queue not None):
- if self.is_started:
- raise ValueError("cannot register completion queues after start")
- with nogil:
- grpc_server_register_completion_queue(
- self.c_server, queue.c_completion_queue, NULL)
- self.registered_completion_queues.append(queue)
-
+
+ def register_completion_queue(
+ self, CompletionQueue queue not None):
+ if self.is_started:
+ raise ValueError("cannot register completion queues after start")
+ with nogil:
+ grpc_server_register_completion_queue(
+ self.c_server, queue.c_completion_queue, NULL)
+ self.registered_completion_queues.append(queue)
+
def start(self, backup_queue=True):
"""Start the Cython gRPC Server.
@@ -61,80 +61,80 @@ cdef class Server:
queue. In the case that no CQ is bound to the server, and the shutdown
of server becomes un-observable.
"""
- if self.is_started:
- raise ValueError("the server has already started")
+ if self.is_started:
+ raise ValueError("the server has already started")
if backup_queue:
self.backup_shutdown_queue = CompletionQueue(shutdown_cq=True)
self.register_completion_queue(self.backup_shutdown_queue)
- self.is_started = True
- with nogil:
- grpc_server_start(self.c_server)
+ self.is_started = True
+ with nogil:
+ grpc_server_start(self.c_server)
if backup_queue:
# Ensure the core has gotten a chance to do the start-up work
self.backup_shutdown_queue.poll(deadline=time.time())
-
- def add_http2_port(self, bytes address,
- ServerCredentials server_credentials=None):
- address = str_to_bytes(address)
- self.references.append(address)
- cdef int result
- cdef char *address_c_string = address
- if server_credentials is not None:
- self.references.append(server_credentials)
- with nogil:
- result = grpc_server_add_secure_http2_port(
- self.c_server, address_c_string, server_credentials.c_credentials)
- else:
- with nogil:
- result = grpc_server_add_insecure_http2_port(self.c_server,
- address_c_string)
- return result
-
- cdef _c_shutdown(self, CompletionQueue queue, tag):
- self.is_shutting_down = True
+
+ def add_http2_port(self, bytes address,
+ ServerCredentials server_credentials=None):
+ address = str_to_bytes(address)
+ self.references.append(address)
+ cdef int result
+ cdef char *address_c_string = address
+ if server_credentials is not None:
+ self.references.append(server_credentials)
+ with nogil:
+ result = grpc_server_add_secure_http2_port(
+ self.c_server, address_c_string, server_credentials.c_credentials)
+ else:
+ with nogil:
+ result = grpc_server_add_insecure_http2_port(self.c_server,
+ address_c_string)
+ return result
+
+ cdef _c_shutdown(self, CompletionQueue queue, tag):
+ self.is_shutting_down = True
cdef _ServerShutdownTag server_shutdown_tag = _ServerShutdownTag(tag, self)
cpython.Py_INCREF(server_shutdown_tag)
- with nogil:
- grpc_server_shutdown_and_notify(
- self.c_server, queue.c_completion_queue,
+ with nogil:
+ grpc_server_shutdown_and_notify(
+ self.c_server, queue.c_completion_queue,
<cpython.PyObject *>server_shutdown_tag)
-
- def shutdown(self, CompletionQueue queue not None, tag):
- if queue.is_shutting_down:
- raise ValueError("queue must be live")
- elif not self.is_started:
- raise ValueError("the server hasn't started yet")
- elif self.is_shutting_down:
- return
- elif queue not in self.registered_completion_queues:
- raise ValueError("expected registered completion queue")
- else:
- self._c_shutdown(queue, tag)
-
- cdef notify_shutdown_complete(self):
+
+ def shutdown(self, CompletionQueue queue not None, tag):
+ if queue.is_shutting_down:
+ raise ValueError("queue must be live")
+ elif not self.is_started:
+ raise ValueError("the server hasn't started yet")
+ elif self.is_shutting_down:
+ return
+ elif queue not in self.registered_completion_queues:
+ raise ValueError("expected registered completion queue")
+ else:
+ self._c_shutdown(queue, tag)
+
+ cdef notify_shutdown_complete(self):
# called only after our server shutdown tag has emerged from a completion
# queue.
- self.is_shutdown = True
-
- def cancel_all_calls(self):
- if not self.is_shutting_down:
+ self.is_shutdown = True
+
+ def cancel_all_calls(self):
+ if not self.is_shutting_down:
raise UsageError("the server must be shutting down to cancel all calls")
- elif self.is_shutdown:
- return
- else:
- with nogil:
- grpc_server_cancel_all_calls(self.c_server)
-
+ elif self.is_shutdown:
+ return
+ else:
+ with nogil:
+ grpc_server_cancel_all_calls(self.c_server)
+
# TODO(https://github.com/grpc/grpc/issues/17515) Determine what, if any,
# portion of this is safe to call from __dealloc__, and potentially remove
# backup_shutdown_queue.
def destroy(self):
- if self.c_server != NULL:
- if not self.is_started:
- pass
- elif self.is_shutdown:
- pass
- elif not self.is_shutting_down:
+ if self.c_server != NULL:
+ if not self.is_started:
+ pass
+ elif self.is_shutdown:
+ pass
+ elif not self.is_shutting_down:
if self.backup_shutdown_queue is None:
raise InternalError('Server shutdown failed: no completion queue.')
else:
@@ -143,11 +143,11 @@ cdef class Server:
# and now we wait
while not self.is_shutdown:
self.backup_shutdown_queue.poll()
- else:
- # We're in the process of shutting down, but have not shutdown; can't do
- # much but repeatedly release the GIL and wait
- while not self.is_shutdown:
- time.sleep(0)
+ else:
+ # We're in the process of shutting down, but have not shutdown; can't do
+ # much but repeatedly release the GIL and wait
+ while not self.is_shutdown:
+ time.sleep(0)
with nogil:
grpc_server_destroy(self.c_server)
self.c_server = NULL
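Taken together, the methods in this file enforce a strict ordering: completion queues are registered before start(), shutdown() needs a registered queue, cancel_all_calls() is only legal while shutting down, and destroy() falls back to the backup queue when nobody called shutdown(). A hedged sketch of that ordering, using only names visible in the hunks above; this illustrates the internal Cython API, not supported application usage.

from grpc._cython import cygrpc

server = cygrpc.Server(())                    # empty channel-argument sequence
queue = cygrpc.CompletionQueue()
server.register_completion_queue(queue)       # must happen before start()
port = server.add_http2_port(b'localhost:0')  # insecure: no ServerCredentials given
server.start()                                # also registers the backup shutdown queue
# ... request_call(queue, queue, tag) plus a queue.poll() loop would serve RPCs here ...
server.destroy()                              # no shutdown() was called, so destroy()
                                              # drives one through backup_shutdown_queue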
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/cygrpc.pxd b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/cygrpc.pxd
index 166be37022..4aa3c32b80 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/cygrpc.pxd
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/cygrpc.pxd
@@ -1,34 +1,34 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# distutils: language=c++
-
+
cimport cpython
-include "_cygrpc/grpc.pxi"
-
+include "_cygrpc/grpc.pxi"
+
include "_cygrpc/arguments.pxd.pxi"
-include "_cygrpc/call.pxd.pxi"
-include "_cygrpc/channel.pxd.pxi"
-include "_cygrpc/credentials.pxd.pxi"
-include "_cygrpc/completion_queue.pxd.pxi"
+include "_cygrpc/call.pxd.pxi"
+include "_cygrpc/channel.pxd.pxi"
+include "_cygrpc/credentials.pxd.pxi"
+include "_cygrpc/completion_queue.pxd.pxi"
include "_cygrpc/event.pxd.pxi"
include "_cygrpc/metadata.pxd.pxi"
include "_cygrpc/operation.pxd.pxi"
include "_cygrpc/propagation_bits.pxd.pxi"
-include "_cygrpc/records.pxd.pxi"
-include "_cygrpc/security.pxd.pxi"
-include "_cygrpc/server.pxd.pxi"
+include "_cygrpc/records.pxd.pxi"
+include "_cygrpc/security.pxd.pxi"
+include "_cygrpc/server.pxd.pxi"
include "_cygrpc/tag.pxd.pxi"
include "_cygrpc/time.pxd.pxi"
include "_cygrpc/vtable.pxd.pxi"
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/cygrpc.pyx b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/cygrpc.pyx
index 8d355c6fbb..ecbba24cbf 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_cython/cygrpc.pyx
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_cython/cygrpc.pyx
@@ -1,23 +1,23 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# distutils: language=c++
-
-cimport cpython
-
+
+cimport cpython
+
import logging
import os
-import sys
+import sys
import threading
import time
@@ -30,31 +30,31 @@ except ImportError:
# distributed without breaking none compatible Python versions. For now, if
# Asyncio package is not available we just skip it.
pass
-
+
# The only copy of Python logger for the Cython extension
_LOGGER = logging.getLogger(__name__)
-# TODO(atash): figure out why the coverage tool gets confused about the Cython
-# coverage plugin when the following files don't have a '.pxi' suffix.
-include "_cygrpc/grpc_string.pyx.pxi"
+# TODO(atash): figure out why the coverage tool gets confused about the Cython
+# coverage plugin when the following files don't have a '.pxi' suffix.
+include "_cygrpc/grpc_string.pyx.pxi"
include "_cygrpc/arguments.pyx.pxi"
-include "_cygrpc/call.pyx.pxi"
-include "_cygrpc/channel.pyx.pxi"
+include "_cygrpc/call.pyx.pxi"
+include "_cygrpc/channel.pyx.pxi"
include "_cygrpc/channelz.pyx.pxi"
-include "_cygrpc/credentials.pyx.pxi"
-include "_cygrpc/completion_queue.pyx.pxi"
+include "_cygrpc/credentials.pyx.pxi"
+include "_cygrpc/completion_queue.pyx.pxi"
include "_cygrpc/event.pyx.pxi"
include "_cygrpc/metadata.pyx.pxi"
include "_cygrpc/operation.pyx.pxi"
include "_cygrpc/propagation_bits.pyx.pxi"
-include "_cygrpc/records.pyx.pxi"
-include "_cygrpc/security.pyx.pxi"
-include "_cygrpc/server.pyx.pxi"
+include "_cygrpc/records.pyx.pxi"
+include "_cygrpc/security.pyx.pxi"
+include "_cygrpc/server.pyx.pxi"
include "_cygrpc/tag.pyx.pxi"
include "_cygrpc/time.pyx.pxi"
include "_cygrpc/vtable.pyx.pxi"
include "_cygrpc/_hooks.pyx.pxi"
-
+
include "_cygrpc/iomgr.pyx.pxi"
include "_cygrpc/grpc_gevent.pyx.pxi"
@@ -81,13 +81,13 @@ include "_cygrpc/aio/channel.pyx.pxi"
include "_cygrpc/aio/server.pyx.pxi"
-#
-# initialize gRPC
-#
-cdef extern from "Python.h":
-
+#
+# initialize gRPC
+#
+cdef extern from "Python.h":
+
int PyEval_InitThreads()
-
+
cdef _initialize():
# We have Python callbacks called by c-core threads, this ensures the GIL
# is initialized.
@@ -95,5 +95,5 @@ cdef _initialize():
import ssl
grpc_dont_init_openssl()
# Load Arcadia certs in ComputePemRootCerts and do not override here.
-
-_initialize()
+
+_initialize()
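The closing hunks explain the two import-time side effects that matter here: PyEval_InitThreads() is declared so the GIL machinery is ready before c-core threads call back into Python, and this build calls grpc_dont_init_openssl() because Arcadia root certificates are loaded in ComputePemRootCerts instead. A small, hedged illustration of the consequence for callers: there is no explicit init API, importing the extension is the initialization.

import logging

# Configure logging first if you want to see anything the extension's
# module-level _LOGGER emits during import.
logging.basicConfig(level=logging.DEBUG)

# Importing the compiled module runs _initialize() as a side effect; by the
# time the next line finishes, the thread/GIL setup and the OpenSSL opt-out
# described above have already happened.
from grpc._cython import cygrpc  # noqa: E402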
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_plugin_wrapping.py b/contrib/libs/grpc/src/python/grpcio/grpc/_plugin_wrapping.py
index e3bfa90916..407ddf2c0c 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_plugin_wrapping.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_plugin_wrapping.py
@@ -1,51 +1,51 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-import collections
+
+import collections
import logging
-import threading
-
-import grpc
-from grpc import _common
-from grpc._cython import cygrpc
-
+import threading
+
+import grpc
+from grpc import _common
+from grpc._cython import cygrpc
+
_LOGGER = logging.getLogger(__name__)
-
+
class _AuthMetadataContext(
- collections.namedtuple('AuthMetadataContext', (
+ collections.namedtuple('AuthMetadataContext', (
'service_url',
'method_name',
)), grpc.AuthMetadataContext):
- pass
-
-
+ pass
+
+
class _CallbackState(object):
-
+
def __init__(self):
self.lock = threading.Lock()
self.called = False
self.exception = None
-
-
+
+
class _AuthMetadataPluginCallback(grpc.AuthMetadataPluginCallback):
-
+
def __init__(self, state, callback):
self._state = state
self._callback = callback
-
- def __call__(self, metadata, error):
+
+ def __call__(self, metadata, error):
with self._state.lock:
if self._state.exception is None:
if self._state.called:
@@ -57,23 +57,23 @@ class _AuthMetadataPluginCallback(grpc.AuthMetadataPluginCallback):
raise RuntimeError(
'AuthMetadataPluginCallback raised exception "{}"!'.format(
self._state.exception))
- if error is None:
+ if error is None:
self._callback(metadata, cygrpc.StatusCode.ok, None)
- else:
+ else:
self._callback(None, cygrpc.StatusCode.internal,
_common.encode(str(error)))
-
-
+
+
class _Plugin(object):
-
+
def __init__(self, metadata_plugin):
self._metadata_plugin = metadata_plugin
-
+
def __call__(self, service_url, method_name, callback):
context = _AuthMetadataContext(_common.decode(service_url),
_common.decode(method_name))
callback_state = _CallbackState()
- try:
+ try:
self._metadata_plugin(
context, _AuthMetadataPluginCallback(callback_state, callback))
except Exception as exception: # pylint: disable=broad-except
@@ -86,8 +86,8 @@ class _Plugin(object):
return
callback(None, cygrpc.StatusCode.internal,
_common.encode(str(exception)))
-
-
+
+
def metadata_plugin_call_credentials(metadata_plugin, name):
if name is None:
try:
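The classes above adapt a user-supplied grpc.AuthMetadataPlugin for the Cython layer: _Plugin builds the _AuthMetadataContext, and _AuthMetadataPluginCallback guarantees the callback fires exactly once and maps failures to StatusCode.internal. A hedged sketch of the user-facing side that ends up flowing through this wrapper (token value and target are placeholders):

import grpc

class StaticTokenAuth(grpc.AuthMetadataPlugin):
    """Toy plugin attaching a static bearer token to every call."""

    def __init__(self, token):
        self._token = token

    def __call__(self, context, callback):
        # context carries service_url and method_name, per the
        # _AuthMetadataContext namedtuple above; callback(metadata, error)
        # must be invoked exactly once.
        callback((('authorization', 'Bearer ' + self._token),), None)

call_credentials = grpc.metadata_call_credentials(StaticTokenAuth('demo-token'))
channel_credentials = grpc.composite_channel_credentials(
    grpc.ssl_channel_credentials(), call_credentials)
channel = grpc.secure_channel('example.com:443', channel_credentials)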
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_server.py b/contrib/libs/grpc/src/python/grpcio/grpc/_server.py
index 48ff743995..9e9a20650c 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_server.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_server.py
@@ -1,137 +1,137 @@
# Copyright 2016 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Service-side implementation of gRPC Python."""
-
-import collections
-import enum
-import logging
-import threading
-import time
-
+"""Service-side implementation of gRPC Python."""
+
+import collections
+import enum
+import logging
+import threading
+import time
+
from concurrent import futures
import six
-import grpc
-from grpc import _common
+import grpc
+from grpc import _common
from grpc import _compression
from grpc import _interceptor
-from grpc._cython import cygrpc
-
+from grpc._cython import cygrpc
+
_LOGGER = logging.getLogger(__name__)
-_SHUTDOWN_TAG = 'shutdown'
-_REQUEST_CALL_TAG = 'request_call'
-
-_RECEIVE_CLOSE_ON_SERVER_TOKEN = 'receive_close_on_server'
-_SEND_INITIAL_METADATA_TOKEN = 'send_initial_metadata'
-_RECEIVE_MESSAGE_TOKEN = 'receive_message'
-_SEND_MESSAGE_TOKEN = 'send_message'
-_SEND_INITIAL_METADATA_AND_SEND_MESSAGE_TOKEN = (
- 'send_initial_metadata * send_message')
-_SEND_STATUS_FROM_SERVER_TOKEN = 'send_status_from_server'
-_SEND_INITIAL_METADATA_AND_SEND_STATUS_FROM_SERVER_TOKEN = (
- 'send_initial_metadata * send_status_from_server')
-
-_OPEN = 'open'
-_CLOSED = 'closed'
-_CANCELLED = 'cancelled'
-
-_EMPTY_FLAGS = 0
-
+_SHUTDOWN_TAG = 'shutdown'
+_REQUEST_CALL_TAG = 'request_call'
+
+_RECEIVE_CLOSE_ON_SERVER_TOKEN = 'receive_close_on_server'
+_SEND_INITIAL_METADATA_TOKEN = 'send_initial_metadata'
+_RECEIVE_MESSAGE_TOKEN = 'receive_message'
+_SEND_MESSAGE_TOKEN = 'send_message'
+_SEND_INITIAL_METADATA_AND_SEND_MESSAGE_TOKEN = (
+ 'send_initial_metadata * send_message')
+_SEND_STATUS_FROM_SERVER_TOKEN = 'send_status_from_server'
+_SEND_INITIAL_METADATA_AND_SEND_STATUS_FROM_SERVER_TOKEN = (
+ 'send_initial_metadata * send_status_from_server')
+
+_OPEN = 'open'
+_CLOSED = 'closed'
+_CANCELLED = 'cancelled'
+
+_EMPTY_FLAGS = 0
+
_DEALLOCATED_SERVER_CHECK_PERIOD_S = 1.0
_INF_TIMEOUT = 1e9
-
-
-def _serialized_request(request_event):
+
+
+def _serialized_request(request_event):
return request_event.batch_operations[0].message()
-
-
-def _application_code(code):
- cygrpc_code = _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE.get(code)
- return cygrpc.StatusCode.unknown if cygrpc_code is None else cygrpc_code
-
-
-def _completion_code(state):
- if state.code is None:
- return cygrpc.StatusCode.ok
- else:
- return _application_code(state.code)
-
-
-def _abortion_code(state, code):
- if state.code is None:
- return code
- else:
- return _application_code(state.code)
-
-
-def _details(state):
- return b'' if state.details is None else state.details
-
-
-class _HandlerCallDetails(
- collections.namedtuple('_HandlerCallDetails', (
+
+
+def _application_code(code):
+ cygrpc_code = _common.STATUS_CODE_TO_CYGRPC_STATUS_CODE.get(code)
+ return cygrpc.StatusCode.unknown if cygrpc_code is None else cygrpc_code
+
+
+def _completion_code(state):
+ if state.code is None:
+ return cygrpc.StatusCode.ok
+ else:
+ return _application_code(state.code)
+
+
+def _abortion_code(state, code):
+ if state.code is None:
+ return code
+ else:
+ return _application_code(state.code)
+
+
+def _details(state):
+ return b'' if state.details is None else state.details
+
+
+class _HandlerCallDetails(
+ collections.namedtuple('_HandlerCallDetails', (
'method',
'invocation_metadata',
)), grpc.HandlerCallDetails):
- pass
-
-
-class _RPCState(object):
-
- def __init__(self):
- self.condition = threading.Condition()
- self.due = set()
- self.request = None
- self.client = _OPEN
- self.initial_metadata_allowed = True
+ pass
+
+
+class _RPCState(object):
+
+ def __init__(self):
+ self.condition = threading.Condition()
+ self.due = set()
+ self.request = None
+ self.client = _OPEN
+ self.initial_metadata_allowed = True
self.compression_algorithm = None
- self.disable_next_compression = False
- self.trailing_metadata = None
- self.code = None
- self.details = None
- self.statused = False
- self.rpc_errors = []
- self.callbacks = []
+ self.disable_next_compression = False
+ self.trailing_metadata = None
+ self.code = None
+ self.details = None
+ self.statused = False
+ self.rpc_errors = []
+ self.callbacks = []
self.aborted = False
-
-
-def _raise_rpc_error(state):
- rpc_error = grpc.RpcError()
- state.rpc_errors.append(rpc_error)
- raise rpc_error
-
-
-def _possibly_finish_call(state, token):
- state.due.remove(token)
+
+
+def _raise_rpc_error(state):
+ rpc_error = grpc.RpcError()
+ state.rpc_errors.append(rpc_error)
+ raise rpc_error
+
+
+def _possibly_finish_call(state, token):
+ state.due.remove(token)
if not _is_rpc_state_active(state) and not state.due:
- callbacks = state.callbacks
- state.callbacks = None
- return state, callbacks
- else:
- return None, ()
-
-
-def _send_status_from_server(state, token):
-
- def send_status_from_server(unused_send_status_from_server_event):
- with state.condition:
- return _possibly_finish_call(state, token)
-
- return send_status_from_server
-
-
+ callbacks = state.callbacks
+ state.callbacks = None
+ return state, callbacks
+ else:
+ return None, ()
+
+
+def _send_status_from_server(state, token):
+
+ def send_status_from_server(unused_send_status_from_server_event):
+ with state.condition:
+ return _possibly_finish_call(state, token)
+
+ return send_status_from_server
+
+
def _get_initial_metadata(state, metadata):
with state.condition:
if state.compression_algorithm:
@@ -152,11 +152,11 @@ def _get_initial_metadata_operation(state, metadata):
return operation
-def _abort(state, call, code, details):
- if state.client is not _CANCELLED:
- effective_code = _abortion_code(state, code)
- effective_details = details if state.details is None else state.details
- if state.initial_metadata_allowed:
+def _abort(state, call, code, details):
+ if state.client is not _CANCELLED:
+ effective_code = _abortion_code(state, code)
+ effective_details = details if state.details is None else state.details
+ if state.initial_metadata_allowed:
operations = (
_get_initial_metadata_operation(state, None),
cygrpc.SendStatusFromServerOperation(state.trailing_metadata,
@@ -164,111 +164,111 @@ def _abort(state, call, code, details):
effective_details,
_EMPTY_FLAGS),
)
- token = _SEND_INITIAL_METADATA_AND_SEND_STATUS_FROM_SERVER_TOKEN
- else:
+ token = _SEND_INITIAL_METADATA_AND_SEND_STATUS_FROM_SERVER_TOKEN
+ else:
operations = (cygrpc.SendStatusFromServerOperation(
state.trailing_metadata, effective_code, effective_details,
_EMPTY_FLAGS),)
- token = _SEND_STATUS_FROM_SERVER_TOKEN
+ token = _SEND_STATUS_FROM_SERVER_TOKEN
call.start_server_batch(operations,
_send_status_from_server(state, token))
- state.statused = True
- state.due.add(token)
-
-
-def _receive_close_on_server(state):
-
- def receive_close_on_server(receive_close_on_server_event):
- with state.condition:
+ state.statused = True
+ state.due.add(token)
+
+
+def _receive_close_on_server(state):
+
+ def receive_close_on_server(receive_close_on_server_event):
+ with state.condition:
if receive_close_on_server_event.batch_operations[0].cancelled():
- state.client = _CANCELLED
- elif state.client is _OPEN:
- state.client = _CLOSED
- state.condition.notify_all()
- return _possibly_finish_call(state, _RECEIVE_CLOSE_ON_SERVER_TOKEN)
-
- return receive_close_on_server
-
-
-def _receive_message(state, call, request_deserializer):
-
- def receive_message(receive_message_event):
- serialized_request = _serialized_request(receive_message_event)
- if serialized_request is None:
- with state.condition:
- if state.client is _OPEN:
- state.client = _CLOSED
- state.condition.notify_all()
- return _possibly_finish_call(state, _RECEIVE_MESSAGE_TOKEN)
- else:
- request = _common.deserialize(serialized_request,
- request_deserializer)
- with state.condition:
- if request is None:
- _abort(state, call, cygrpc.StatusCode.internal,
- b'Exception deserializing request!')
- else:
- state.request = request
- state.condition.notify_all()
- return _possibly_finish_call(state, _RECEIVE_MESSAGE_TOKEN)
-
- return receive_message
-
-
-def _send_initial_metadata(state):
-
- def send_initial_metadata(unused_send_initial_metadata_event):
- with state.condition:
- return _possibly_finish_call(state, _SEND_INITIAL_METADATA_TOKEN)
-
- return send_initial_metadata
-
-
-def _send_message(state, token):
-
- def send_message(unused_send_message_event):
- with state.condition:
- state.condition.notify_all()
- return _possibly_finish_call(state, token)
-
- return send_message
-
-
-class _Context(grpc.ServicerContext):
-
- def __init__(self, rpc_event, state, request_deserializer):
- self._rpc_event = rpc_event
- self._state = state
- self._request_deserializer = request_deserializer
-
- def is_active(self):
- with self._state.condition:
+ state.client = _CANCELLED
+ elif state.client is _OPEN:
+ state.client = _CLOSED
+ state.condition.notify_all()
+ return _possibly_finish_call(state, _RECEIVE_CLOSE_ON_SERVER_TOKEN)
+
+ return receive_close_on_server
+
+
+def _receive_message(state, call, request_deserializer):
+
+ def receive_message(receive_message_event):
+ serialized_request = _serialized_request(receive_message_event)
+ if serialized_request is None:
+ with state.condition:
+ if state.client is _OPEN:
+ state.client = _CLOSED
+ state.condition.notify_all()
+ return _possibly_finish_call(state, _RECEIVE_MESSAGE_TOKEN)
+ else:
+ request = _common.deserialize(serialized_request,
+ request_deserializer)
+ with state.condition:
+ if request is None:
+ _abort(state, call, cygrpc.StatusCode.internal,
+ b'Exception deserializing request!')
+ else:
+ state.request = request
+ state.condition.notify_all()
+ return _possibly_finish_call(state, _RECEIVE_MESSAGE_TOKEN)
+
+ return receive_message
+
+
+def _send_initial_metadata(state):
+
+ def send_initial_metadata(unused_send_initial_metadata_event):
+ with state.condition:
+ return _possibly_finish_call(state, _SEND_INITIAL_METADATA_TOKEN)
+
+ return send_initial_metadata
+
+
+def _send_message(state, token):
+
+ def send_message(unused_send_message_event):
+ with state.condition:
+ state.condition.notify_all()
+ return _possibly_finish_call(state, token)
+
+ return send_message
+
+
+class _Context(grpc.ServicerContext):
+
+ def __init__(self, rpc_event, state, request_deserializer):
+ self._rpc_event = rpc_event
+ self._state = state
+ self._request_deserializer = request_deserializer
+
+ def is_active(self):
+ with self._state.condition:
return _is_rpc_state_active(self._state)
-
- def time_remaining(self):
+
+ def time_remaining(self):
return max(self._rpc_event.call_details.deadline - time.time(), 0)
-
- def cancel(self):
+
+ def cancel(self):
self._rpc_event.call.cancel()
-
- def add_callback(self, callback):
- with self._state.condition:
- if self._state.callbacks is None:
- return False
- else:
- self._state.callbacks.append(callback)
- return True
-
- def disable_next_message_compression(self):
- with self._state.condition:
- self._state.disable_next_compression = True
-
- def invocation_metadata(self):
+
+ def add_callback(self, callback):
+ with self._state.condition:
+ if self._state.callbacks is None:
+ return False
+ else:
+ self._state.callbacks.append(callback)
+ return True
+
+ def disable_next_message_compression(self):
+ with self._state.condition:
+ self._state.disable_next_compression = True
+
+ def invocation_metadata(self):
return self._rpc_event.invocation_metadata
-
- def peer(self):
+
+ def peer(self):
return _common.decode(self._rpc_event.call.peer())
-
+
def peer_identities(self):
return cygrpc.peer_identities(self._rpc_event.call)
@@ -286,25 +286,25 @@ class _Context(grpc.ServicerContext):
with self._state.condition:
self._state.compression_algorithm = compression
- def send_initial_metadata(self, initial_metadata):
- with self._state.condition:
- if self._state.client is _CANCELLED:
- _raise_rpc_error(self._state)
- else:
- if self._state.initial_metadata_allowed:
+ def send_initial_metadata(self, initial_metadata):
+ with self._state.condition:
+ if self._state.client is _CANCELLED:
+ _raise_rpc_error(self._state)
+ else:
+ if self._state.initial_metadata_allowed:
operation = _get_initial_metadata_operation(
self._state, initial_metadata)
self._rpc_event.call.start_server_batch(
(operation,), _send_initial_metadata(self._state))
- self._state.initial_metadata_allowed = False
- self._state.due.add(_SEND_INITIAL_METADATA_TOKEN)
- else:
- raise ValueError('Initial metadata no longer allowed!')
-
- def set_trailing_metadata(self, trailing_metadata):
- with self._state.condition:
+ self._state.initial_metadata_allowed = False
+ self._state.due.add(_SEND_INITIAL_METADATA_TOKEN)
+ else:
+ raise ValueError('Initial metadata no longer allowed!')
+
+ def set_trailing_metadata(self, trailing_metadata):
+ with self._state.condition:
self._state.trailing_metadata = trailing_metadata
-
+
def abort(self, code, details):
# treat OK like other invalid arguments: fail the RPC
if code == grpc.StatusCode.OK:
@@ -322,101 +322,101 @@ class _Context(grpc.ServicerContext):
self._state.trailing_metadata = status.trailing_metadata
self.abort(status.code, status.details)
- def set_code(self, code):
- with self._state.condition:
- self._state.code = code
-
- def set_details(self, details):
- with self._state.condition:
- self._state.details = _common.encode(details)
-
+ def set_code(self, code):
+ with self._state.condition:
+ self._state.code = code
+
+ def set_details(self, details):
+ with self._state.condition:
+ self._state.details = _common.encode(details)
+
def _finalize_state(self):
pass
-
-
-class _RequestIterator(object):
-
- def __init__(self, state, call, request_deserializer):
- self._state = state
- self._call = call
- self._request_deserializer = request_deserializer
-
- def _raise_or_start_receive_message(self):
- if self._state.client is _CANCELLED:
- _raise_rpc_error(self._state)
+
+
+class _RequestIterator(object):
+
+ def __init__(self, state, call, request_deserializer):
+ self._state = state
+ self._call = call
+ self._request_deserializer = request_deserializer
+
+ def _raise_or_start_receive_message(self):
+ if self._state.client is _CANCELLED:
+ _raise_rpc_error(self._state)
elif not _is_rpc_state_active(self._state):
- raise StopIteration()
- else:
- self._call.start_server_batch(
+ raise StopIteration()
+ else:
+ self._call.start_server_batch(
(cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),),
- _receive_message(self._state, self._call,
- self._request_deserializer))
- self._state.due.add(_RECEIVE_MESSAGE_TOKEN)
-
- def _look_for_request(self):
- if self._state.client is _CANCELLED:
- _raise_rpc_error(self._state)
- elif (self._state.request is None and
- _RECEIVE_MESSAGE_TOKEN not in self._state.due):
- raise StopIteration()
- else:
- request = self._state.request
- self._state.request = None
- return request
-
+ _receive_message(self._state, self._call,
+ self._request_deserializer))
+ self._state.due.add(_RECEIVE_MESSAGE_TOKEN)
+
+ def _look_for_request(self):
+ if self._state.client is _CANCELLED:
+ _raise_rpc_error(self._state)
+ elif (self._state.request is None and
+ _RECEIVE_MESSAGE_TOKEN not in self._state.due):
+ raise StopIteration()
+ else:
+ request = self._state.request
+ self._state.request = None
+ return request
+
raise AssertionError() # should never run
- def _next(self):
- with self._state.condition:
- self._raise_or_start_receive_message()
- while True:
- self._state.condition.wait()
- request = self._look_for_request()
- if request is not None:
- return request
-
- def __iter__(self):
- return self
-
- def __next__(self):
- return self._next()
-
- def next(self):
- return self._next()
-
-
-def _unary_request(rpc_event, state, request_deserializer):
-
- def unary_request():
- with state.condition:
+ def _next(self):
+ with self._state.condition:
+ self._raise_or_start_receive_message()
+ while True:
+ self._state.condition.wait()
+ request = self._look_for_request()
+ if request is not None:
+ return request
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ return self._next()
+
+ def next(self):
+ return self._next()
+
+
+def _unary_request(rpc_event, state, request_deserializer):
+
+ def unary_request():
+ with state.condition:
if not _is_rpc_state_active(state):
- return None
- else:
+ return None
+ else:
rpc_event.call.start_server_batch(
(cygrpc.ReceiveMessageOperation(_EMPTY_FLAGS),),
_receive_message(state, rpc_event.call,
- request_deserializer))
- state.due.add(_RECEIVE_MESSAGE_TOKEN)
- while True:
- state.condition.wait()
- if state.request is None:
- if state.client is _CLOSED:
- details = '"{}" requires exactly one request message.'.format(
+ request_deserializer))
+ state.due.add(_RECEIVE_MESSAGE_TOKEN)
+ while True:
+ state.condition.wait()
+ if state.request is None:
+ if state.client is _CLOSED:
+ details = '"{}" requires exactly one request message.'.format(
rpc_event.call_details.method)
_abort(state, rpc_event.call,
- cygrpc.StatusCode.unimplemented,
- _common.encode(details))
- return None
- elif state.client is _CANCELLED:
- return None
- else:
- request = state.request
- state.request = None
- return request
-
- return unary_request
-
-
+ cygrpc.StatusCode.unimplemented,
+ _common.encode(details))
+ return None
+ elif state.client is _CANCELLED:
+ return None
+ else:
+ request = state.request
+ state.request = None
+ return request
+
+ return unary_request
+
+
def _call_behavior(rpc_event,
state,
behavior,
@@ -446,15 +446,15 @@ def _call_behavior(rpc_event,
_abort(state, rpc_event.call, cygrpc.StatusCode.unknown,
_common.encode(details))
return None, False
-
-
-def _take_response_from_response_iterator(rpc_event, state, response_iterator):
- try:
- return next(response_iterator), True
- except StopIteration:
- return None, True
+
+
+def _take_response_from_response_iterator(rpc_event, state, response_iterator):
+ try:
+ return next(response_iterator), True
+ except StopIteration:
+ return None, True
except Exception as exception: # pylint: disable=broad-except
- with state.condition:
+ with state.condition:
if state.aborted:
_abort(state, rpc_event.call, cygrpc.StatusCode.unknown,
b'RPC Aborted')
@@ -463,20 +463,20 @@ def _take_response_from_response_iterator(rpc_event, state, response_iterator):
_LOGGER.exception(details)
_abort(state, rpc_event.call, cygrpc.StatusCode.unknown,
_common.encode(details))
- return None, False
-
-
-def _serialize_response(rpc_event, state, response, response_serializer):
- serialized_response = _common.serialize(response, response_serializer)
- if serialized_response is None:
- with state.condition:
+ return None, False
+
+
+def _serialize_response(rpc_event, state, response, response_serializer):
+ serialized_response = _common.serialize(response, response_serializer)
+ if serialized_response is None:
+ with state.condition:
_abort(state, rpc_event.call, cygrpc.StatusCode.internal,
- b'Failed to serialize response!')
- return None
- else:
- return serialized_response
-
-
+ b'Failed to serialize response!')
+ return None
+ else:
+ return serialized_response
+
+
def _get_send_message_op_flags_from_state(state):
if state.disable_next_compression:
return cygrpc.WriteFlag.no_compress
@@ -489,62 +489,62 @@ def _reset_per_message_state(state):
state.disable_next_compression = False
-def _send_response(rpc_event, state, serialized_response):
- with state.condition:
+def _send_response(rpc_event, state, serialized_response):
+ with state.condition:
if not _is_rpc_state_active(state):
- return False
- else:
- if state.initial_metadata_allowed:
+ return False
+ else:
+ if state.initial_metadata_allowed:
operations = (
_get_initial_metadata_operation(state, None),
cygrpc.SendMessageOperation(
serialized_response,
_get_send_message_op_flags_from_state(state)),
)
- state.initial_metadata_allowed = False
- token = _SEND_INITIAL_METADATA_AND_SEND_MESSAGE_TOKEN
- else:
+ state.initial_metadata_allowed = False
+ token = _SEND_INITIAL_METADATA_AND_SEND_MESSAGE_TOKEN
+ else:
operations = (cygrpc.SendMessageOperation(
serialized_response,
_get_send_message_op_flags_from_state(state)),)
- token = _SEND_MESSAGE_TOKEN
+ token = _SEND_MESSAGE_TOKEN
rpc_event.call.start_server_batch(operations,
_send_message(state, token))
- state.due.add(token)
+ state.due.add(token)
_reset_per_message_state(state)
- while True:
- state.condition.wait()
- if token not in state.due:
+ while True:
+ state.condition.wait()
+ if token not in state.due:
return _is_rpc_state_active(state)
-
-
-def _status(rpc_event, state, serialized_response):
- with state.condition:
- if state.client is not _CANCELLED:
- code = _completion_code(state)
- details = _details(state)
- operations = [
+
+
+def _status(rpc_event, state, serialized_response):
+ with state.condition:
+ if state.client is not _CANCELLED:
+ code = _completion_code(state)
+ details = _details(state)
+ operations = [
cygrpc.SendStatusFromServerOperation(state.trailing_metadata,
code, details,
_EMPTY_FLAGS),
- ]
- if state.initial_metadata_allowed:
+ ]
+ if state.initial_metadata_allowed:
operations.append(_get_initial_metadata_operation(state, None))
- if serialized_response is not None:
- operations.append(
+ if serialized_response is not None:
+ operations.append(
cygrpc.SendMessageOperation(
serialized_response,
_get_send_message_op_flags_from_state(state)))
rpc_event.call.start_server_batch(
operations,
- _send_status_from_server(state, _SEND_STATUS_FROM_SERVER_TOKEN))
- state.statused = True
+ _send_status_from_server(state, _SEND_STATUS_FROM_SERVER_TOKEN))
+ state.statused = True
_reset_per_message_state(state)
- state.due.add(_SEND_STATUS_FROM_SERVER_TOKEN)
-
-
-def _unary_response_in_pool(rpc_event, state, behavior, argument_thunk,
- request_deserializer, response_serializer):
+ state.due.add(_SEND_STATUS_FROM_SERVER_TOKEN)
+
+
+def _unary_response_in_pool(rpc_event, state, behavior, argument_thunk,
+ request_deserializer, response_serializer):
cygrpc.install_context_from_request_call_event(rpc_event)
try:
argument = argument_thunk()
@@ -558,10 +558,10 @@ def _unary_response_in_pool(rpc_event, state, behavior, argument_thunk,
_status(rpc_event, state, serialized_response)
finally:
cygrpc.uninstall_context()
-
-
-def _stream_response_in_pool(rpc_event, state, behavior, argument_thunk,
- request_deserializer, response_serializer):
+
+
+def _stream_response_in_pool(rpc_event, state, behavior, argument_thunk,
+ request_deserializer, response_serializer):
cygrpc.install_context_from_request_call_event(rpc_event)
def send_response(response):
@@ -593,8 +593,8 @@ def _stream_response_in_pool(rpc_event, state, behavior, argument_thunk,
rpc_event, state, send_response, response_iterator)
finally:
cygrpc.uninstall_context()
-
-
+
+
def _is_rpc_state_active(state):
return state.client is not _CANCELLED and not state.statused
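A pattern running through the hunks above: every batch operation adds a token to state.due, and each completion callback funnels through _possibly_finish_call, which releases the RPC's callbacks only once the RPC is no longer active and no tokens remain outstanding. An illustrative re-statement of that bookkeeping in plain Python (not code from this file):

import threading

class _CompletionTracker(object):
    """Sketch of the state.due / _possibly_finish_call pattern."""

    def __init__(self):
        self.condition = threading.Condition()
        self.due = set()        # tokens for batches still in flight
        self.active = True      # analogue of _is_rpc_state_active(state)
        self.callbacks = []

    def begin(self, token):
        with self.condition:
            self.due.add(token)

    def complete(self, token):
        with self.condition:
            self.due.remove(token)
            if not self.active and not self.due:
                callbacks, self.callbacks = self.callbacks, None
                return callbacks  # nothing outstanding: safe to run these now
            return ()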
@@ -622,30 +622,30 @@ def _select_thread_pool_for_behavior(behavior, default_thread_pool):
def _handle_unary_unary(rpc_event, state, method_handler, default_thread_pool):
- unary_request = _unary_request(rpc_event, state,
- method_handler.request_deserializer)
+ unary_request = _unary_request(rpc_event, state,
+ method_handler.request_deserializer)
thread_pool = _select_thread_pool_for_behavior(method_handler.unary_unary,
default_thread_pool)
return thread_pool.submit(_unary_response_in_pool, rpc_event, state,
method_handler.unary_unary, unary_request,
method_handler.request_deserializer,
method_handler.response_serializer)
-
-
+
+
def _handle_unary_stream(rpc_event, state, method_handler, default_thread_pool):
- unary_request = _unary_request(rpc_event, state,
- method_handler.request_deserializer)
+ unary_request = _unary_request(rpc_event, state,
+ method_handler.request_deserializer)
thread_pool = _select_thread_pool_for_behavior(method_handler.unary_stream,
default_thread_pool)
return thread_pool.submit(_stream_response_in_pool, rpc_event, state,
method_handler.unary_stream, unary_request,
method_handler.request_deserializer,
method_handler.response_serializer)
-
-
+
+
def _handle_stream_unary(rpc_event, state, method_handler, default_thread_pool):
request_iterator = _RequestIterator(state, rpc_event.call,
- method_handler.request_deserializer)
+ method_handler.request_deserializer)
thread_pool = _select_thread_pool_for_behavior(method_handler.stream_unary,
default_thread_pool)
return thread_pool.submit(_unary_response_in_pool, rpc_event, state,
@@ -653,12 +653,12 @@ def _handle_stream_unary(rpc_event, state, method_handler, default_thread_pool):
lambda: request_iterator,
method_handler.request_deserializer,
method_handler.response_serializer)
-
-
+
+
def _handle_stream_stream(rpc_event, state, method_handler,
default_thread_pool):
request_iterator = _RequestIterator(state, rpc_event.call,
- method_handler.request_deserializer)
+ method_handler.request_deserializer)
thread_pool = _select_thread_pool_for_behavior(method_handler.stream_stream,
default_thread_pool)
return thread_pool.submit(_stream_response_in_pool, rpc_event, state,
@@ -666,8 +666,8 @@ def _handle_stream_stream(rpc_event, state, method_handler,
lambda: request_iterator,
method_handler.request_deserializer,
method_handler.response_serializer)
-
-
+
+
def _find_method_handler(rpc_event, generic_handlers, interceptor_pipeline):
def query_handlers(handler_call_details):
@@ -675,12 +675,12 @@ def _find_method_handler(rpc_event, generic_handlers, interceptor_pipeline):
method_handler = generic_handler.service(handler_call_details)
if method_handler is not None:
return method_handler
- return None
-
+ return None
+
handler_call_details = _HandlerCallDetails(
_common.decode(rpc_event.call_details.method),
rpc_event.invocation_metadata)
-
+
if interceptor_pipeline is not None:
return interceptor_pipeline.execute(query_handlers,
handler_call_details)
@@ -690,7 +690,7 @@ def _find_method_handler(rpc_event, generic_handlers, interceptor_pipeline):
def _reject_rpc(rpc_event, status, details):
rpc_state = _RPCState()
- operations = (
+ operations = (
_get_initial_metadata_operation(rpc_state, None),
cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS),
cygrpc.SendStatusFromServerOperation(None, status, details,
@@ -700,32 +700,32 @@ def _reject_rpc(rpc_event, status, details):
rpc_state,
(),
))
- return rpc_state
-
-
-def _handle_with_method_handler(rpc_event, method_handler, thread_pool):
- state = _RPCState()
- with state.condition:
+ return rpc_state
+
+
+def _handle_with_method_handler(rpc_event, method_handler, thread_pool):
+ state = _RPCState()
+ with state.condition:
rpc_event.call.start_server_batch(
(cygrpc.ReceiveCloseOnServerOperation(_EMPTY_FLAGS),),
- _receive_close_on_server(state))
- state.due.add(_RECEIVE_CLOSE_ON_SERVER_TOKEN)
- if method_handler.request_streaming:
- if method_handler.response_streaming:
+ _receive_close_on_server(state))
+ state.due.add(_RECEIVE_CLOSE_ON_SERVER_TOKEN)
+ if method_handler.request_streaming:
+ if method_handler.response_streaming:
return state, _handle_stream_stream(rpc_event, state,
method_handler, thread_pool)
- else:
+ else:
return state, _handle_stream_unary(rpc_event, state,
method_handler, thread_pool)
- else:
- if method_handler.response_streaming:
+ else:
+ if method_handler.response_streaming:
return state, _handle_unary_stream(rpc_event, state,
method_handler, thread_pool)
- else:
+ else:
return state, _handle_unary_unary(rpc_event, state,
method_handler, thread_pool)
-
-
+
+
def _handle_call(rpc_event, generic_handlers, interceptor_pipeline, thread_pool,
concurrency_exceeded):
if not rpc_event.success:
@@ -739,85 +739,85 @@ def _handle_call(rpc_event, generic_handlers, interceptor_pipeline, thread_pool,
_LOGGER.exception(details)
return _reject_rpc(rpc_event, cygrpc.StatusCode.unknown,
b'Error in service handler!'), None
- if method_handler is None:
+ if method_handler is None:
return _reject_rpc(rpc_event, cygrpc.StatusCode.unimplemented,
b'Method not found!'), None
elif concurrency_exceeded:
return _reject_rpc(rpc_event, cygrpc.StatusCode.resource_exhausted,
b'Concurrent RPC limit exceeded!'), None
- else:
- return _handle_with_method_handler(rpc_event, method_handler,
- thread_pool)
- else:
+ else:
+ return _handle_with_method_handler(rpc_event, method_handler,
+ thread_pool)
+ else:
return None, None
-
-
-@enum.unique
-class _ServerStage(enum.Enum):
- STOPPED = 'stopped'
- STARTED = 'started'
- GRACE = 'grace'
-
-
-class _ServerState(object):
-
+
+
+@enum.unique
+class _ServerStage(enum.Enum):
+ STOPPED = 'stopped'
+ STARTED = 'started'
+ GRACE = 'grace'
+
+
+class _ServerState(object):
+
# pylint: disable=too-many-arguments
def __init__(self, completion_queue, server, generic_handlers,
interceptor_pipeline, thread_pool, maximum_concurrent_rpcs):
self.lock = threading.RLock()
- self.completion_queue = completion_queue
- self.server = server
- self.generic_handlers = list(generic_handlers)
+ self.completion_queue = completion_queue
+ self.server = server
+ self.generic_handlers = list(generic_handlers)
self.interceptor_pipeline = interceptor_pipeline
- self.thread_pool = thread_pool
- self.stage = _ServerStage.STOPPED
+ self.thread_pool = thread_pool
+ self.stage = _ServerStage.STOPPED
self.termination_event = threading.Event()
self.shutdown_events = [self.termination_event]
self.maximum_concurrent_rpcs = maximum_concurrent_rpcs
self.active_rpc_count = 0
-
- # TODO(https://github.com/grpc/grpc/issues/6597): eliminate these fields.
- self.rpc_states = set()
- self.due = set()
-
+
+ # TODO(https://github.com/grpc/grpc/issues/6597): eliminate these fields.
+ self.rpc_states = set()
+ self.due = set()
+
# A "volatile" flag to interrupt the daemon serving thread
self.server_deallocated = False
-
-
-def _add_generic_handlers(state, generic_handlers):
- with state.lock:
- state.generic_handlers.extend(generic_handlers)
-
-
-def _add_insecure_port(state, address):
- with state.lock:
- return state.server.add_http2_port(address)
-
-
-def _add_secure_port(state, address, server_credentials):
- with state.lock:
- return state.server.add_http2_port(address,
- server_credentials._credentials)
-
-
-def _request_call(state):
- state.server.request_call(state.completion_queue, state.completion_queue,
- _REQUEST_CALL_TAG)
- state.due.add(_REQUEST_CALL_TAG)
-
-
-# TODO(https://github.com/grpc/grpc/issues/6597): delete this function.
-def _stop_serving(state):
- if not state.rpc_states and not state.due:
+
+
+def _add_generic_handlers(state, generic_handlers):
+ with state.lock:
+ state.generic_handlers.extend(generic_handlers)
+
+
+def _add_insecure_port(state, address):
+ with state.lock:
+ return state.server.add_http2_port(address)
+
+
+def _add_secure_port(state, address, server_credentials):
+ with state.lock:
+ return state.server.add_http2_port(address,
+ server_credentials._credentials)
+
+
+def _request_call(state):
+ state.server.request_call(state.completion_queue, state.completion_queue,
+ _REQUEST_CALL_TAG)
+ state.due.add(_REQUEST_CALL_TAG)
+
+
+# TODO(https://github.com/grpc/grpc/issues/6597): delete this function.
+def _stop_serving(state):
+ if not state.rpc_states and not state.due:
state.server.destroy()
- for shutdown_event in state.shutdown_events:
- shutdown_event.set()
- state.stage = _ServerStage.STOPPED
- return True
- else:
- return False
-
-
+ for shutdown_event in state.shutdown_events:
+ shutdown_event.set()
+ state.stage = _ServerStage.STOPPED
+ return True
+ else:
+ return False
+
+
def _on_call_completed(state):
with state.lock:
state.active_rpc_count -= 1
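active_rpc_count and _on_call_completed back the maximum_concurrent_rpcs option: _handle_call rejects new RPCs with RESOURCE_EXHAUSTED ('Concurrent RPC limit exceeded!') while the count sits at the limit, and the decrement here reopens capacity. A hedged sketch of the public-facing knob (worker count and limit are arbitrary):

import grpc
from concurrent import futures

# Once 10 RPCs are in flight, additional ones are rejected immediately with
# RESOURCE_EXHAUSTED rather than queued behind the thread pool.
server = grpc.server(
    futures.ThreadPoolExecutor(max_workers=10),
    maximum_concurrent_rpcs=10)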
@@ -865,8 +865,8 @@ def _process_event_and_continue(state, event):
return should_continue
-def _serve(state):
- while True:
+def _serve(state):
+ while True:
timeout = time.time() + _DEALLOCATED_SERVER_CHECK_PERIOD_S
event = state.completion_queue.poll(timeout)
if state.server_deallocated:
@@ -878,8 +878,8 @@ def _serve(state):
# ~before~ we poll again; if the event has a reference
# to a shutdown Call object, this can induce spinlock.
event = None
-
-
+
+
def _begin_shutdown_once(state):
with state.lock:
if state.stage is _ServerStage.STARTED:
@@ -888,45 +888,45 @@ def _begin_shutdown_once(state):
state.due.add(_SHUTDOWN_TAG)
-def _stop(state, grace):
- with state.lock:
- if state.stage is _ServerStage.STOPPED:
- shutdown_event = threading.Event()
- shutdown_event.set()
- return shutdown_event
- else:
+def _stop(state, grace):
+ with state.lock:
+ if state.stage is _ServerStage.STOPPED:
+ shutdown_event = threading.Event()
+ shutdown_event.set()
+ return shutdown_event
+ else:
_begin_shutdown_once(state)
- shutdown_event = threading.Event()
- state.shutdown_events.append(shutdown_event)
- if grace is None:
- state.server.cancel_all_calls()
- else:
-
- def cancel_all_calls_after_grace():
- shutdown_event.wait(timeout=grace)
- with state.lock:
- state.server.cancel_all_calls()
-
- thread = threading.Thread(target=cancel_all_calls_after_grace)
- thread.start()
- return shutdown_event
- shutdown_event.wait()
- return shutdown_event
-
-
-def _start(state):
- with state.lock:
- if state.stage is not _ServerStage.STOPPED:
- raise ValueError('Cannot start already-started server!')
- state.server.start()
- state.stage = _ServerStage.STARTED
- _request_call(state)
-
+ shutdown_event = threading.Event()
+ state.shutdown_events.append(shutdown_event)
+ if grace is None:
+ state.server.cancel_all_calls()
+ else:
+
+ def cancel_all_calls_after_grace():
+ shutdown_event.wait(timeout=grace)
+ with state.lock:
+ state.server.cancel_all_calls()
+
+ thread = threading.Thread(target=cancel_all_calls_after_grace)
+ thread.start()
+ return shutdown_event
+ shutdown_event.wait()
+ return shutdown_event
+
+
+def _start(state):
+ with state.lock:
+ if state.stage is not _ServerStage.STOPPED:
+ raise ValueError('Cannot start already-started server!')
+ state.server.start()
+ state.stage = _ServerStage.STARTED
+ _request_call(state)
+
thread = threading.Thread(target=_serve, args=(state,))
thread.daemon = True
- thread.start()
-
-
+ thread.start()
+
+
def _validate_generic_rpc_handlers(generic_rpc_handlers):
for generic_rpc_handler in generic_rpc_handlers:
service_attribute = getattr(generic_rpc_handler, 'service', None)
@@ -934,7 +934,7 @@ def _validate_generic_rpc_handlers(generic_rpc_handlers):
raise AttributeError(
'"{}" must conform to grpc.GenericRpcHandler type but does '
'not have "service" method!'.format(generic_rpc_handler))
-
+
def _augment_options(base_options, compression):
compression_option = _compression.create_channel_option(compression)
@@ -946,30 +946,30 @@ class _Server(grpc.Server):
# pylint: disable=too-many-arguments
def __init__(self, thread_pool, generic_handlers, interceptors, options,
maximum_concurrent_rpcs, compression):
- completion_queue = cygrpc.CompletionQueue()
+ completion_queue = cygrpc.CompletionQueue()
server = cygrpc.Server(_augment_options(options, compression))
- server.register_completion_queue(completion_queue)
- self._state = _ServerState(completion_queue, server, generic_handlers,
+ server.register_completion_queue(completion_queue)
+ self._state = _ServerState(completion_queue, server, generic_handlers,
_interceptor.service_pipeline(interceptors),
thread_pool, maximum_concurrent_rpcs)
-
- def add_generic_rpc_handlers(self, generic_rpc_handlers):
+
+ def add_generic_rpc_handlers(self, generic_rpc_handlers):
_validate_generic_rpc_handlers(generic_rpc_handlers)
- _add_generic_handlers(self._state, generic_rpc_handlers)
-
- def add_insecure_port(self, address):
+ _add_generic_handlers(self._state, generic_rpc_handlers)
+
+ def add_insecure_port(self, address):
return _common.validate_port_binding_result(
address, _add_insecure_port(self._state, _common.encode(address)))
-
- def add_secure_port(self, address, server_credentials):
+
+ def add_secure_port(self, address, server_credentials):
return _common.validate_port_binding_result(
address,
_add_secure_port(self._state, _common.encode(address),
server_credentials))
-
- def start(self):
- _start(self._state)
-
+
+ def start(self):
+ _start(self._state)
+
def wait_for_termination(self, timeout=None):
# NOTE(https://bugs.python.org/issue35935)
# Remove this workaround once threading.Event.wait() is working with
@@ -978,10 +978,10 @@ class _Server(grpc.Server):
self._state.termination_event.is_set,
timeout=timeout)
- def stop(self, grace):
- return _stop(self._state, grace)
-
- def __del__(self):
+ def stop(self, grace):
+ return _stop(self._state, grace)
+
+ def __del__(self):
if hasattr(self, '_state'):
# We can not grab a lock in __del__(), so set a flag to signal the
# serving daemon thread (if it exists) to initiate shutdown.
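_Server wires the pieces above together: __init__ builds a cygrpc.Server plus one completion queue, start() launches the daemon _serve() thread, stop(grace) returns a threading.Event and cancels remaining calls once the grace period lapses, and __del__ merely flags server_deallocated for the serving thread. A plain usage sketch of that lifecycle through the public API (port, timeout and grace values are arbitrary; handler registration is elided):

import grpc
from concurrent import futures

server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
port = server.add_insecure_port('localhost:0')  # 0 lets the OS pick a free port
server.start()                                  # spawns the daemon _serve() thread
terminated = server.wait_for_termination(timeout=5)  # False here: nothing stopped it
shutdown_event = server.stop(grace=1.0)         # begin shutdown; cancel calls after 1s
shutdown_event.wait()                           # Event is set once fully stopped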
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/_utilities.py b/contrib/libs/grpc/src/python/grpcio/grpc/_utilities.py
index c48aaf60a2..70ce2a3dd6 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/_utilities.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/_utilities.py
@@ -1,36 +1,36 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Internal utilities for gRPC Python."""
-
-import collections
-import threading
-import time
+"""Internal utilities for gRPC Python."""
+
+import collections
+import threading
+import time
import logging
-
-import six
-
-import grpc
-from grpc import _common
-
+
+import six
+
+import grpc
+from grpc import _common
+
_LOGGER = logging.getLogger(__name__)
-_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE = (
- 'Exception calling connectivity future "done" callback!')
-
-
-class RpcMethodHandler(
- collections.namedtuple('_RpcMethodHandler', (
+_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE = (
+ 'Exception calling connectivity future "done" callback!')
+
+
+class RpcMethodHandler(
+ collections.namedtuple('_RpcMethodHandler', (
'request_streaming',
'response_streaming',
'request_deserializer',
@@ -40,130 +40,130 @@ class RpcMethodHandler(
'stream_unary',
'stream_stream',
)), grpc.RpcMethodHandler):
- pass
-
-
-class DictionaryGenericHandler(grpc.ServiceRpcHandler):
-
- def __init__(self, service, method_handlers):
- self._name = service
- self._method_handlers = {
- _common.fully_qualified_method(service, method): method_handler
- for method, method_handler in six.iteritems(method_handlers)
- }
-
- def service_name(self):
- return self._name
-
- def service(self, handler_call_details):
- return self._method_handlers.get(handler_call_details.method)
-
-
-class _ChannelReadyFuture(grpc.Future):
-
- def __init__(self, channel):
- self._condition = threading.Condition()
- self._channel = channel
-
- self._matured = False
- self._cancelled = False
- self._done_callbacks = []
-
- def _block(self, timeout):
- until = None if timeout is None else time.time() + timeout
- with self._condition:
- while True:
- if self._cancelled:
- raise grpc.FutureCancelledError()
- elif self._matured:
- return
- else:
- if until is None:
- self._condition.wait()
- else:
- remaining = until - time.time()
- if remaining < 0:
- raise grpc.FutureTimeoutError()
- else:
- self._condition.wait(timeout=remaining)
-
- def _update(self, connectivity):
- with self._condition:
- if (not self._cancelled and
- connectivity is grpc.ChannelConnectivity.READY):
- self._matured = True
- self._channel.unsubscribe(self._update)
- self._condition.notify_all()
- done_callbacks = tuple(self._done_callbacks)
- self._done_callbacks = None
- else:
- return
-
- for done_callback in done_callbacks:
+ pass
+
+
+class DictionaryGenericHandler(grpc.ServiceRpcHandler):
+
+ def __init__(self, service, method_handlers):
+ self._name = service
+ self._method_handlers = {
+ _common.fully_qualified_method(service, method): method_handler
+ for method, method_handler in six.iteritems(method_handlers)
+ }
+
+ def service_name(self):
+ return self._name
+
+ def service(self, handler_call_details):
+ return self._method_handlers.get(handler_call_details.method)
+
+
+class _ChannelReadyFuture(grpc.Future):
+
+ def __init__(self, channel):
+ self._condition = threading.Condition()
+ self._channel = channel
+
+ self._matured = False
+ self._cancelled = False
+ self._done_callbacks = []
+
+ def _block(self, timeout):
+ until = None if timeout is None else time.time() + timeout
+ with self._condition:
+ while True:
+ if self._cancelled:
+ raise grpc.FutureCancelledError()
+ elif self._matured:
+ return
+ else:
+ if until is None:
+ self._condition.wait()
+ else:
+ remaining = until - time.time()
+ if remaining < 0:
+ raise grpc.FutureTimeoutError()
+ else:
+ self._condition.wait(timeout=remaining)
+
+ def _update(self, connectivity):
+ with self._condition:
+ if (not self._cancelled and
+ connectivity is grpc.ChannelConnectivity.READY):
+ self._matured = True
+ self._channel.unsubscribe(self._update)
+ self._condition.notify_all()
+ done_callbacks = tuple(self._done_callbacks)
+ self._done_callbacks = None
+ else:
+ return
+
+ for done_callback in done_callbacks:
try:
done_callback(self)
except Exception: # pylint: disable=broad-except
_LOGGER.exception(_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE)
-
- def cancel(self):
- with self._condition:
- if not self._matured:
- self._cancelled = True
- self._channel.unsubscribe(self._update)
- self._condition.notify_all()
- done_callbacks = tuple(self._done_callbacks)
- self._done_callbacks = None
- else:
- return False
-
- for done_callback in done_callbacks:
+
+ def cancel(self):
+ with self._condition:
+ if not self._matured:
+ self._cancelled = True
+ self._channel.unsubscribe(self._update)
+ self._condition.notify_all()
+ done_callbacks = tuple(self._done_callbacks)
+ self._done_callbacks = None
+ else:
+ return False
+
+ for done_callback in done_callbacks:
try:
done_callback(self)
except Exception: # pylint: disable=broad-except
_LOGGER.exception(_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE)
-
+
return True
- def cancelled(self):
- with self._condition:
- return self._cancelled
-
- def running(self):
- with self._condition:
- return not self._cancelled and not self._matured
-
- def done(self):
- with self._condition:
- return self._cancelled or self._matured
-
- def result(self, timeout=None):
- self._block(timeout)
-
- def exception(self, timeout=None):
- self._block(timeout)
-
- def traceback(self, timeout=None):
- self._block(timeout)
-
- def add_done_callback(self, fn):
- with self._condition:
- if not self._cancelled and not self._matured:
- self._done_callbacks.append(fn)
- return
-
- fn(self)
-
- def start(self):
- with self._condition:
- self._channel.subscribe(self._update, try_to_connect=True)
-
- def __del__(self):
- with self._condition:
- if not self._cancelled and not self._matured:
- self._channel.unsubscribe(self._update)
-
-
-def channel_ready_future(channel):
- ready_future = _ChannelReadyFuture(channel)
- ready_future.start()
- return ready_future
+ def cancelled(self):
+ with self._condition:
+ return self._cancelled
+
+ def running(self):
+ with self._condition:
+ return not self._cancelled and not self._matured
+
+ def done(self):
+ with self._condition:
+ return self._cancelled or self._matured
+
+ def result(self, timeout=None):
+ self._block(timeout)
+
+ def exception(self, timeout=None):
+ self._block(timeout)
+
+ def traceback(self, timeout=None):
+ self._block(timeout)
+
+ def add_done_callback(self, fn):
+ with self._condition:
+ if not self._cancelled and not self._matured:
+ self._done_callbacks.append(fn)
+ return
+
+ fn(self)
+
+ def start(self):
+ with self._condition:
+ self._channel.subscribe(self._update, try_to_connect=True)
+
+ def __del__(self):
+ with self._condition:
+ if not self._cancelled and not self._matured:
+ self._channel.unsubscribe(self._update)
+
+
+def channel_ready_future(channel):
+ ready_future = _ChannelReadyFuture(channel)
+ ready_future.start()
+ return ready_future
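The hunk above ends by defining channel_ready_future, which grpcio re-exports as grpc.channel_ready_future. A minimal usage sketch of that helper, assuming a server is reachable at localhost:50051 (the address and the 5-second timeout are illustrative assumptions, not taken from the patch):

    import grpc

    channel = grpc.insecure_channel('localhost:50051')
    try:
        # Block until the channel reports READY or the timeout elapses.
        grpc.channel_ready_future(channel).result(timeout=5)
    except grpc.FutureTimeoutError:
        print('channel did not become READY within 5 seconds')
    else:
        print('channel is READY; safe to issue RPCs')
    finally:
        channel.close()
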
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/beta/__init__.py b/contrib/libs/grpc/src/python/grpcio/grpc/beta/__init__.py
index 5fb4f3c3cf..1841020f80 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/beta/__init__.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/beta/__init__.py
@@ -1,11 +1,11 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/beta/_client_adaptations.py b/contrib/libs/grpc/src/python/grpcio/grpc/beta/_client_adaptations.py
index 652ae0ea17..a0b7465a57 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/beta/_client_adaptations.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/beta/_client_adaptations.py
@@ -1,29 +1,29 @@
# Copyright 2016 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Translates gRPC's client-side API into gRPC's client-side Beta API."""
-
-import grpc
-from grpc import _common
+"""Translates gRPC's client-side API into gRPC's client-side Beta API."""
+
+import grpc
+from grpc import _common
from grpc.beta import _metadata
-from grpc.beta import interfaces
-from grpc.framework.common import cardinality
-from grpc.framework.foundation import future
-from grpc.framework.interfaces.face import face
-
+from grpc.beta import interfaces
+from grpc.framework.common import cardinality
+from grpc.framework.foundation import future
+from grpc.framework.interfaces.face import face
+
# pylint: disable=too-many-arguments,too-many-locals,unused-argument
-_STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS = {
+_STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS = {
grpc.StatusCode.CANCELLED:
(face.Abortion.Kind.CANCELLED, face.CancellationError),
grpc.StatusCode.UNKNOWN:
@@ -32,418 +32,418 @@ _STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS = {
(face.Abortion.Kind.EXPIRED, face.ExpirationError),
grpc.StatusCode.UNIMPLEMENTED:
(face.Abortion.Kind.LOCAL_FAILURE, face.LocalError),
-}
-
-
-def _effective_metadata(metadata, metadata_transformer):
- non_none_metadata = () if metadata is None else metadata
- if metadata_transformer is None:
- return non_none_metadata
- else:
- return metadata_transformer(non_none_metadata)
-
-
-def _credentials(grpc_call_options):
- return None if grpc_call_options is None else grpc_call_options.credentials
-
-
-def _abortion(rpc_error_call):
- code = rpc_error_call.code()
- pair = _STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS.get(code)
- error_kind = face.Abortion.Kind.LOCAL_FAILURE if pair is None else pair[0]
+}
+
+
+def _effective_metadata(metadata, metadata_transformer):
+ non_none_metadata = () if metadata is None else metadata
+ if metadata_transformer is None:
+ return non_none_metadata
+ else:
+ return metadata_transformer(non_none_metadata)
+
+
+def _credentials(grpc_call_options):
+ return None if grpc_call_options is None else grpc_call_options.credentials
+
+
+def _abortion(rpc_error_call):
+ code = rpc_error_call.code()
+ pair = _STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS.get(code)
+ error_kind = face.Abortion.Kind.LOCAL_FAILURE if pair is None else pair[0]
return face.Abortion(error_kind, rpc_error_call.initial_metadata(),
- rpc_error_call.trailing_metadata(), code,
- rpc_error_call.details())
-
-
-def _abortion_error(rpc_error_call):
- code = rpc_error_call.code()
- pair = _STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS.get(code)
- exception_class = face.AbortionError if pair is None else pair[1]
- return exception_class(rpc_error_call.initial_metadata(),
- rpc_error_call.trailing_metadata(), code,
- rpc_error_call.details())
-
-
-class _InvocationProtocolContext(interfaces.GRPCInvocationContext):
-
- def disable_next_request_compression(self):
- pass # TODO(https://github.com/grpc/grpc/issues/4078): design, implement.
-
-
-class _Rendezvous(future.Future, face.Call):
-
- def __init__(self, response_future, response_iterator, call):
- self._future = response_future
- self._iterator = response_iterator
- self._call = call
-
- def cancel(self):
- return self._call.cancel()
-
- def cancelled(self):
- return self._future.cancelled()
-
- def running(self):
- return self._future.running()
-
- def done(self):
- return self._future.done()
-
- def result(self, timeout=None):
- try:
- return self._future.result(timeout=timeout)
- except grpc.RpcError as rpc_error_call:
- raise _abortion_error(rpc_error_call)
- except grpc.FutureTimeoutError:
- raise future.TimeoutError()
- except grpc.FutureCancelledError:
- raise future.CancelledError()
-
- def exception(self, timeout=None):
- try:
- rpc_error_call = self._future.exception(timeout=timeout)
- if rpc_error_call is None:
- return None
- else:
- return _abortion_error(rpc_error_call)
- except grpc.FutureTimeoutError:
- raise future.TimeoutError()
- except grpc.FutureCancelledError:
- raise future.CancelledError()
-
- def traceback(self, timeout=None):
- try:
- return self._future.traceback(timeout=timeout)
- except grpc.FutureTimeoutError:
- raise future.TimeoutError()
- except grpc.FutureCancelledError:
- raise future.CancelledError()
-
- def add_done_callback(self, fn):
- self._future.add_done_callback(lambda ignored_callback: fn(self))
-
- def __iter__(self):
- return self
-
- def _next(self):
- try:
- return next(self._iterator)
- except grpc.RpcError as rpc_error_call:
- raise _abortion_error(rpc_error_call)
-
- def __next__(self):
- return self._next()
-
- def next(self):
- return self._next()
-
- def is_active(self):
- return self._call.is_active()
-
- def time_remaining(self):
- return self._call.time_remaining()
-
- def add_abortion_callback(self, abortion_callback):
-
- def done_callback():
- if self.code() is not grpc.StatusCode.OK:
- abortion_callback(_abortion(self._call))
-
- registered = self._call.add_callback(done_callback)
- return None if registered else done_callback()
-
- def protocol_context(self):
- return _InvocationProtocolContext()
-
- def initial_metadata(self):
+ rpc_error_call.trailing_metadata(), code,
+ rpc_error_call.details())
+
+
+def _abortion_error(rpc_error_call):
+ code = rpc_error_call.code()
+ pair = _STATUS_CODE_TO_ABORTION_KIND_AND_ABORTION_ERROR_CLASS.get(code)
+ exception_class = face.AbortionError if pair is None else pair[1]
+ return exception_class(rpc_error_call.initial_metadata(),
+ rpc_error_call.trailing_metadata(), code,
+ rpc_error_call.details())
+
+
+class _InvocationProtocolContext(interfaces.GRPCInvocationContext):
+
+ def disable_next_request_compression(self):
+ pass # TODO(https://github.com/grpc/grpc/issues/4078): design, implement.
+
+
+class _Rendezvous(future.Future, face.Call):
+
+ def __init__(self, response_future, response_iterator, call):
+ self._future = response_future
+ self._iterator = response_iterator
+ self._call = call
+
+ def cancel(self):
+ return self._call.cancel()
+
+ def cancelled(self):
+ return self._future.cancelled()
+
+ def running(self):
+ return self._future.running()
+
+ def done(self):
+ return self._future.done()
+
+ def result(self, timeout=None):
+ try:
+ return self._future.result(timeout=timeout)
+ except grpc.RpcError as rpc_error_call:
+ raise _abortion_error(rpc_error_call)
+ except grpc.FutureTimeoutError:
+ raise future.TimeoutError()
+ except grpc.FutureCancelledError:
+ raise future.CancelledError()
+
+ def exception(self, timeout=None):
+ try:
+ rpc_error_call = self._future.exception(timeout=timeout)
+ if rpc_error_call is None:
+ return None
+ else:
+ return _abortion_error(rpc_error_call)
+ except grpc.FutureTimeoutError:
+ raise future.TimeoutError()
+ except grpc.FutureCancelledError:
+ raise future.CancelledError()
+
+ def traceback(self, timeout=None):
+ try:
+ return self._future.traceback(timeout=timeout)
+ except grpc.FutureTimeoutError:
+ raise future.TimeoutError()
+ except grpc.FutureCancelledError:
+ raise future.CancelledError()
+
+ def add_done_callback(self, fn):
+ self._future.add_done_callback(lambda ignored_callback: fn(self))
+
+ def __iter__(self):
+ return self
+
+ def _next(self):
+ try:
+ return next(self._iterator)
+ except grpc.RpcError as rpc_error_call:
+ raise _abortion_error(rpc_error_call)
+
+ def __next__(self):
+ return self._next()
+
+ def next(self):
+ return self._next()
+
+ def is_active(self):
+ return self._call.is_active()
+
+ def time_remaining(self):
+ return self._call.time_remaining()
+
+ def add_abortion_callback(self, abortion_callback):
+
+ def done_callback():
+ if self.code() is not grpc.StatusCode.OK:
+ abortion_callback(_abortion(self._call))
+
+ registered = self._call.add_callback(done_callback)
+ return None if registered else done_callback()
+
+ def protocol_context(self):
+ return _InvocationProtocolContext()
+
+ def initial_metadata(self):
return _metadata.beta(self._call.initial_metadata())
-
- def terminal_metadata(self):
+
+ def terminal_metadata(self):
return _metadata.beta(self._call.terminal_metadata())
-
- def code(self):
- return self._call.code()
-
- def details(self):
- return self._call.details()
-
-
-def _blocking_unary_unary(channel, group, method, timeout, with_call,
- protocol_options, metadata, metadata_transformer,
- request, request_serializer, response_deserializer):
- try:
- multi_callable = channel.unary_unary(
- _common.fully_qualified_method(group, method),
- request_serializer=request_serializer,
- response_deserializer=response_deserializer)
- effective_metadata = _effective_metadata(metadata, metadata_transformer)
- if with_call:
- response, call = multi_callable.with_call(
- request,
- timeout=timeout,
+
+ def code(self):
+ return self._call.code()
+
+ def details(self):
+ return self._call.details()
+
+
+def _blocking_unary_unary(channel, group, method, timeout, with_call,
+ protocol_options, metadata, metadata_transformer,
+ request, request_serializer, response_deserializer):
+ try:
+ multi_callable = channel.unary_unary(
+ _common.fully_qualified_method(group, method),
+ request_serializer=request_serializer,
+ response_deserializer=response_deserializer)
+ effective_metadata = _effective_metadata(metadata, metadata_transformer)
+ if with_call:
+ response, call = multi_callable.with_call(
+ request,
+ timeout=timeout,
metadata=_metadata.unbeta(effective_metadata),
- credentials=_credentials(protocol_options))
- return response, _Rendezvous(None, None, call)
- else:
+ credentials=_credentials(protocol_options))
+ return response, _Rendezvous(None, None, call)
+ else:
return multi_callable(request,
timeout=timeout,
metadata=_metadata.unbeta(effective_metadata),
credentials=_credentials(protocol_options))
- except grpc.RpcError as rpc_error_call:
- raise _abortion_error(rpc_error_call)
-
-
-def _future_unary_unary(channel, group, method, timeout, protocol_options,
- metadata, metadata_transformer, request,
- request_serializer, response_deserializer):
- multi_callable = channel.unary_unary(
- _common.fully_qualified_method(group, method),
- request_serializer=request_serializer,
- response_deserializer=response_deserializer)
- effective_metadata = _effective_metadata(metadata, metadata_transformer)
- response_future = multi_callable.future(
- request,
- timeout=timeout,
+ except grpc.RpcError as rpc_error_call:
+ raise _abortion_error(rpc_error_call)
+
+
+def _future_unary_unary(channel, group, method, timeout, protocol_options,
+ metadata, metadata_transformer, request,
+ request_serializer, response_deserializer):
+ multi_callable = channel.unary_unary(
+ _common.fully_qualified_method(group, method),
+ request_serializer=request_serializer,
+ response_deserializer=response_deserializer)
+ effective_metadata = _effective_metadata(metadata, metadata_transformer)
+ response_future = multi_callable.future(
+ request,
+ timeout=timeout,
metadata=_metadata.unbeta(effective_metadata),
- credentials=_credentials(protocol_options))
- return _Rendezvous(response_future, None, response_future)
-
-
-def _unary_stream(channel, group, method, timeout, protocol_options, metadata,
- metadata_transformer, request, request_serializer,
- response_deserializer):
- multi_callable = channel.unary_stream(
- _common.fully_qualified_method(group, method),
- request_serializer=request_serializer,
- response_deserializer=response_deserializer)
- effective_metadata = _effective_metadata(metadata, metadata_transformer)
- response_iterator = multi_callable(
- request,
- timeout=timeout,
+ credentials=_credentials(protocol_options))
+ return _Rendezvous(response_future, None, response_future)
+
+
+def _unary_stream(channel, group, method, timeout, protocol_options, metadata,
+ metadata_transformer, request, request_serializer,
+ response_deserializer):
+ multi_callable = channel.unary_stream(
+ _common.fully_qualified_method(group, method),
+ request_serializer=request_serializer,
+ response_deserializer=response_deserializer)
+ effective_metadata = _effective_metadata(metadata, metadata_transformer)
+ response_iterator = multi_callable(
+ request,
+ timeout=timeout,
metadata=_metadata.unbeta(effective_metadata),
- credentials=_credentials(protocol_options))
- return _Rendezvous(None, response_iterator, response_iterator)
-
-
-def _blocking_stream_unary(channel, group, method, timeout, with_call,
- protocol_options, metadata, metadata_transformer,
- request_iterator, request_serializer,
- response_deserializer):
- try:
- multi_callable = channel.stream_unary(
- _common.fully_qualified_method(group, method),
- request_serializer=request_serializer,
- response_deserializer=response_deserializer)
- effective_metadata = _effective_metadata(metadata, metadata_transformer)
- if with_call:
- response, call = multi_callable.with_call(
- request_iterator,
- timeout=timeout,
+ credentials=_credentials(protocol_options))
+ return _Rendezvous(None, response_iterator, response_iterator)
+
+
+def _blocking_stream_unary(channel, group, method, timeout, with_call,
+ protocol_options, metadata, metadata_transformer,
+ request_iterator, request_serializer,
+ response_deserializer):
+ try:
+ multi_callable = channel.stream_unary(
+ _common.fully_qualified_method(group, method),
+ request_serializer=request_serializer,
+ response_deserializer=response_deserializer)
+ effective_metadata = _effective_metadata(metadata, metadata_transformer)
+ if with_call:
+ response, call = multi_callable.with_call(
+ request_iterator,
+ timeout=timeout,
metadata=_metadata.unbeta(effective_metadata),
- credentials=_credentials(protocol_options))
- return response, _Rendezvous(None, None, call)
- else:
+ credentials=_credentials(protocol_options))
+ return response, _Rendezvous(None, None, call)
+ else:
return multi_callable(request_iterator,
timeout=timeout,
metadata=_metadata.unbeta(effective_metadata),
credentials=_credentials(protocol_options))
- except grpc.RpcError as rpc_error_call:
- raise _abortion_error(rpc_error_call)
-
-
-def _future_stream_unary(channel, group, method, timeout, protocol_options,
- metadata, metadata_transformer, request_iterator,
- request_serializer, response_deserializer):
- multi_callable = channel.stream_unary(
- _common.fully_qualified_method(group, method),
- request_serializer=request_serializer,
- response_deserializer=response_deserializer)
- effective_metadata = _effective_metadata(metadata, metadata_transformer)
- response_future = multi_callable.future(
- request_iterator,
- timeout=timeout,
+ except grpc.RpcError as rpc_error_call:
+ raise _abortion_error(rpc_error_call)
+
+
+def _future_stream_unary(channel, group, method, timeout, protocol_options,
+ metadata, metadata_transformer, request_iterator,
+ request_serializer, response_deserializer):
+ multi_callable = channel.stream_unary(
+ _common.fully_qualified_method(group, method),
+ request_serializer=request_serializer,
+ response_deserializer=response_deserializer)
+ effective_metadata = _effective_metadata(metadata, metadata_transformer)
+ response_future = multi_callable.future(
+ request_iterator,
+ timeout=timeout,
metadata=_metadata.unbeta(effective_metadata),
- credentials=_credentials(protocol_options))
- return _Rendezvous(response_future, None, response_future)
-
-
-def _stream_stream(channel, group, method, timeout, protocol_options, metadata,
- metadata_transformer, request_iterator, request_serializer,
- response_deserializer):
- multi_callable = channel.stream_stream(
- _common.fully_qualified_method(group, method),
- request_serializer=request_serializer,
- response_deserializer=response_deserializer)
- effective_metadata = _effective_metadata(metadata, metadata_transformer)
- response_iterator = multi_callable(
- request_iterator,
- timeout=timeout,
+ credentials=_credentials(protocol_options))
+ return _Rendezvous(response_future, None, response_future)
+
+
+def _stream_stream(channel, group, method, timeout, protocol_options, metadata,
+ metadata_transformer, request_iterator, request_serializer,
+ response_deserializer):
+ multi_callable = channel.stream_stream(
+ _common.fully_qualified_method(group, method),
+ request_serializer=request_serializer,
+ response_deserializer=response_deserializer)
+ effective_metadata = _effective_metadata(metadata, metadata_transformer)
+ response_iterator = multi_callable(
+ request_iterator,
+ timeout=timeout,
metadata=_metadata.unbeta(effective_metadata),
- credentials=_credentials(protocol_options))
- return _Rendezvous(None, response_iterator, response_iterator)
-
-
-class _UnaryUnaryMultiCallable(face.UnaryUnaryMultiCallable):
-
- def __init__(self, channel, group, method, metadata_transformer,
- request_serializer, response_deserializer):
- self._channel = channel
- self._group = group
- self._method = method
- self._metadata_transformer = metadata_transformer
- self._request_serializer = request_serializer
- self._response_deserializer = response_deserializer
-
- def __call__(self,
- request,
- timeout,
- metadata=None,
- with_call=False,
- protocol_options=None):
+ credentials=_credentials(protocol_options))
+ return _Rendezvous(None, response_iterator, response_iterator)
+
+
+class _UnaryUnaryMultiCallable(face.UnaryUnaryMultiCallable):
+
+ def __init__(self, channel, group, method, metadata_transformer,
+ request_serializer, response_deserializer):
+ self._channel = channel
+ self._group = group
+ self._method = method
+ self._metadata_transformer = metadata_transformer
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
+
+ def __call__(self,
+ request,
+ timeout,
+ metadata=None,
+ with_call=False,
+ protocol_options=None):
return _blocking_unary_unary(self._channel, self._group, self._method,
timeout, with_call, protocol_options,
metadata, self._metadata_transformer,
request, self._request_serializer,
self._response_deserializer)
-
- def future(self, request, timeout, metadata=None, protocol_options=None):
+
+ def future(self, request, timeout, metadata=None, protocol_options=None):
return _future_unary_unary(self._channel, self._group, self._method,
timeout, protocol_options, metadata,
self._metadata_transformer, request,
self._request_serializer,
self._response_deserializer)
-
- def event(self,
- request,
- receiver,
- abortion_callback,
- timeout,
- metadata=None,
- protocol_options=None):
- raise NotImplementedError()
-
-
-class _UnaryStreamMultiCallable(face.UnaryStreamMultiCallable):
-
- def __init__(self, channel, group, method, metadata_transformer,
- request_serializer, response_deserializer):
- self._channel = channel
- self._group = group
- self._method = method
- self._metadata_transformer = metadata_transformer
- self._request_serializer = request_serializer
- self._response_deserializer = response_deserializer
-
- def __call__(self, request, timeout, metadata=None, protocol_options=None):
+
+ def event(self,
+ request,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ raise NotImplementedError()
+
+
+class _UnaryStreamMultiCallable(face.UnaryStreamMultiCallable):
+
+ def __init__(self, channel, group, method, metadata_transformer,
+ request_serializer, response_deserializer):
+ self._channel = channel
+ self._group = group
+ self._method = method
+ self._metadata_transformer = metadata_transformer
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
+
+ def __call__(self, request, timeout, metadata=None, protocol_options=None):
return _unary_stream(self._channel, self._group, self._method, timeout,
protocol_options, metadata,
self._metadata_transformer, request,
self._request_serializer,
self._response_deserializer)
-
- def event(self,
- request,
- receiver,
- abortion_callback,
- timeout,
- metadata=None,
- protocol_options=None):
- raise NotImplementedError()
-
-
-class _StreamUnaryMultiCallable(face.StreamUnaryMultiCallable):
-
- def __init__(self, channel, group, method, metadata_transformer,
- request_serializer, response_deserializer):
- self._channel = channel
- self._group = group
- self._method = method
- self._metadata_transformer = metadata_transformer
- self._request_serializer = request_serializer
- self._response_deserializer = response_deserializer
-
- def __call__(self,
- request_iterator,
- timeout,
- metadata=None,
- with_call=False,
- protocol_options=None):
+
+ def event(self,
+ request,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ raise NotImplementedError()
+
+
+class _StreamUnaryMultiCallable(face.StreamUnaryMultiCallable):
+
+ def __init__(self, channel, group, method, metadata_transformer,
+ request_serializer, response_deserializer):
+ self._channel = channel
+ self._group = group
+ self._method = method
+ self._metadata_transformer = metadata_transformer
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
+
+ def __call__(self,
+ request_iterator,
+ timeout,
+ metadata=None,
+ with_call=False,
+ protocol_options=None):
return _blocking_stream_unary(self._channel, self._group, self._method,
timeout, with_call, protocol_options,
metadata, self._metadata_transformer,
request_iterator,
self._request_serializer,
self._response_deserializer)
-
- def future(self,
- request_iterator,
- timeout,
- metadata=None,
- protocol_options=None):
+
+ def future(self,
+ request_iterator,
+ timeout,
+ metadata=None,
+ protocol_options=None):
return _future_stream_unary(self._channel, self._group, self._method,
timeout, protocol_options, metadata,
self._metadata_transformer,
request_iterator, self._request_serializer,
self._response_deserializer)
-
- def event(self,
- receiver,
- abortion_callback,
- timeout,
- metadata=None,
- protocol_options=None):
- raise NotImplementedError()
-
-
-class _StreamStreamMultiCallable(face.StreamStreamMultiCallable):
-
- def __init__(self, channel, group, method, metadata_transformer,
- request_serializer, response_deserializer):
- self._channel = channel
- self._group = group
- self._method = method
- self._metadata_transformer = metadata_transformer
- self._request_serializer = request_serializer
- self._response_deserializer = response_deserializer
-
- def __call__(self,
- request_iterator,
- timeout,
- metadata=None,
- protocol_options=None):
+
+ def event(self,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ raise NotImplementedError()
+
+
+class _StreamStreamMultiCallable(face.StreamStreamMultiCallable):
+
+ def __init__(self, channel, group, method, metadata_transformer,
+ request_serializer, response_deserializer):
+ self._channel = channel
+ self._group = group
+ self._method = method
+ self._metadata_transformer = metadata_transformer
+ self._request_serializer = request_serializer
+ self._response_deserializer = response_deserializer
+
+ def __call__(self,
+ request_iterator,
+ timeout,
+ metadata=None,
+ protocol_options=None):
return _stream_stream(self._channel, self._group, self._method, timeout,
protocol_options, metadata,
self._metadata_transformer, request_iterator,
self._request_serializer,
self._response_deserializer)
-
- def event(self,
- receiver,
- abortion_callback,
- timeout,
- metadata=None,
- protocol_options=None):
- raise NotImplementedError()
-
-
-class _GenericStub(face.GenericStub):
-
- def __init__(self, channel, metadata_transformer, request_serializers,
- response_deserializers):
- self._channel = channel
- self._metadata_transformer = metadata_transformer
- self._request_serializers = request_serializers or {}
- self._response_deserializers = response_deserializers or {}
-
- def blocking_unary_unary(self,
- group,
- method,
- request,
- timeout,
- metadata=None,
- with_call=None,
- protocol_options=None):
+
+ def event(self,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ raise NotImplementedError()
+
+
+class _GenericStub(face.GenericStub):
+
+ def __init__(self, channel, metadata_transformer, request_serializers,
+ response_deserializers):
+ self._channel = channel
+ self._metadata_transformer = metadata_transformer
+ self._request_serializers = request_serializers or {}
+ self._response_deserializers = response_deserializers or {}
+
+ def blocking_unary_unary(self,
+ group,
+ method,
+ request,
+ timeout,
+ metadata=None,
+ with_call=None,
+ protocol_options=None):
request_serializer = self._request_serializers.get((
group,
method,
@@ -452,18 +452,18 @@ class _GenericStub(face.GenericStub):
group,
method,
))
- return _blocking_unary_unary(self._channel, group, method, timeout,
- with_call, protocol_options, metadata,
- self._metadata_transformer, request,
- request_serializer, response_deserializer)
-
- def future_unary_unary(self,
- group,
- method,
- request,
- timeout,
- metadata=None,
- protocol_options=None):
+ return _blocking_unary_unary(self._channel, group, method, timeout,
+ with_call, protocol_options, metadata,
+ self._metadata_transformer, request,
+ request_serializer, response_deserializer)
+
+ def future_unary_unary(self,
+ group,
+ method,
+ request,
+ timeout,
+ metadata=None,
+ protocol_options=None):
request_serializer = self._request_serializers.get((
group,
method,
@@ -472,18 +472,18 @@ class _GenericStub(face.GenericStub):
group,
method,
))
- return _future_unary_unary(self._channel, group, method, timeout,
- protocol_options, metadata,
- self._metadata_transformer, request,
- request_serializer, response_deserializer)
-
- def inline_unary_stream(self,
- group,
- method,
- request,
- timeout,
- metadata=None,
- protocol_options=None):
+ return _future_unary_unary(self._channel, group, method, timeout,
+ protocol_options, metadata,
+ self._metadata_transformer, request,
+ request_serializer, response_deserializer)
+
+ def inline_unary_stream(self,
+ group,
+ method,
+ request,
+ timeout,
+ metadata=None,
+ protocol_options=None):
request_serializer = self._request_serializers.get((
group,
method,
@@ -492,19 +492,19 @@ class _GenericStub(face.GenericStub):
group,
method,
))
- return _unary_stream(self._channel, group, method, timeout,
- protocol_options, metadata,
- self._metadata_transformer, request,
- request_serializer, response_deserializer)
-
- def blocking_stream_unary(self,
- group,
- method,
- request_iterator,
- timeout,
- metadata=None,
- with_call=None,
- protocol_options=None):
+ return _unary_stream(self._channel, group, method, timeout,
+ protocol_options, metadata,
+ self._metadata_transformer, request,
+ request_serializer, response_deserializer)
+
+ def blocking_stream_unary(self,
+ group,
+ method,
+ request_iterator,
+ timeout,
+ metadata=None,
+ with_call=None,
+ protocol_options=None):
request_serializer = self._request_serializers.get((
group,
method,
@@ -518,14 +518,14 @@ class _GenericStub(face.GenericStub):
self._metadata_transformer,
request_iterator, request_serializer,
response_deserializer)
-
- def future_stream_unary(self,
- group,
- method,
- request_iterator,
- timeout,
- metadata=None,
- protocol_options=None):
+
+ def future_stream_unary(self,
+ group,
+ method,
+ request_iterator,
+ timeout,
+ metadata=None,
+ protocol_options=None):
request_serializer = self._request_serializers.get((
group,
method,
@@ -539,14 +539,14 @@ class _GenericStub(face.GenericStub):
self._metadata_transformer,
request_iterator, request_serializer,
response_deserializer)
-
- def inline_stream_stream(self,
- group,
- method,
- request_iterator,
- timeout,
- metadata=None,
- protocol_options=None):
+
+ def inline_stream_stream(self,
+ group,
+ method,
+ request_iterator,
+ timeout,
+ metadata=None,
+ protocol_options=None):
request_serializer = self._request_serializers.get((
group,
method,
@@ -555,54 +555,54 @@ class _GenericStub(face.GenericStub):
group,
method,
))
- return _stream_stream(self._channel, group, method, timeout,
- protocol_options, metadata,
- self._metadata_transformer, request_iterator,
- request_serializer, response_deserializer)
-
- def event_unary_unary(self,
- group,
- method,
- request,
- receiver,
- abortion_callback,
- timeout,
- metadata=None,
- protocol_options=None):
- raise NotImplementedError()
-
- def event_unary_stream(self,
- group,
- method,
- request,
- receiver,
- abortion_callback,
- timeout,
- metadata=None,
- protocol_options=None):
- raise NotImplementedError()
-
- def event_stream_unary(self,
- group,
- method,
- receiver,
- abortion_callback,
- timeout,
- metadata=None,
- protocol_options=None):
- raise NotImplementedError()
-
- def event_stream_stream(self,
- group,
- method,
- receiver,
- abortion_callback,
- timeout,
- metadata=None,
- protocol_options=None):
- raise NotImplementedError()
-
- def unary_unary(self, group, method):
+ return _stream_stream(self._channel, group, method, timeout,
+ protocol_options, metadata,
+ self._metadata_transformer, request_iterator,
+ request_serializer, response_deserializer)
+
+ def event_unary_unary(self,
+ group,
+ method,
+ request,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ raise NotImplementedError()
+
+ def event_unary_stream(self,
+ group,
+ method,
+ request,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ raise NotImplementedError()
+
+ def event_stream_unary(self,
+ group,
+ method,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ raise NotImplementedError()
+
+ def event_stream_stream(self,
+ group,
+ method,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ raise NotImplementedError()
+
+ def unary_unary(self, group, method):
request_serializer = self._request_serializers.get((
group,
method,
@@ -615,8 +615,8 @@ class _GenericStub(face.GenericStub):
self._metadata_transformer,
request_serializer,
response_deserializer)
-
- def unary_stream(self, group, method):
+
+ def unary_stream(self, group, method):
request_serializer = self._request_serializers.get((
group,
method,
@@ -629,8 +629,8 @@ class _GenericStub(face.GenericStub):
self._metadata_transformer,
request_serializer,
response_deserializer)
-
- def stream_unary(self, group, method):
+
+ def stream_unary(self, group, method):
request_serializer = self._request_serializers.get((
group,
method,
@@ -643,8 +643,8 @@ class _GenericStub(face.GenericStub):
self._metadata_transformer,
request_serializer,
response_deserializer)
-
- def stream_stream(self, group, method):
+
+ def stream_stream(self, group, method):
request_serializer = self._request_serializers.get((
group,
method,
@@ -657,50 +657,50 @@ class _GenericStub(face.GenericStub):
self._metadata_transformer,
request_serializer,
response_deserializer)
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- return False
-
-
-class _DynamicStub(face.DynamicStub):
-
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ return False
+
+
+class _DynamicStub(face.DynamicStub):
+
def __init__(self, backing_generic_stub, group, cardinalities):
self._generic_stub = backing_generic_stub
- self._group = group
- self._cardinalities = cardinalities
-
- def __getattr__(self, attr):
- method_cardinality = self._cardinalities.get(attr)
- if method_cardinality is cardinality.Cardinality.UNARY_UNARY:
- return self._generic_stub.unary_unary(self._group, attr)
- elif method_cardinality is cardinality.Cardinality.UNARY_STREAM:
- return self._generic_stub.unary_stream(self._group, attr)
- elif method_cardinality is cardinality.Cardinality.STREAM_UNARY:
- return self._generic_stub.stream_unary(self._group, attr)
- elif method_cardinality is cardinality.Cardinality.STREAM_STREAM:
- return self._generic_stub.stream_stream(self._group, attr)
- else:
+ self._group = group
+ self._cardinalities = cardinalities
+
+ def __getattr__(self, attr):
+ method_cardinality = self._cardinalities.get(attr)
+ if method_cardinality is cardinality.Cardinality.UNARY_UNARY:
+ return self._generic_stub.unary_unary(self._group, attr)
+ elif method_cardinality is cardinality.Cardinality.UNARY_STREAM:
+ return self._generic_stub.unary_stream(self._group, attr)
+ elif method_cardinality is cardinality.Cardinality.STREAM_UNARY:
+ return self._generic_stub.stream_unary(self._group, attr)
+ elif method_cardinality is cardinality.Cardinality.STREAM_STREAM:
+ return self._generic_stub.stream_stream(self._group, attr)
+ else:
raise AttributeError('_DynamicStub object has no attribute "%s"!' %
attr)
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- return False
-
-
-def generic_stub(channel, host, metadata_transformer, request_serializers,
- response_deserializers):
- return _GenericStub(channel, metadata_transformer, request_serializers,
- response_deserializers)
-
-
-def dynamic_stub(channel, service, cardinalities, host, metadata_transformer,
- request_serializers, response_deserializers):
- return _DynamicStub(
- _GenericStub(channel, metadata_transformer, request_serializers,
- response_deserializers), service, cardinalities)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ return False
+
+
+def generic_stub(channel, host, metadata_transformer, request_serializers,
+ response_deserializers):
+ return _GenericStub(channel, metadata_transformer, request_serializers,
+ response_deserializers)
+
+
+def dynamic_stub(channel, service, cardinalities, host, metadata_transformer,
+ request_serializers, response_deserializers):
+ return _DynamicStub(
+ _GenericStub(channel, metadata_transformer, request_serializers,
+ response_deserializers), service, cardinalities)
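_client_adaptations.py above layers the legacy beta client API on top of plain channel multi-callables. The pattern wrapped by _blocking_unary_unary and _future_unary_unary is roughly the sketch below; the method name, byte payload and identity (de)serializers are placeholders, and a server at localhost:50051 is assumed:

    import grpc

    channel = grpc.insecure_channel('localhost:50051')
    multi_callable = channel.unary_unary(
        '/example.Service/Method',              # hypothetical fully qualified method
        request_serializer=lambda msg: msg,     # stand-ins for protobuf
        response_deserializer=lambda raw: raw)  # SerializeToString / FromString

    # Blocking form (what _blocking_unary_unary does when with_call=True):
    response, call = multi_callable.with_call(b'request-bytes', timeout=5)

    # Future form (what _future_unary_unary wraps in a _Rendezvous):
    response_future = multi_callable.future(b'request-bytes', timeout=5)
    response = response_future.result()
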
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/beta/_server_adaptations.py b/contrib/libs/grpc/src/python/grpcio/grpc/beta/_server_adaptations.py
index 8843a3c550..83f669a51f 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/beta/_server_adaptations.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/beta/_server_adaptations.py
@@ -1,247 +1,247 @@
# Copyright 2016 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Translates gRPC's server-side API into gRPC's server-side Beta API."""
-
-import collections
-import threading
-
-import grpc
-from grpc import _common
+"""Translates gRPC's server-side API into gRPC's server-side Beta API."""
+
+import collections
+import threading
+
+import grpc
+from grpc import _common
from grpc.beta import _metadata
-from grpc.beta import interfaces
-from grpc.framework.common import cardinality
-from grpc.framework.common import style
-from grpc.framework.foundation import abandonment
-from grpc.framework.foundation import logging_pool
-from grpc.framework.foundation import stream
-from grpc.framework.interfaces.face import face
-
+from grpc.beta import interfaces
+from grpc.framework.common import cardinality
+from grpc.framework.common import style
+from grpc.framework.foundation import abandonment
+from grpc.framework.foundation import logging_pool
+from grpc.framework.foundation import stream
+from grpc.framework.interfaces.face import face
+
# pylint: disable=too-many-return-statements
-_DEFAULT_POOL_SIZE = 8
-
-
-class _ServerProtocolContext(interfaces.GRPCServicerContext):
-
- def __init__(self, servicer_context):
- self._servicer_context = servicer_context
-
- def peer(self):
- return self._servicer_context.peer()
-
- def disable_next_response_compression(self):
- pass # TODO(https://github.com/grpc/grpc/issues/4078): design, implement.
-
-
-class _FaceServicerContext(face.ServicerContext):
-
- def __init__(self, servicer_context):
- self._servicer_context = servicer_context
-
- def is_active(self):
- return self._servicer_context.is_active()
-
- def time_remaining(self):
- return self._servicer_context.time_remaining()
-
- def add_abortion_callback(self, abortion_callback):
- raise NotImplementedError(
- 'add_abortion_callback no longer supported server-side!')
-
- def cancel(self):
- self._servicer_context.cancel()
-
- def protocol_context(self):
- return _ServerProtocolContext(self._servicer_context)
-
- def invocation_metadata(self):
+_DEFAULT_POOL_SIZE = 8
+
+
+class _ServerProtocolContext(interfaces.GRPCServicerContext):
+
+ def __init__(self, servicer_context):
+ self._servicer_context = servicer_context
+
+ def peer(self):
+ return self._servicer_context.peer()
+
+ def disable_next_response_compression(self):
+ pass # TODO(https://github.com/grpc/grpc/issues/4078): design, implement.
+
+
+class _FaceServicerContext(face.ServicerContext):
+
+ def __init__(self, servicer_context):
+ self._servicer_context = servicer_context
+
+ def is_active(self):
+ return self._servicer_context.is_active()
+
+ def time_remaining(self):
+ return self._servicer_context.time_remaining()
+
+ def add_abortion_callback(self, abortion_callback):
+ raise NotImplementedError(
+ 'add_abortion_callback no longer supported server-side!')
+
+ def cancel(self):
+ self._servicer_context.cancel()
+
+ def protocol_context(self):
+ return _ServerProtocolContext(self._servicer_context)
+
+ def invocation_metadata(self):
return _metadata.beta(self._servicer_context.invocation_metadata())
-
- def initial_metadata(self, initial_metadata):
+
+ def initial_metadata(self, initial_metadata):
self._servicer_context.send_initial_metadata(
_metadata.unbeta(initial_metadata))
-
- def terminal_metadata(self, terminal_metadata):
+
+ def terminal_metadata(self, terminal_metadata):
self._servicer_context.set_terminal_metadata(
_metadata.unbeta(terminal_metadata))
-
- def code(self, code):
- self._servicer_context.set_code(code)
-
- def details(self, details):
- self._servicer_context.set_details(details)
-
-
-def _adapt_unary_request_inline(unary_request_inline):
-
- def adaptation(request, servicer_context):
- return unary_request_inline(request,
- _FaceServicerContext(servicer_context))
-
- return adaptation
-
-
-def _adapt_stream_request_inline(stream_request_inline):
-
- def adaptation(request_iterator, servicer_context):
- return stream_request_inline(request_iterator,
- _FaceServicerContext(servicer_context))
-
- return adaptation
-
-
-class _Callback(stream.Consumer):
-
- def __init__(self):
- self._condition = threading.Condition()
- self._values = []
- self._terminated = False
- self._cancelled = False
-
- def consume(self, value):
- with self._condition:
- self._values.append(value)
- self._condition.notify_all()
-
- def terminate(self):
- with self._condition:
- self._terminated = True
- self._condition.notify_all()
-
- def consume_and_terminate(self, value):
- with self._condition:
- self._values.append(value)
- self._terminated = True
- self._condition.notify_all()
-
- def cancel(self):
- with self._condition:
- self._cancelled = True
- self._condition.notify_all()
-
- def draw_one_value(self):
- with self._condition:
- while True:
- if self._cancelled:
- raise abandonment.Abandoned()
- elif self._values:
- return self._values.pop(0)
- elif self._terminated:
- return None
- else:
- self._condition.wait()
-
- def draw_all_values(self):
- with self._condition:
- while True:
- if self._cancelled:
- raise abandonment.Abandoned()
- elif self._terminated:
- all_values = tuple(self._values)
- self._values = None
- return all_values
- else:
- self._condition.wait()
-
-
-def _run_request_pipe_thread(request_iterator, request_consumer,
- servicer_context):
- thread_joined = threading.Event()
-
- def pipe_requests():
- for request in request_iterator:
- if not servicer_context.is_active() or thread_joined.is_set():
- return
- request_consumer.consume(request)
- if not servicer_context.is_active() or thread_joined.is_set():
- return
- request_consumer.terminate()
-
+
+ def code(self, code):
+ self._servicer_context.set_code(code)
+
+ def details(self, details):
+ self._servicer_context.set_details(details)
+
+
+def _adapt_unary_request_inline(unary_request_inline):
+
+ def adaptation(request, servicer_context):
+ return unary_request_inline(request,
+ _FaceServicerContext(servicer_context))
+
+ return adaptation
+
+
+def _adapt_stream_request_inline(stream_request_inline):
+
+ def adaptation(request_iterator, servicer_context):
+ return stream_request_inline(request_iterator,
+ _FaceServicerContext(servicer_context))
+
+ return adaptation
+
+
+class _Callback(stream.Consumer):
+
+ def __init__(self):
+ self._condition = threading.Condition()
+ self._values = []
+ self._terminated = False
+ self._cancelled = False
+
+ def consume(self, value):
+ with self._condition:
+ self._values.append(value)
+ self._condition.notify_all()
+
+ def terminate(self):
+ with self._condition:
+ self._terminated = True
+ self._condition.notify_all()
+
+ def consume_and_terminate(self, value):
+ with self._condition:
+ self._values.append(value)
+ self._terminated = True
+ self._condition.notify_all()
+
+ def cancel(self):
+ with self._condition:
+ self._cancelled = True
+ self._condition.notify_all()
+
+ def draw_one_value(self):
+ with self._condition:
+ while True:
+ if self._cancelled:
+ raise abandonment.Abandoned()
+ elif self._values:
+ return self._values.pop(0)
+ elif self._terminated:
+ return None
+ else:
+ self._condition.wait()
+
+ def draw_all_values(self):
+ with self._condition:
+ while True:
+ if self._cancelled:
+ raise abandonment.Abandoned()
+ elif self._terminated:
+ all_values = tuple(self._values)
+ self._values = None
+ return all_values
+ else:
+ self._condition.wait()
+
+
+def _run_request_pipe_thread(request_iterator, request_consumer,
+ servicer_context):
+ thread_joined = threading.Event()
+
+ def pipe_requests():
+ for request in request_iterator:
+ if not servicer_context.is_active() or thread_joined.is_set():
+ return
+ request_consumer.consume(request)
+ if not servicer_context.is_active() or thread_joined.is_set():
+ return
+ request_consumer.terminate()
+
request_pipe_thread = threading.Thread(target=pipe_requests)
request_pipe_thread.daemon = True
- request_pipe_thread.start()
-
-
-def _adapt_unary_unary_event(unary_unary_event):
-
- def adaptation(request, servicer_context):
- callback = _Callback()
- if not servicer_context.add_callback(callback.cancel):
- raise abandonment.Abandoned()
- unary_unary_event(request, callback.consume_and_terminate,
- _FaceServicerContext(servicer_context))
- return callback.draw_all_values()[0]
-
- return adaptation
-
-
-def _adapt_unary_stream_event(unary_stream_event):
-
- def adaptation(request, servicer_context):
- callback = _Callback()
- if not servicer_context.add_callback(callback.cancel):
- raise abandonment.Abandoned()
- unary_stream_event(request, callback,
- _FaceServicerContext(servicer_context))
- while True:
- response = callback.draw_one_value()
- if response is None:
- return
- else:
- yield response
-
- return adaptation
-
-
-def _adapt_stream_unary_event(stream_unary_event):
-
- def adaptation(request_iterator, servicer_context):
- callback = _Callback()
- if not servicer_context.add_callback(callback.cancel):
- raise abandonment.Abandoned()
- request_consumer = stream_unary_event(
- callback.consume_and_terminate,
- _FaceServicerContext(servicer_context))
- _run_request_pipe_thread(request_iterator, request_consumer,
- servicer_context)
- return callback.draw_all_values()[0]
-
- return adaptation
-
-
-def _adapt_stream_stream_event(stream_stream_event):
-
- def adaptation(request_iterator, servicer_context):
- callback = _Callback()
- if not servicer_context.add_callback(callback.cancel):
- raise abandonment.Abandoned()
- request_consumer = stream_stream_event(
- callback, _FaceServicerContext(servicer_context))
- _run_request_pipe_thread(request_iterator, request_consumer,
- servicer_context)
- while True:
- response = callback.draw_one_value()
- if response is None:
- return
- else:
- yield response
-
- return adaptation
-
-
-class _SimpleMethodHandler(
- collections.namedtuple('_MethodHandler', (
+ request_pipe_thread.start()
+
+
+def _adapt_unary_unary_event(unary_unary_event):
+
+ def adaptation(request, servicer_context):
+ callback = _Callback()
+ if not servicer_context.add_callback(callback.cancel):
+ raise abandonment.Abandoned()
+ unary_unary_event(request, callback.consume_and_terminate,
+ _FaceServicerContext(servicer_context))
+ return callback.draw_all_values()[0]
+
+ return adaptation
+
+
+def _adapt_unary_stream_event(unary_stream_event):
+
+ def adaptation(request, servicer_context):
+ callback = _Callback()
+ if not servicer_context.add_callback(callback.cancel):
+ raise abandonment.Abandoned()
+ unary_stream_event(request, callback,
+ _FaceServicerContext(servicer_context))
+ while True:
+ response = callback.draw_one_value()
+ if response is None:
+ return
+ else:
+ yield response
+
+ return adaptation
+
+
+def _adapt_stream_unary_event(stream_unary_event):
+
+ def adaptation(request_iterator, servicer_context):
+ callback = _Callback()
+ if not servicer_context.add_callback(callback.cancel):
+ raise abandonment.Abandoned()
+ request_consumer = stream_unary_event(
+ callback.consume_and_terminate,
+ _FaceServicerContext(servicer_context))
+ _run_request_pipe_thread(request_iterator, request_consumer,
+ servicer_context)
+ return callback.draw_all_values()[0]
+
+ return adaptation
+
+
+def _adapt_stream_stream_event(stream_stream_event):
+
+ def adaptation(request_iterator, servicer_context):
+ callback = _Callback()
+ if not servicer_context.add_callback(callback.cancel):
+ raise abandonment.Abandoned()
+ request_consumer = stream_stream_event(
+ callback, _FaceServicerContext(servicer_context))
+ _run_request_pipe_thread(request_iterator, request_consumer,
+ servicer_context)
+ while True:
+ response = callback.draw_one_value()
+ if response is None:
+ return
+ else:
+ yield response
+
+ return adaptation
+
+
+class _SimpleMethodHandler(
+ collections.namedtuple('_MethodHandler', (
'request_streaming',
'response_streaming',
'request_deserializer',
@@ -251,135 +251,135 @@ class _SimpleMethodHandler(
'stream_unary',
'stream_stream',
)), grpc.RpcMethodHandler):
- pass
-
-
-def _simple_method_handler(implementation, request_deserializer,
- response_serializer):
- if implementation.style is style.Service.INLINE:
- if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY:
+ pass
+
+
+def _simple_method_handler(implementation, request_deserializer,
+ response_serializer):
+ if implementation.style is style.Service.INLINE:
+ if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY:
return _SimpleMethodHandler(
False, False, request_deserializer, response_serializer,
_adapt_unary_request_inline(implementation.unary_unary_inline),
None, None, None)
- elif implementation.cardinality is cardinality.Cardinality.UNARY_STREAM:
+ elif implementation.cardinality is cardinality.Cardinality.UNARY_STREAM:
return _SimpleMethodHandler(
False, True, request_deserializer, response_serializer, None,
_adapt_unary_request_inline(implementation.unary_stream_inline),
None, None)
- elif implementation.cardinality is cardinality.Cardinality.STREAM_UNARY:
+ elif implementation.cardinality is cardinality.Cardinality.STREAM_UNARY:
return _SimpleMethodHandler(
True, False, request_deserializer, response_serializer, None,
None,
_adapt_stream_request_inline(
implementation.stream_unary_inline), None)
- elif implementation.cardinality is cardinality.Cardinality.STREAM_STREAM:
- return _SimpleMethodHandler(
- True, True, request_deserializer, response_serializer, None,
- None, None,
- _adapt_stream_request_inline(
- implementation.stream_stream_inline))
- elif implementation.style is style.Service.EVENT:
- if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY:
+ elif implementation.cardinality is cardinality.Cardinality.STREAM_STREAM:
+ return _SimpleMethodHandler(
+ True, True, request_deserializer, response_serializer, None,
+ None, None,
+ _adapt_stream_request_inline(
+ implementation.stream_stream_inline))
+ elif implementation.style is style.Service.EVENT:
+ if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY:
return _SimpleMethodHandler(
False, False, request_deserializer, response_serializer,
_adapt_unary_unary_event(implementation.unary_unary_event),
None, None, None)
- elif implementation.cardinality is cardinality.Cardinality.UNARY_STREAM:
+ elif implementation.cardinality is cardinality.Cardinality.UNARY_STREAM:
return _SimpleMethodHandler(
False, True, request_deserializer, response_serializer, None,
_adapt_unary_stream_event(implementation.unary_stream_event),
None, None)
- elif implementation.cardinality is cardinality.Cardinality.STREAM_UNARY:
+ elif implementation.cardinality is cardinality.Cardinality.STREAM_UNARY:
return _SimpleMethodHandler(
True, False, request_deserializer, response_serializer, None,
None,
_adapt_stream_unary_event(implementation.stream_unary_event),
None)
- elif implementation.cardinality is cardinality.Cardinality.STREAM_STREAM:
+ elif implementation.cardinality is cardinality.Cardinality.STREAM_STREAM:
return _SimpleMethodHandler(
True, True, request_deserializer, response_serializer, None,
None, None,
_adapt_stream_stream_event(implementation.stream_stream_event))
raise ValueError()
-
-
-def _flatten_method_pair_map(method_pair_map):
- method_pair_map = method_pair_map or {}
- flat_map = {}
- for method_pair in method_pair_map:
- method = _common.fully_qualified_method(method_pair[0], method_pair[1])
- flat_map[method] = method_pair_map[method_pair]
- return flat_map
-
-
-class _GenericRpcHandler(grpc.GenericRpcHandler):
-
- def __init__(self, method_implementations, multi_method_implementation,
- request_deserializers, response_serializers):
- self._method_implementations = _flatten_method_pair_map(
- method_implementations)
- self._request_deserializers = _flatten_method_pair_map(
- request_deserializers)
- self._response_serializers = _flatten_method_pair_map(
- response_serializers)
- self._multi_method_implementation = multi_method_implementation
-
- def service(self, handler_call_details):
- method_implementation = self._method_implementations.get(
- handler_call_details.method)
- if method_implementation is not None:
+
+
+def _flatten_method_pair_map(method_pair_map):
+ method_pair_map = method_pair_map or {}
+ flat_map = {}
+ for method_pair in method_pair_map:
+ method = _common.fully_qualified_method(method_pair[0], method_pair[1])
+ flat_map[method] = method_pair_map[method_pair]
+ return flat_map
+
+
+class _GenericRpcHandler(grpc.GenericRpcHandler):
+
+ def __init__(self, method_implementations, multi_method_implementation,
+ request_deserializers, response_serializers):
+ self._method_implementations = _flatten_method_pair_map(
+ method_implementations)
+ self._request_deserializers = _flatten_method_pair_map(
+ request_deserializers)
+ self._response_serializers = _flatten_method_pair_map(
+ response_serializers)
+ self._multi_method_implementation = multi_method_implementation
+
+ def service(self, handler_call_details):
+ method_implementation = self._method_implementations.get(
+ handler_call_details.method)
+ if method_implementation is not None:
return _simple_method_handler(
method_implementation,
self._request_deserializers.get(handler_call_details.method),
self._response_serializers.get(handler_call_details.method))
- elif self._multi_method_implementation is None:
- return None
- else:
- try:
- return None #TODO(nathaniel): call the multimethod.
- except face.NoSuchMethodError:
- return None
-
-
-class _Server(interfaces.Server):
-
+ elif self._multi_method_implementation is None:
+ return None
+ else:
+ try:
+ return None #TODO(nathaniel): call the multimethod.
+ except face.NoSuchMethodError:
+ return None
+
+
+class _Server(interfaces.Server):
+
def __init__(self, grpc_server):
self._grpc_server = grpc_server
-
- def add_insecure_port(self, address):
+
+ def add_insecure_port(self, address):
return self._grpc_server.add_insecure_port(address)
-
- def add_secure_port(self, address, server_credentials):
+
+ def add_secure_port(self, address, server_credentials):
return self._grpc_server.add_secure_port(address, server_credentials)
-
- def start(self):
+
+ def start(self):
self._grpc_server.start()
-
- def stop(self, grace):
+
+ def stop(self, grace):
return self._grpc_server.stop(grace)
-
- def __enter__(self):
+
+ def __enter__(self):
self._grpc_server.start()
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
self._grpc_server.stop(None)
- return False
-
-
-def server(service_implementations, multi_method_implementation,
- request_deserializers, response_serializers, thread_pool,
- thread_pool_size):
+ return False
+
+
+def server(service_implementations, multi_method_implementation,
+ request_deserializers, response_serializers, thread_pool,
+ thread_pool_size):
generic_rpc_handler = _GenericRpcHandler(service_implementations,
multi_method_implementation,
request_deserializers,
response_serializers)
- if thread_pool is None:
+ if thread_pool is None:
effective_thread_pool = logging_pool.pool(
_DEFAULT_POOL_SIZE if thread_pool_size is None else thread_pool_size
)
- else:
- effective_thread_pool = thread_pool
- return _Server(
- grpc.server(effective_thread_pool, handlers=(generic_rpc_handler,)))
+ else:
+ effective_thread_pool = thread_pool
+ return _Server(
+ grpc.server(effective_thread_pool, handlers=(generic_rpc_handler,)))
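
The adapted server above is normally reached through the public Beta entry points rather than by calling _server_adaptations.server directly. A minimal sketch of that path, assuming an application-supplied mapping of (service name, method name) pairs to face.MethodImplementation objects (left as an empty placeholder here) and an arbitrary port:

    from concurrent import futures

    from grpc.beta import implementations

    # Placeholder: a real application maps ('package.Service', 'Method') pairs
    # to face.MethodImplementation objects.
    method_implementations = {}

    options = implementations.server_options(
        thread_pool=futures.ThreadPoolExecutor(max_workers=4))
    beta_server = implementations.server(method_implementations, options=options)
    port = beta_server.add_insecure_port('[::]:0')  # port 0: let the runtime choose
    beta_server.start()
    # ... RPCs are serviced by the wrapped grpc.server ...
    beta_server.stop(grace=None)  # None: no grace period for in-flight RPCs
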
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/beta/implementations.py b/contrib/libs/grpc/src/python/grpcio/grpc/beta/implementations.py
index c5507b543f..bf3c03bf64 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/beta/implementations.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/beta/implementations.py
@@ -1,39 +1,39 @@
# Copyright 2015-2016 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Entry points into the Beta API of gRPC Python."""
-
-# threading is referenced from specification in this module.
-import threading # pylint: disable=unused-import
-
+"""Entry points into the Beta API of gRPC Python."""
+
+# threading is referenced from specification in this module.
+import threading # pylint: disable=unused-import
+
# interfaces, cardinality, and face are referenced from specification in this
# module.
-import grpc
-from grpc import _auth
-from grpc.beta import _client_adaptations
+import grpc
+from grpc import _auth
+from grpc.beta import _client_adaptations
from grpc.beta import _metadata
-from grpc.beta import _server_adaptations
+from grpc.beta import _server_adaptations
from grpc.beta import interfaces # pylint: disable=unused-import
-from grpc.framework.common import cardinality # pylint: disable=unused-import
-from grpc.framework.interfaces.face import face # pylint: disable=unused-import
-
+from grpc.framework.common import cardinality # pylint: disable=unused-import
+from grpc.framework.interfaces.face import face # pylint: disable=unused-import
+
# pylint: disable=too-many-arguments
-ChannelCredentials = grpc.ChannelCredentials
-ssl_channel_credentials = grpc.ssl_channel_credentials
-CallCredentials = grpc.CallCredentials
-
-
+ChannelCredentials = grpc.ChannelCredentials
+ssl_channel_credentials = grpc.ssl_channel_credentials
+CallCredentials = grpc.CallCredentials
+
+
def metadata_call_credentials(metadata_plugin, name=None):
def plugin(context, callback):
@@ -46,265 +46,265 @@ def metadata_call_credentials(metadata_plugin, name=None):
return grpc.metadata_call_credentials(plugin, name=name)
-def google_call_credentials(credentials):
- """Construct CallCredentials from GoogleCredentials.
-
- Args:
- credentials: A GoogleCredentials object from the oauth2client library.
-
- Returns:
- A CallCredentials object for use in a GRPCCallOptions object.
- """
- return metadata_call_credentials(_auth.GoogleCallCredentials(credentials))
-
-
-access_token_call_credentials = grpc.access_token_call_credentials
-composite_call_credentials = grpc.composite_call_credentials
-composite_channel_credentials = grpc.composite_channel_credentials
-
-
-class Channel(object):
- """A channel to a remote host through which RPCs may be conducted.
-
- Only the "subscribe" and "unsubscribe" methods are supported for application
- use. This class' instance constructor and all other attributes are
- unsupported.
- """
-
- def __init__(self, channel):
- self._channel = channel
-
- def subscribe(self, callback, try_to_connect=None):
- """Subscribes to this Channel's connectivity.
-
- Args:
- callback: A callable to be invoked and passed an
- interfaces.ChannelConnectivity identifying this Channel's connectivity.
- The callable will be invoked immediately upon subscription and again for
- every change to this Channel's connectivity thereafter until it is
- unsubscribed.
- try_to_connect: A boolean indicating whether or not this Channel should
- attempt to connect if it is not already connected and ready to conduct
- RPCs.
- """
- self._channel.subscribe(callback, try_to_connect=try_to_connect)
-
- def unsubscribe(self, callback):
- """Unsubscribes a callback from this Channel's connectivity.
-
- Args:
- callback: A callable previously registered with this Channel from having
- been passed to its "subscribe" method.
- """
- self._channel.unsubscribe(callback)
-
-
-def insecure_channel(host, port):
- """Creates an insecure Channel to a remote host.
-
- Args:
- host: The name of the remote host to which to connect.
- port: The port of the remote host to which to connect.
- If None only the 'host' part will be used.
-
- Returns:
- A Channel to the remote host through which RPCs may be conducted.
- """
+def google_call_credentials(credentials):
+ """Construct CallCredentials from GoogleCredentials.
+
+ Args:
+ credentials: A GoogleCredentials object from the oauth2client library.
+
+ Returns:
+ A CallCredentials object for use in a GRPCCallOptions object.
+ """
+ return metadata_call_credentials(_auth.GoogleCallCredentials(credentials))
+
+
+access_token_call_credentials = grpc.access_token_call_credentials
+composite_call_credentials = grpc.composite_call_credentials
+composite_channel_credentials = grpc.composite_channel_credentials
+
+
+class Channel(object):
+ """A channel to a remote host through which RPCs may be conducted.
+
+ Only the "subscribe" and "unsubscribe" methods are supported for application
+ use. This class' instance constructor and all other attributes are
+ unsupported.
+ """
+
+ def __init__(self, channel):
+ self._channel = channel
+
+ def subscribe(self, callback, try_to_connect=None):
+ """Subscribes to this Channel's connectivity.
+
+ Args:
+ callback: A callable to be invoked and passed an
+ interfaces.ChannelConnectivity identifying this Channel's connectivity.
+ The callable will be invoked immediately upon subscription and again for
+ every change to this Channel's connectivity thereafter until it is
+ unsubscribed.
+ try_to_connect: A boolean indicating whether or not this Channel should
+ attempt to connect if it is not already connected and ready to conduct
+ RPCs.
+ """
+ self._channel.subscribe(callback, try_to_connect=try_to_connect)
+
+ def unsubscribe(self, callback):
+ """Unsubscribes a callback from this Channel's connectivity.
+
+ Args:
+ callback: A callable previously registered with this Channel from having
+ been passed to its "subscribe" method.
+ """
+ self._channel.unsubscribe(callback)
+
+
+def insecure_channel(host, port):
+ """Creates an insecure Channel to a remote host.
+
+ Args:
+ host: The name of the remote host to which to connect.
+ port: The port of the remote host to which to connect.
+ If None only the 'host' part will be used.
+
+ Returns:
+ A Channel to the remote host through which RPCs may be conducted.
+ """
channel = grpc.insecure_channel(host if port is None else '%s:%d' %
(host, port))
- return Channel(channel)
-
-
-def secure_channel(host, port, channel_credentials):
- """Creates a secure Channel to a remote host.
-
- Args:
- host: The name of the remote host to which to connect.
- port: The port of the remote host to which to connect.
- If None only the 'host' part will be used.
- channel_credentials: A ChannelCredentials.
-
- Returns:
- A secure Channel to the remote host through which RPCs may be conducted.
- """
+ return Channel(channel)
+
+
+def secure_channel(host, port, channel_credentials):
+ """Creates a secure Channel to a remote host.
+
+ Args:
+ host: The name of the remote host to which to connect.
+ port: The port of the remote host to which to connect.
+ If None only the 'host' part will be used.
+ channel_credentials: A ChannelCredentials.
+
+ Returns:
+ A secure Channel to the remote host through which RPCs may be conducted.
+ """
channel = grpc.secure_channel(
host if port is None else '%s:%d' % (host, port), channel_credentials)
- return Channel(channel)
-
-
-class StubOptions(object):
- """A value encapsulating the various options for creation of a Stub.
-
- This class and its instances have no supported interface - it exists to define
- the type of its instances and its instances exist to be passed to other
- functions.
- """
-
- def __init__(self, host, request_serializers, response_deserializers,
- metadata_transformer, thread_pool, thread_pool_size):
- self.host = host
- self.request_serializers = request_serializers
- self.response_deserializers = response_deserializers
- self.metadata_transformer = metadata_transformer
- self.thread_pool = thread_pool
- self.thread_pool_size = thread_pool_size
-
-
-_EMPTY_STUB_OPTIONS = StubOptions(None, None, None, None, None, None)
-
-
-def stub_options(host=None,
- request_serializers=None,
- response_deserializers=None,
- metadata_transformer=None,
- thread_pool=None,
- thread_pool_size=None):
- """Creates a StubOptions value to be passed at stub creation.
-
- All parameters are optional and should always be passed by keyword.
-
- Args:
- host: A host string to set on RPC calls.
- request_serializers: A dictionary from service name-method name pair to
- request serialization behavior.
- response_deserializers: A dictionary from service name-method name pair to
- response deserialization behavior.
- metadata_transformer: A callable that given a metadata object produces
- another metadata object to be used in the underlying communication on the
- wire.
- thread_pool: A thread pool to use in stubs.
- thread_pool_size: The size of thread pool to create for use in stubs;
- ignored if thread_pool has been passed.
-
- Returns:
- A StubOptions value created from the passed parameters.
- """
- return StubOptions(host, request_serializers, response_deserializers,
- metadata_transformer, thread_pool, thread_pool_size)
-
-
-def generic_stub(channel, options=None):
- """Creates a face.GenericStub on which RPCs can be made.
-
- Args:
- channel: A Channel for use by the created stub.
- options: A StubOptions customizing the created stub.
-
- Returns:
- A face.GenericStub on which RPCs can be made.
- """
- effective_options = _EMPTY_STUB_OPTIONS if options is None else options
- return _client_adaptations.generic_stub(
- channel._channel, # pylint: disable=protected-access
- effective_options.host,
- effective_options.metadata_transformer,
- effective_options.request_serializers,
- effective_options.response_deserializers)
-
-
-def dynamic_stub(channel, service, cardinalities, options=None):
- """Creates a face.DynamicStub with which RPCs can be invoked.
-
- Args:
- channel: A Channel for the returned face.DynamicStub to use.
- service: The package-qualified full name of the service.
- cardinalities: A dictionary from RPC method name to cardinality.Cardinality
- value identifying the cardinality of the RPC method.
- options: An optional StubOptions value further customizing the functionality
- of the returned face.DynamicStub.
-
- Returns:
- A face.DynamicStub with which RPCs can be invoked.
- """
+ return Channel(channel)
+
+
+class StubOptions(object):
+ """A value encapsulating the various options for creation of a Stub.
+
+ This class and its instances have no supported interface - it exists to define
+ the type of its instances and its instances exist to be passed to other
+ functions.
+ """
+
+ def __init__(self, host, request_serializers, response_deserializers,
+ metadata_transformer, thread_pool, thread_pool_size):
+ self.host = host
+ self.request_serializers = request_serializers
+ self.response_deserializers = response_deserializers
+ self.metadata_transformer = metadata_transformer
+ self.thread_pool = thread_pool
+ self.thread_pool_size = thread_pool_size
+
+
+_EMPTY_STUB_OPTIONS = StubOptions(None, None, None, None, None, None)
+
+
+def stub_options(host=None,
+ request_serializers=None,
+ response_deserializers=None,
+ metadata_transformer=None,
+ thread_pool=None,
+ thread_pool_size=None):
+ """Creates a StubOptions value to be passed at stub creation.
+
+ All parameters are optional and should always be passed by keyword.
+
+ Args:
+ host: A host string to set on RPC calls.
+ request_serializers: A dictionary from service name-method name pair to
+ request serialization behavior.
+ response_deserializers: A dictionary from service name-method name pair to
+ response deserialization behavior.
+ metadata_transformer: A callable that given a metadata object produces
+ another metadata object to be used in the underlying communication on the
+ wire.
+ thread_pool: A thread pool to use in stubs.
+ thread_pool_size: The size of thread pool to create for use in stubs;
+ ignored if thread_pool has been passed.
+
+ Returns:
+ A StubOptions value created from the passed parameters.
+ """
+ return StubOptions(host, request_serializers, response_deserializers,
+ metadata_transformer, thread_pool, thread_pool_size)
+
+
+def generic_stub(channel, options=None):
+ """Creates a face.GenericStub on which RPCs can be made.
+
+ Args:
+ channel: A Channel for use by the created stub.
+ options: A StubOptions customizing the created stub.
+
+ Returns:
+ A face.GenericStub on which RPCs can be made.
+ """
+ effective_options = _EMPTY_STUB_OPTIONS if options is None else options
+ return _client_adaptations.generic_stub(
+ channel._channel, # pylint: disable=protected-access
+ effective_options.host,
+ effective_options.metadata_transformer,
+ effective_options.request_serializers,
+ effective_options.response_deserializers)
+
+
+def dynamic_stub(channel, service, cardinalities, options=None):
+ """Creates a face.DynamicStub with which RPCs can be invoked.
+
+ Args:
+ channel: A Channel for the returned face.DynamicStub to use.
+ service: The package-qualified full name of the service.
+ cardinalities: A dictionary from RPC method name to cardinality.Cardinality
+ value identifying the cardinality of the RPC method.
+ options: An optional StubOptions value further customizing the functionality
+ of the returned face.DynamicStub.
+
+ Returns:
+ A face.DynamicStub with which RPCs can be invoked.
+ """
effective_options = _EMPTY_STUB_OPTIONS if options is None else options
- return _client_adaptations.dynamic_stub(
- channel._channel, # pylint: disable=protected-access
- service,
- cardinalities,
- effective_options.host,
- effective_options.metadata_transformer,
- effective_options.request_serializers,
- effective_options.response_deserializers)
-
-
-ServerCredentials = grpc.ServerCredentials
-ssl_server_credentials = grpc.ssl_server_credentials
-
-
-class ServerOptions(object):
- """A value encapsulating the various options for creation of a Server.
-
- This class and its instances have no supported interface - it exists to define
- the type of its instances and its instances exist to be passed to other
- functions.
- """
-
- def __init__(self, multi_method_implementation, request_deserializers,
- response_serializers, thread_pool, thread_pool_size,
- default_timeout, maximum_timeout):
- self.multi_method_implementation = multi_method_implementation
- self.request_deserializers = request_deserializers
- self.response_serializers = response_serializers
- self.thread_pool = thread_pool
- self.thread_pool_size = thread_pool_size
- self.default_timeout = default_timeout
- self.maximum_timeout = maximum_timeout
-
-
-_EMPTY_SERVER_OPTIONS = ServerOptions(None, None, None, None, None, None, None)
-
-
-def server_options(multi_method_implementation=None,
- request_deserializers=None,
- response_serializers=None,
- thread_pool=None,
- thread_pool_size=None,
- default_timeout=None,
- maximum_timeout=None):
- """Creates a ServerOptions value to be passed at server creation.
-
- All parameters are optional and should always be passed by keyword.
-
- Args:
- multi_method_implementation: A face.MultiMethodImplementation to be called
- to service an RPC if the server has no specific method implementation for
- the name of the RPC for which service was requested.
- request_deserializers: A dictionary from service name-method name pair to
- request deserialization behavior.
- response_serializers: A dictionary from service name-method name pair to
- response serialization behavior.
- thread_pool: A thread pool to use in the server.
- thread_pool_size: The size of thread pool to create for use in the server;
- ignored if thread_pool has been passed.
- default_timeout: A duration in seconds to allow for RPC service when
- servicing RPCs that did not include a timeout value when invoked.
- maximum_timeout: A duration in seconds to allow for RPC service when
- servicing RPCs no matter what timeout value was passed when the RPC was
- invoked.
-
- Returns:
- A ServerOptions value created from the passed parameters.
- """
- return ServerOptions(multi_method_implementation, request_deserializers,
- response_serializers, thread_pool, thread_pool_size,
- default_timeout, maximum_timeout)
-
-
-def server(service_implementations, options=None):
- """Creates an interfaces.Server with which RPCs can be serviced.
-
- Args:
- service_implementations: A dictionary from service name-method name pair to
- face.MethodImplementation.
- options: An optional ServerOptions value further customizing the
- functionality of the returned Server.
-
- Returns:
- An interfaces.Server with which RPCs can be serviced.
- """
- effective_options = _EMPTY_SERVER_OPTIONS if options is None else options
- return _server_adaptations.server(
- service_implementations, effective_options.multi_method_implementation,
- effective_options.request_deserializers,
- effective_options.response_serializers, effective_options.thread_pool,
- effective_options.thread_pool_size)
+ return _client_adaptations.dynamic_stub(
+ channel._channel, # pylint: disable=protected-access
+ service,
+ cardinalities,
+ effective_options.host,
+ effective_options.metadata_transformer,
+ effective_options.request_serializers,
+ effective_options.response_deserializers)
+
+
+ServerCredentials = grpc.ServerCredentials
+ssl_server_credentials = grpc.ssl_server_credentials
+
+
+class ServerOptions(object):
+ """A value encapsulating the various options for creation of a Server.
+
+ This class and its instances have no supported interface - it exists to define
+ the type of its instances and its instances exist to be passed to other
+ functions.
+ """
+
+ def __init__(self, multi_method_implementation, request_deserializers,
+ response_serializers, thread_pool, thread_pool_size,
+ default_timeout, maximum_timeout):
+ self.multi_method_implementation = multi_method_implementation
+ self.request_deserializers = request_deserializers
+ self.response_serializers = response_serializers
+ self.thread_pool = thread_pool
+ self.thread_pool_size = thread_pool_size
+ self.default_timeout = default_timeout
+ self.maximum_timeout = maximum_timeout
+
+
+_EMPTY_SERVER_OPTIONS = ServerOptions(None, None, None, None, None, None, None)
+
+
+def server_options(multi_method_implementation=None,
+ request_deserializers=None,
+ response_serializers=None,
+ thread_pool=None,
+ thread_pool_size=None,
+ default_timeout=None,
+ maximum_timeout=None):
+ """Creates a ServerOptions value to be passed at server creation.
+
+ All parameters are optional and should always be passed by keyword.
+
+ Args:
+ multi_method_implementation: A face.MultiMethodImplementation to be called
+ to service an RPC if the server has no specific method implementation for
+ the name of the RPC for which service was requested.
+ request_deserializers: A dictionary from service name-method name pair to
+ request deserialization behavior.
+ response_serializers: A dictionary from service name-method name pair to
+ response serialization behavior.
+ thread_pool: A thread pool to use in the server.
+ thread_pool_size: The size of thread pool to create for use in the server;
+ ignored if thread_pool has been passed.
+ default_timeout: A duration in seconds to allow for RPC service when
+ servicing RPCs that did not include a timeout value when invoked.
+ maximum_timeout: A duration in seconds to allow for RPC service when
+ servicing RPCs no matter what timeout value was passed when the RPC was
+ invoked.
+
+ Returns:
+ A ServerOptions value created from the passed parameters.
+ """
+ return ServerOptions(multi_method_implementation, request_deserializers,
+ response_serializers, thread_pool, thread_pool_size,
+ default_timeout, maximum_timeout)
+
+
+def server(service_implementations, options=None):
+ """Creates an interfaces.Server with which RPCs can be serviced.
+
+ Args:
+ service_implementations: A dictionary from service name-method name pair to
+ face.MethodImplementation.
+ options: An optional ServerOptions value further customizing the
+ functionality of the returned Server.
+
+ Returns:
+ An interfaces.Server with which RPCs can be serviced.
+ """
+ effective_options = _EMPTY_SERVER_OPTIONS if options is None else options
+ return _server_adaptations.server(
+ service_implementations, effective_options.multi_method_implementation,
+ effective_options.request_deserializers,
+ effective_options.response_serializers, effective_options.thread_pool,
+ effective_options.thread_pool_size)
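
On the client side, the channel and stub constructors shown above compose as follows. The address, the service name 'helloworld.Greeter', and the method 'SayHello' are hypothetical, and a real invocation additionally needs request/response (de)serializers supplied through stub_options:

    from grpc.beta import implementations
    from grpc.framework.common import cardinality

    channel = implementations.insecure_channel('localhost', 50051)
    cardinalities = {'SayHello': cardinality.Cardinality.UNARY_UNARY}
    stub = implementations.dynamic_stub(channel, 'helloworld.Greeter', cardinalities)
    # With serializers configured, a unary-unary invocation would look like:
    # response = stub.SayHello(request, timeout_in_seconds)
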
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/beta/interfaces.py b/contrib/libs/grpc/src/python/grpcio/grpc/beta/interfaces.py
index fb8266c74b..e27403de08 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/beta/interfaces.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/beta/interfaces.py
@@ -1,165 +1,165 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Constants and interfaces of the Beta API of gRPC Python."""
-
-import abc
-
-import six
-
-import grpc
-
-ChannelConnectivity = grpc.ChannelConnectivity
-# FATAL_FAILURE was a Beta-API name for SHUTDOWN
-ChannelConnectivity.FATAL_FAILURE = ChannelConnectivity.SHUTDOWN
-
-StatusCode = grpc.StatusCode
-
-
-class GRPCCallOptions(object):
- """A value encapsulating gRPC-specific options passed on RPC invocation.
-
- This class and its instances have no supported interface - it exists to
- define the type of its instances and its instances exist to be passed to
- other functions.
- """
-
- def __init__(self, disable_compression, subcall_of, credentials):
- self.disable_compression = disable_compression
- self.subcall_of = subcall_of
- self.credentials = credentials
-
-
-def grpc_call_options(disable_compression=False, credentials=None):
- """Creates a GRPCCallOptions value to be passed at RPC invocation.
-
- All parameters are optional and should always be passed by keyword.
-
- Args:
- disable_compression: A boolean indicating whether or not compression should
- be disabled for the request object of the RPC. Only valid for
- request-unary RPCs.
- credentials: A CallCredentials object to use for the invoked RPC.
- """
- return GRPCCallOptions(disable_compression, None, credentials)
-
-
-GRPCAuthMetadataContext = grpc.AuthMetadataContext
-GRPCAuthMetadataPluginCallback = grpc.AuthMetadataPluginCallback
-GRPCAuthMetadataPlugin = grpc.AuthMetadataPlugin
-
-
-class GRPCServicerContext(six.with_metaclass(abc.ABCMeta)):
- """Exposes gRPC-specific options and behaviors to code servicing RPCs."""
-
- @abc.abstractmethod
- def peer(self):
- """Identifies the peer that invoked the RPC being serviced.
-
- Returns:
- A string identifying the peer that invoked the RPC being serviced.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def disable_next_response_compression(self):
- """Disables compression of the next response passed by the application."""
- raise NotImplementedError()
-
-
-class GRPCInvocationContext(six.with_metaclass(abc.ABCMeta)):
- """Exposes gRPC-specific options and behaviors to code invoking RPCs."""
-
- @abc.abstractmethod
- def disable_next_request_compression(self):
- """Disables compression of the next request passed by the application."""
- raise NotImplementedError()
-
-
-class Server(six.with_metaclass(abc.ABCMeta)):
- """Services RPCs."""
-
- @abc.abstractmethod
- def add_insecure_port(self, address):
- """Reserves a port for insecure RPC service once this Server becomes active.
-
- This method may only be called before this Server's start method is
- called.
-
- Args:
- address: The address for which to open a port.
-
- Returns:
- An integer port on which RPCs will be serviced after this link has been
- started. This is typically the same number as the port number contained
- in the passed address, but will likely be different if the port number
- contained in the passed address was zero.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def add_secure_port(self, address, server_credentials):
- """Reserves a port for secure RPC service after this Server becomes active.
-
- This method may only be called before this Server's start method is
- called.
-
- Args:
- address: The address for which to open a port.
- server_credentials: A ServerCredentials.
-
- Returns:
- An integer port on which RPCs will be serviced after this link has been
- started. This is typically the same number as the port number contained
- in the passed address, but will likely be different if the port number
- contained in the passed address was zero.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def start(self):
- """Starts this Server's service of RPCs.
-
- This method may only be called while the server is not serving RPCs (i.e. it
- is not idempotent).
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def stop(self, grace):
- """Stops this Server's service of RPCs.
-
- All calls to this method immediately stop service of new RPCs. When existing
- RPCs are aborted is controlled by the grace period parameter passed to this
- method.
-
- This method may be called at any time and is idempotent. Passing a smaller
- grace value than has been passed in a previous call will have the effect of
- stopping the Server sooner. Passing a larger grace value than has been
- passed in a previous call will not have the effect of stopping the server
- later.
-
- Args:
- grace: A duration of time in seconds to allow existing RPCs to complete
- before being aborted by this Server's stopping. May be zero for
- immediate abortion of all in-progress RPCs.
-
- Returns:
- A threading.Event that will be set when this Server has completely
- stopped. The returned event may not be set until after the full grace
- period (if some ongoing RPC continues for the full length of the period)
- or it may be set much sooner (such as if this Server had no RPCs underway
- at the time it was stopped or if all RPCs that it had underway completed
- very early in the grace period).
- """
- raise NotImplementedError()
+"""Constants and interfaces of the Beta API of gRPC Python."""
+
+import abc
+
+import six
+
+import grpc
+
+ChannelConnectivity = grpc.ChannelConnectivity
+# FATAL_FAILURE was a Beta-API name for SHUTDOWN
+ChannelConnectivity.FATAL_FAILURE = ChannelConnectivity.SHUTDOWN
+
+StatusCode = grpc.StatusCode
+
+
+class GRPCCallOptions(object):
+ """A value encapsulating gRPC-specific options passed on RPC invocation.
+
+ This class and its instances have no supported interface - it exists to
+ define the type of its instances and its instances exist to be passed to
+ other functions.
+ """
+
+ def __init__(self, disable_compression, subcall_of, credentials):
+ self.disable_compression = disable_compression
+ self.subcall_of = subcall_of
+ self.credentials = credentials
+
+
+def grpc_call_options(disable_compression=False, credentials=None):
+ """Creates a GRPCCallOptions value to be passed at RPC invocation.
+
+ All parameters are optional and should always be passed by keyword.
+
+ Args:
+ disable_compression: A boolean indicating whether or not compression should
+ be disabled for the request object of the RPC. Only valid for
+ request-unary RPCs.
+ credentials: A CallCredentials object to use for the invoked RPC.
+ """
+ return GRPCCallOptions(disable_compression, None, credentials)
+
+
+GRPCAuthMetadataContext = grpc.AuthMetadataContext
+GRPCAuthMetadataPluginCallback = grpc.AuthMetadataPluginCallback
+GRPCAuthMetadataPlugin = grpc.AuthMetadataPlugin
+
+
+class GRPCServicerContext(six.with_metaclass(abc.ABCMeta)):
+ """Exposes gRPC-specific options and behaviors to code servicing RPCs."""
+
+ @abc.abstractmethod
+ def peer(self):
+ """Identifies the peer that invoked the RPC being serviced.
+
+ Returns:
+ A string identifying the peer that invoked the RPC being serviced.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def disable_next_response_compression(self):
+ """Disables compression of the next response passed by the application."""
+ raise NotImplementedError()
+
+
+class GRPCInvocationContext(six.with_metaclass(abc.ABCMeta)):
+ """Exposes gRPC-specific options and behaviors to code invoking RPCs."""
+
+ @abc.abstractmethod
+ def disable_next_request_compression(self):
+ """Disables compression of the next request passed by the application."""
+ raise NotImplementedError()
+
+
+class Server(six.with_metaclass(abc.ABCMeta)):
+ """Services RPCs."""
+
+ @abc.abstractmethod
+ def add_insecure_port(self, address):
+ """Reserves a port for insecure RPC service once this Server becomes active.
+
+ This method may only be called before this Server's start method is
+ called.
+
+ Args:
+ address: The address for which to open a port.
+
+ Returns:
+ An integer port on which RPCs will be serviced after this link has been
+ started. This is typically the same number as the port number contained
+ in the passed address, but will likely be different if the port number
+ contained in the passed address was zero.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_secure_port(self, address, server_credentials):
+ """Reserves a port for secure RPC service after this Server becomes active.
+
+ This method may only be called before this Server's start method is
+ called.
+
+ Args:
+ address: The address for which to open a port.
+ server_credentials: A ServerCredentials.
+
+ Returns:
+ An integer port on which RPCs will be serviced after this link has been
+ started. This is typically the same number as the port number contained
+ in the passed address, but will likely be different if the port number
+ contained in the passed address was zero.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def start(self):
+ """Starts this Server's service of RPCs.
+
+ This method may only be called while the server is not serving RPCs (i.e. it
+ is not idempotent).
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def stop(self, grace):
+ """Stops this Server's service of RPCs.
+
+ All calls to this method immediately stop service of new RPCs. When existing
+ RPCs are aborted is controlled by the grace period parameter passed to this
+ method.
+
+ This method may be called at any time and is idempotent. Passing a smaller
+ grace value than has been passed in a previous call will have the effect of
+ stopping the Server sooner. Passing a larger grace value than has been
+ passed in a previous call will not have the effect of stopping the server
+ later.
+
+ Args:
+ grace: A duration of time in seconds to allow existing RPCs to complete
+ before being aborted by this Server's stopping. May be zero for
+ immediate abortion of all in-progress RPCs.
+
+ Returns:
+ A threading.Event that will be set when this Server has completely
+ stopped. The returned event may not be set until after the full grace
+ period (if some ongoing RPC continues for the full length of the period)
+ or it may be set much sooner (such as if this Server had no RPCs underway
+ at the time it was stopped or if all RPCs that it had underway completed
+ very early in the grace period).
+ """
+ raise NotImplementedError()
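
A small sketch of how the Beta-level names defined in interfaces.py are used; nothing here is specific to this patch:

    from grpc.beta import interfaces

    # FATAL_FAILURE is the Beta-era alias attached to SHUTDOWN above.
    assert (interfaces.ChannelConnectivity.FATAL_FAILURE
            is interfaces.ChannelConnectivity.SHUTDOWN)

    # Per-call options for a request-unary RPC: disable compression,
    # attach no call credentials.
    call_options = interfaces.grpc_call_options(disable_compression=True)
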
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/beta/utilities.py b/contrib/libs/grpc/src/python/grpcio/grpc/beta/utilities.py
index fe3ce606c9..4a4bc0f9e7 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/beta/utilities.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/beta/utilities.py
@@ -1,149 +1,149 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Utilities for the gRPC Python Beta API."""
-
-import threading
-import time
-
-# implementations is referenced from specification in this module.
-from grpc.beta import implementations # pylint: disable=unused-import
-from grpc.beta import interfaces
-from grpc.framework.foundation import callable_util
-from grpc.framework.foundation import future
-
-_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE = (
- 'Exception calling connectivity future "done" callback!')
-
-
-class _ChannelReadyFuture(future.Future):
-
- def __init__(self, channel):
- self._condition = threading.Condition()
- self._channel = channel
-
- self._matured = False
- self._cancelled = False
- self._done_callbacks = []
-
- def _block(self, timeout):
- until = None if timeout is None else time.time() + timeout
- with self._condition:
- while True:
- if self._cancelled:
- raise future.CancelledError()
- elif self._matured:
- return
- else:
- if until is None:
- self._condition.wait()
- else:
- remaining = until - time.time()
- if remaining < 0:
- raise future.TimeoutError()
- else:
- self._condition.wait(timeout=remaining)
-
- def _update(self, connectivity):
- with self._condition:
- if (not self._cancelled and
- connectivity is interfaces.ChannelConnectivity.READY):
- self._matured = True
- self._channel.unsubscribe(self._update)
- self._condition.notify_all()
- done_callbacks = tuple(self._done_callbacks)
- self._done_callbacks = None
- else:
- return
-
- for done_callback in done_callbacks:
- callable_util.call_logging_exceptions(
- done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self)
-
- def cancel(self):
- with self._condition:
- if not self._matured:
- self._cancelled = True
- self._channel.unsubscribe(self._update)
- self._condition.notify_all()
- done_callbacks = tuple(self._done_callbacks)
- self._done_callbacks = None
- else:
- return False
-
- for done_callback in done_callbacks:
- callable_util.call_logging_exceptions(
- done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self)
-
+"""Utilities for the gRPC Python Beta API."""
+
+import threading
+import time
+
+# implementations is referenced from specification in this module.
+from grpc.beta import implementations # pylint: disable=unused-import
+from grpc.beta import interfaces
+from grpc.framework.foundation import callable_util
+from grpc.framework.foundation import future
+
+_DONE_CALLBACK_EXCEPTION_LOG_MESSAGE = (
+ 'Exception calling connectivity future "done" callback!')
+
+
+class _ChannelReadyFuture(future.Future):
+
+ def __init__(self, channel):
+ self._condition = threading.Condition()
+ self._channel = channel
+
+ self._matured = False
+ self._cancelled = False
+ self._done_callbacks = []
+
+ def _block(self, timeout):
+ until = None if timeout is None else time.time() + timeout
+ with self._condition:
+ while True:
+ if self._cancelled:
+ raise future.CancelledError()
+ elif self._matured:
+ return
+ else:
+ if until is None:
+ self._condition.wait()
+ else:
+ remaining = until - time.time()
+ if remaining < 0:
+ raise future.TimeoutError()
+ else:
+ self._condition.wait(timeout=remaining)
+
+ def _update(self, connectivity):
+ with self._condition:
+ if (not self._cancelled and
+ connectivity is interfaces.ChannelConnectivity.READY):
+ self._matured = True
+ self._channel.unsubscribe(self._update)
+ self._condition.notify_all()
+ done_callbacks = tuple(self._done_callbacks)
+ self._done_callbacks = None
+ else:
+ return
+
+ for done_callback in done_callbacks:
+ callable_util.call_logging_exceptions(
+ done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self)
+
+ def cancel(self):
+ with self._condition:
+ if not self._matured:
+ self._cancelled = True
+ self._channel.unsubscribe(self._update)
+ self._condition.notify_all()
+ done_callbacks = tuple(self._done_callbacks)
+ self._done_callbacks = None
+ else:
+ return False
+
+ for done_callback in done_callbacks:
+ callable_util.call_logging_exceptions(
+ done_callback, _DONE_CALLBACK_EXCEPTION_LOG_MESSAGE, self)
+
return True
- def cancelled(self):
- with self._condition:
- return self._cancelled
-
- def running(self):
- with self._condition:
- return not self._cancelled and not self._matured
-
- def done(self):
- with self._condition:
- return self._cancelled or self._matured
-
- def result(self, timeout=None):
- self._block(timeout)
- return None
-
- def exception(self, timeout=None):
- self._block(timeout)
- return None
-
- def traceback(self, timeout=None):
- self._block(timeout)
- return None
-
- def add_done_callback(self, fn):
- with self._condition:
- if not self._cancelled and not self._matured:
- self._done_callbacks.append(fn)
- return
-
- fn(self)
-
- def start(self):
- with self._condition:
- self._channel.subscribe(self._update, try_to_connect=True)
-
- def __del__(self):
- with self._condition:
- if not self._cancelled and not self._matured:
- self._channel.unsubscribe(self._update)
-
-
-def channel_ready_future(channel):
- """Creates a future.Future tracking when an implementations.Channel is ready.
-
- Cancelling the returned future.Future does not tell the given
- implementations.Channel to abandon attempts it may have been making to
- connect; cancelling merely deactivates the returned future.Future's
- subscription to the given implementations.Channel's connectivity.
-
- Args:
- channel: An implementations.Channel.
-
- Returns:
- A future.Future that matures when the given Channel has connectivity
- interfaces.ChannelConnectivity.READY.
- """
- ready_future = _ChannelReadyFuture(channel)
- ready_future.start()
- return ready_future
+ def cancelled(self):
+ with self._condition:
+ return self._cancelled
+
+ def running(self):
+ with self._condition:
+ return not self._cancelled and not self._matured
+
+ def done(self):
+ with self._condition:
+ return self._cancelled or self._matured
+
+ def result(self, timeout=None):
+ self._block(timeout)
+ return None
+
+ def exception(self, timeout=None):
+ self._block(timeout)
+ return None
+
+ def traceback(self, timeout=None):
+ self._block(timeout)
+ return None
+
+ def add_done_callback(self, fn):
+ with self._condition:
+ if not self._cancelled and not self._matured:
+ self._done_callbacks.append(fn)
+ return
+
+ fn(self)
+
+ def start(self):
+ with self._condition:
+ self._channel.subscribe(self._update, try_to_connect=True)
+
+ def __del__(self):
+ with self._condition:
+ if not self._cancelled and not self._matured:
+ self._channel.unsubscribe(self._update)
+
+
+def channel_ready_future(channel):
+ """Creates a future.Future tracking when an implementations.Channel is ready.
+
+ Cancelling the returned future.Future does not tell the given
+ implementations.Channel to abandon attempts it may have been making to
+ connect; cancelling merely deactivates the returned future.Future's
+ subscription to the given implementations.Channel's connectivity.
+
+ Args:
+ channel: An implementations.Channel.
+
+ Returns:
+ A future.Future that matures when the given Channel has connectivity
+ interfaces.ChannelConnectivity.READY.
+ """
+ ready_future = _ChannelReadyFuture(channel)
+ ready_future.start()
+ return ready_future
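
channel_ready_future is typically used to block a client until its channel reports READY connectivity. In the sketch below the address is a placeholder and the five-second timeout is arbitrary:

    from grpc.beta import implementations
    from grpc.beta import utilities
    from grpc.framework.foundation import future

    channel = implementations.insecure_channel('localhost', 50051)
    ready_future = utilities.channel_ready_future(channel)
    try:
        ready_future.result(timeout=5)  # blocks until READY or times out
    except future.TimeoutError:
        # Stop watching connectivity; the channel keeps its own state.
        ready_future.cancel()
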
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/framework/__init__.py b/contrib/libs/grpc/src/python/grpcio/grpc/framework/__init__.py
index 5fb4f3c3cf..1841020f80 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/framework/__init__.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/framework/__init__.py
@@ -1,11 +1,11 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/framework/common/__init__.py b/contrib/libs/grpc/src/python/grpcio/grpc/framework/common/__init__.py
index 5fb4f3c3cf..1841020f80 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/framework/common/__init__.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/framework/common/__init__.py
@@ -1,11 +1,11 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/framework/common/cardinality.py b/contrib/libs/grpc/src/python/grpcio/grpc/framework/common/cardinality.py
index c98735622d..a7ffc1b576 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/framework/common/cardinality.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/framework/common/cardinality.py
@@ -1,26 +1,26 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Defines an enum for classifying RPC methods by streaming semantics."""
-
-import enum
-
-
-@enum.unique
-class Cardinality(enum.Enum):
- """Describes the streaming semantics of an RPC method."""
-
- UNARY_UNARY = 'request-unary/response-unary'
- UNARY_STREAM = 'request-unary/response-streaming'
- STREAM_UNARY = 'request-streaming/response-unary'
- STREAM_STREAM = 'request-streaming/response-streaming'
+"""Defines an enum for classifying RPC methods by streaming semantics."""
+
+import enum
+
+
+@enum.unique
+class Cardinality(enum.Enum):
+ """Describes the streaming semantics of an RPC method."""
+
+ UNARY_UNARY = 'request-unary/response-unary'
+ UNARY_STREAM = 'request-unary/response-streaming'
+ STREAM_UNARY = 'request-streaming/response-unary'
+ STREAM_STREAM = 'request-streaming/response-streaming'
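
As a quick reference, the enum above covers the four streaming shapes an RPC method can have; for example:

    from grpc.framework.common import cardinality

    # Each member's value is a descriptive 'request-.../response-...' string.
    for member in cardinality.Cardinality:
        print(member.name, '->', member.value)
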
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/framework/common/style.py b/contrib/libs/grpc/src/python/grpcio/grpc/framework/common/style.py
index f6138d417f..e6ea3c12a4 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/framework/common/style.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/framework/common/style.py
@@ -1,24 +1,24 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Defines an enum for classifying RPC methods by control flow semantics."""
-
-import enum
-
-
-@enum.unique
-class Service(enum.Enum):
- """Describes the control flow style of RPC method implementation."""
-
- INLINE = 'inline'
- EVENT = 'event'
+"""Defines an enum for classifying RPC methods by control flow semantics."""
+
+import enum
+
+
+@enum.unique
+class Service(enum.Enum):
+ """Describes the control flow style of RPC method implementation."""
+
+ INLINE = 'inline'
+ EVENT = 'event'
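
The Service enum distinguishes how a method implementation is driven: an INLINE implementation computes its response on the servicing thread, while an EVENT implementation is driven through callbacks. For illustration only:

    from grpc.framework.common import style

    chosen_style = style.Service.INLINE  # or style.Service.EVENT
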
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/__init__.py b/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/__init__.py
index 5fb4f3c3cf..1841020f80 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/__init__.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/__init__.py
@@ -1,11 +1,11 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/abandonment.py b/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/abandonment.py
index 660ce991c4..54021597e5 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/abandonment.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/abandonment.py
@@ -1,22 +1,22 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Utilities for indicating abandonment of computation."""
-
-
-class Abandoned(Exception):
- """Indicates that some computation is being abandoned.
-
- Abandoning a computation is different than returning a value or raising
- an exception indicating some operational or programming defect.
- """
+"""Utilities for indicating abandonment of computation."""
+
+
+class Abandoned(Exception):
+ """Indicates that some computation is being abandoned.
+
+ Abandoning a computation is different than returning a value or raising
+ an exception indicating some operational or programming defect.
+ """
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/callable_util.py b/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/callable_util.py
index 24daf3406f..ed4f249344 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/callable_util.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/callable_util.py
@@ -1,96 +1,96 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Utilities for working with callables."""
-
-import abc
-import collections
-import enum
-import functools
-import logging
-
-import six
-
+"""Utilities for working with callables."""
+
+import abc
+import collections
+import enum
+import functools
+import logging
+
+import six
+
_LOGGER = logging.getLogger(__name__)
-
-
-class Outcome(six.with_metaclass(abc.ABCMeta)):
- """A sum type describing the outcome of some call.
-
- Attributes:
- kind: One of Kind.RETURNED or Kind.RAISED respectively indicating that the
- call returned a value or raised an exception.
- return_value: The value returned by the call. Must be present if kind is
- Kind.RETURNED.
- exception: The exception raised by the call. Must be present if kind is
- Kind.RAISED.
- """
-
- @enum.unique
- class Kind(enum.Enum):
- """Identifies the general kind of the outcome of some call."""
-
- RETURNED = object()
- RAISED = object()
-
-
-class _EasyOutcome(
- collections.namedtuple('_EasyOutcome',
- ['kind', 'return_value', 'exception']), Outcome):
- """A trivial implementation of Outcome."""
-
-
-def _call_logging_exceptions(behavior, message, *args, **kwargs):
- try:
+
+
+class Outcome(six.with_metaclass(abc.ABCMeta)):
+ """A sum type describing the outcome of some call.
+
+ Attributes:
+ kind: One of Kind.RETURNED or Kind.RAISED respectively indicating that the
+ call returned a value or raised an exception.
+ return_value: The value returned by the call. Must be present if kind is
+ Kind.RETURNED.
+ exception: The exception raised by the call. Must be present if kind is
+ Kind.RAISED.
+ """
+
+ @enum.unique
+ class Kind(enum.Enum):
+ """Identifies the general kind of the outcome of some call."""
+
+ RETURNED = object()
+ RAISED = object()
+
+
+class _EasyOutcome(
+ collections.namedtuple('_EasyOutcome',
+ ['kind', 'return_value', 'exception']), Outcome):
+ """A trivial implementation of Outcome."""
+
+
+def _call_logging_exceptions(behavior, message, *args, **kwargs):
+ try:
return _EasyOutcome(Outcome.Kind.RETURNED, behavior(*args, **kwargs),
None)
- except Exception as e: # pylint: disable=broad-except
+ except Exception as e: # pylint: disable=broad-except
_LOGGER.exception(message)
- return _EasyOutcome(Outcome.Kind.RAISED, None, e)
-
-
-def with_exceptions_logged(behavior, message):
- """Wraps a callable in a try-except that logs any exceptions it raises.
-
- Args:
- behavior: Any callable.
- message: A string to log if the behavior raises an exception.
-
- Returns:
- A callable that when executed invokes the given behavior. The returned
- callable takes the same arguments as the given behavior but returns a
- future.Outcome describing whether the given behavior returned a value or
- raised an exception.
- """
-
- @functools.wraps(behavior)
- def wrapped_behavior(*args, **kwargs):
- return _call_logging_exceptions(behavior, message, *args, **kwargs)
-
- return wrapped_behavior
-
-
-def call_logging_exceptions(behavior, message, *args, **kwargs):
- """Calls a behavior in a try-except that logs any exceptions it raises.
-
- Args:
- behavior: Any callable.
- message: A string to log if the behavior raises an exception.
- *args: Positional arguments to pass to the given behavior.
- **kwargs: Keyword arguments to pass to the given behavior.
-
- Returns:
- An Outcome describing whether the given behavior returned a value or raised
- an exception.
- """
- return _call_logging_exceptions(behavior, message, *args, **kwargs)
+ return _EasyOutcome(Outcome.Kind.RAISED, None, e)
+
+
+def with_exceptions_logged(behavior, message):
+ """Wraps a callable in a try-except that logs any exceptions it raises.
+
+ Args:
+ behavior: Any callable.
+ message: A string to log if the behavior raises an exception.
+
+ Returns:
+ A callable that when executed invokes the given behavior. The returned
+ callable takes the same arguments as the given behavior but returns a
+ future.Outcome describing whether the given behavior returned a value or
+ raised an exception.
+ """
+
+ @functools.wraps(behavior)
+ def wrapped_behavior(*args, **kwargs):
+ return _call_logging_exceptions(behavior, message, *args, **kwargs)
+
+ return wrapped_behavior
+
+
+def call_logging_exceptions(behavior, message, *args, **kwargs):
+ """Calls a behavior in a try-except that logs any exceptions it raises.
+
+ Args:
+ behavior: Any callable.
+ message: A string to log if the behavior raises an exception.
+ *args: Positional arguments to pass to the given behavior.
+ **kwargs: Keyword arguments to pass to the given behavior.
+
+ Returns:
+ An Outcome describing whether the given behavior returned a value or raised
+ an exception.
+ """
+ return _call_logging_exceptions(behavior, message, *args, **kwargs)
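
with_exceptions_logged and call_logging_exceptions report failures as Outcome values instead of letting exceptions escape. A self-contained sketch with a hypothetical callback:

    import logging

    from grpc.framework.foundation import callable_util

    logging.basicConfig()

    def flaky_callback(value):
        if value < 0:
            raise ValueError('negative input')
        return value * 2

    safe_callback = callable_util.with_exceptions_logged(
        flaky_callback, 'Exception in flaky_callback!')
    outcome = safe_callback(-1)  # the exception is logged, not raised
    if outcome.kind is callable_util.Outcome.Kind.RAISED:
        print('raised:', outcome.exception)
    else:
        print('returned:', outcome.return_value)
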
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/future.py b/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/future.py
index d11679cc3d..e2206142e2 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/future.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/future.py
@@ -1,221 +1,221 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""A Future interface.
-
-Python doesn't have a Future interface in its standard library. In the absence
-of such a standard, three separate, incompatible implementations
-(concurrent.futures.Future, ndb.Future, and asyncio.Future) have appeared. This
-interface attempts to be as compatible as possible with
-concurrent.futures.Future. From ndb.Future it adopts a traceback-object accessor
-method.
-
-Unlike the concrete and implemented Future classes listed above, the Future
-class defined in this module is an entirely abstract interface that anyone may
-implement and use.
-
-The one known incompatibility between this interface and the interface of
-concurrent.futures.Future is that this interface defines its own CancelledError
-and TimeoutError exceptions rather than raising the implementation-private
-concurrent.futures._base.CancelledError and the
-built-in-but-only-in-3.3-and-later TimeoutError.
-"""
-
-import abc
-
-import six
-
-
-class TimeoutError(Exception):
- """Indicates that a particular call timed out."""
-
-
-class CancelledError(Exception):
- """Indicates that the computation underlying a Future was cancelled."""
-
-
-class Future(six.with_metaclass(abc.ABCMeta)):
- """A representation of a computation in another control flow.
-
- Computations represented by a Future may be yet to be begun, may be ongoing,
- or may have already completed.
- """
-
- # NOTE(nathaniel): This isn't the return type that I would want to have if it
- # were up to me. Were this interface being written from scratch, the return
- # type of this method would probably be a sum type like:
- #
- # NOT_COMMENCED
- # COMMENCED_AND_NOT_COMPLETED
- # PARTIAL_RESULT<Partial_Result_Type>
- # COMPLETED<Result_Type>
- # UNCANCELLABLE
- # NOT_IMMEDIATELY_DETERMINABLE
- @abc.abstractmethod
- def cancel(self):
- """Attempts to cancel the computation.
-
- This method does not block.
-
- Returns:
- True if the computation has not yet begun, will not be allowed to take
- place, and determination of both was possible without blocking. False
- under all other circumstances including but not limited to the
- computation's already having begun, the computation's already having
- finished, and the computation's having been scheduled for execution on a
- remote system for which a determination of whether or not it commenced
- before being cancelled cannot be made without blocking.
- """
- raise NotImplementedError()
-
- # NOTE(nathaniel): Here too this isn't the return type that I'd want this
- # method to have if it were up to me. I think I'd go with another sum type
- # like:
- #
- # NOT_CANCELLED (this object's cancel method hasn't been called)
- # NOT_COMMENCED
- # COMMENCED_AND_NOT_COMPLETED
- # PARTIAL_RESULT<Partial_Result_Type>
- # COMPLETED<Result_Type>
- # UNCANCELLABLE
- # NOT_IMMEDIATELY_DETERMINABLE
- #
- # Notice how giving the cancel method the right semantics obviates most
- # reasons for this method to exist.
- @abc.abstractmethod
- def cancelled(self):
- """Describes whether the computation was cancelled.
-
- This method does not block.
-
- Returns:
- True if the computation was cancelled any time before its result became
- immediately available. False under all other circumstances including but
- not limited to this object's cancel method not having been called and
- the computation's result having become immediately available.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def running(self):
- """Describes whether the computation is taking place.
-
- This method does not block.
-
- Returns:
- True if the computation is scheduled to take place in the future or is
- taking place now, or False if the computation took place in the past or
- was cancelled.
- """
- raise NotImplementedError()
-
- # NOTE(nathaniel): These aren't quite the semantics I'd like here either. I
- # would rather this only returned True in cases in which the underlying
- # computation completed successfully. A computation's having been cancelled
- # conflicts with considering that computation "done".
- @abc.abstractmethod
- def done(self):
- """Describes whether the computation has taken place.
-
- This method does not block.
-
- Returns:
- True if the computation is known to have either completed or have been
- unscheduled or interrupted. False if the computation may possibly be
- executing or scheduled to execute later.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def result(self, timeout=None):
- """Accesses the outcome of the computation or raises its exception.
-
- This method may return immediately or may block.
-
- Args:
- timeout: The length of time in seconds to wait for the computation to
- finish or be cancelled, or None if this method should block until the
- computation has finished or is cancelled no matter how long that takes.
-
- Returns:
- The return value of the computation.
-
- Raises:
- TimeoutError: If a timeout value is passed and the computation does not
- terminate within the allotted time.
- CancelledError: If the computation was cancelled.
- Exception: If the computation raised an exception, this call will raise
- the same exception.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def exception(self, timeout=None):
- """Return the exception raised by the computation.
-
- This method may return immediately or may block.
-
- Args:
- timeout: The length of time in seconds to wait for the computation to
- terminate or be cancelled, or None if this method should block until
- the computation is terminated or is cancelled no matter how long that
- takes.
-
- Returns:
- The exception raised by the computation, or None if the computation did
- not raise an exception.
-
- Raises:
- TimeoutError: If a timeout value is passed and the computation does not
- terminate within the allotted time.
- CancelledError: If the computation was cancelled.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def traceback(self, timeout=None):
- """Access the traceback of the exception raised by the computation.
-
- This method may return immediately or may block.
-
- Args:
- timeout: The length of time in seconds to wait for the computation to
- terminate or be cancelled, or None if this method should block until
- the computation is terminated or is cancelled no matter how long that
- takes.
-
- Returns:
- The traceback of the exception raised by the computation, or None if the
- computation did not raise an exception.
-
- Raises:
- TimeoutError: If a timeout value is passed and the computation does not
- terminate within the allotted time.
- CancelledError: If the computation was cancelled.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def add_done_callback(self, fn):
- """Adds a function to be called at completion of the computation.
-
- The callback will be passed this Future object describing the outcome of
- the computation.
-
- If the computation has already completed, the callback will be called
- immediately.
-
- Args:
- fn: A callable taking this Future object as its single parameter.
- """
- raise NotImplementedError()
+"""A Future interface.
+
+Python doesn't have a Future interface in its standard library. In the absence
+of such a standard, three separate, incompatible implementations
+(concurrent.futures.Future, ndb.Future, and asyncio.Future) have appeared. This
+interface attempts to be as compatible as possible with
+concurrent.futures.Future. From ndb.Future it adopts a traceback-object accessor
+method.
+
+Unlike the concrete and implemented Future classes listed above, the Future
+class defined in this module is an entirely abstract interface that anyone may
+implement and use.
+
+The one known incompatibility between this interface and the interface of
+concurrent.futures.Future is that this interface defines its own CancelledError
+and TimeoutError exceptions rather than raising the implementation-private
+concurrent.futures._base.CancelledError and the
+built-in-but-only-in-3.3-and-later TimeoutError.
+"""
+
+import abc
+
+import six
+
+
+class TimeoutError(Exception):
+ """Indicates that a particular call timed out."""
+
+
+class CancelledError(Exception):
+ """Indicates that the computation underlying a Future was cancelled."""
+
+
+class Future(six.with_metaclass(abc.ABCMeta)):
+ """A representation of a computation in another control flow.
+
+ Computations represented by a Future may be yet to be begun, may be ongoing,
+ or may have already completed.
+ """
+
+ # NOTE(nathaniel): This isn't the return type that I would want to have if it
+ # were up to me. Were this interface being written from scratch, the return
+ # type of this method would probably be a sum type like:
+ #
+ # NOT_COMMENCED
+ # COMMENCED_AND_NOT_COMPLETED
+ # PARTIAL_RESULT<Partial_Result_Type>
+ # COMPLETED<Result_Type>
+ # UNCANCELLABLE
+ # NOT_IMMEDIATELY_DETERMINABLE
+ @abc.abstractmethod
+ def cancel(self):
+ """Attempts to cancel the computation.
+
+ This method does not block.
+
+ Returns:
+ True if the computation has not yet begun, will not be allowed to take
+ place, and determination of both was possible without blocking. False
+ under all other circumstances including but not limited to the
+ computation's already having begun, the computation's already having
+ finished, and the computation's having been scheduled for execution on a
+ remote system for which a determination of whether or not it commenced
+ before being cancelled cannot be made without blocking.
+ """
+ raise NotImplementedError()
+
+ # NOTE(nathaniel): Here too this isn't the return type that I'd want this
+ # method to have if it were up to me. I think I'd go with another sum type
+ # like:
+ #
+ # NOT_CANCELLED (this object's cancel method hasn't been called)
+ # NOT_COMMENCED
+ # COMMENCED_AND_NOT_COMPLETED
+ # PARTIAL_RESULT<Partial_Result_Type>
+ # COMPLETED<Result_Type>
+ # UNCANCELLABLE
+ # NOT_IMMEDIATELY_DETERMINABLE
+ #
+ # Notice how giving the cancel method the right semantics obviates most
+ # reasons for this method to exist.
+ @abc.abstractmethod
+ def cancelled(self):
+ """Describes whether the computation was cancelled.
+
+ This method does not block.
+
+ Returns:
+ True if the computation was cancelled any time before its result became
+ immediately available. False under all other circumstances including but
+ not limited to this object's cancel method not having been called and
+ the computation's result having become immediately available.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def running(self):
+ """Describes whether the computation is taking place.
+
+ This method does not block.
+
+ Returns:
+ True if the computation is scheduled to take place in the future or is
+ taking place now, or False if the computation took place in the past or
+ was cancelled.
+ """
+ raise NotImplementedError()
+
+ # NOTE(nathaniel): These aren't quite the semantics I'd like here either. I
+ # would rather this only returned True in cases in which the underlying
+ # computation completed successfully. A computation's having been cancelled
+ # conflicts with considering that computation "done".
+ @abc.abstractmethod
+ def done(self):
+ """Describes whether the computation has taken place.
+
+ This method does not block.
+
+ Returns:
+ True if the computation is known to have either completed or have been
+ unscheduled or interrupted. False if the computation may possibly be
+ executing or scheduled to execute later.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def result(self, timeout=None):
+ """Accesses the outcome of the computation or raises its exception.
+
+ This method may return immediately or may block.
+
+ Args:
+ timeout: The length of time in seconds to wait for the computation to
+ finish or be cancelled, or None if this method should block until the
+ computation has finished or is cancelled no matter how long that takes.
+
+ Returns:
+ The return value of the computation.
+
+ Raises:
+ TimeoutError: If a timeout value is passed and the computation does not
+ terminate within the allotted time.
+ CancelledError: If the computation was cancelled.
+ Exception: If the computation raised an exception, this call will raise
+ the same exception.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def exception(self, timeout=None):
+ """Return the exception raised by the computation.
+
+ This method may return immediately or may block.
+
+ Args:
+ timeout: The length of time in seconds to wait for the computation to
+ terminate or be cancelled, or None if this method should block until
+ the computation is terminated or is cancelled no matter how long that
+ takes.
+
+ Returns:
+ The exception raised by the computation, or None if the computation did
+ not raise an exception.
+
+ Raises:
+ TimeoutError: If a timeout value is passed and the computation does not
+ terminate within the allotted time.
+ CancelledError: If the computation was cancelled.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def traceback(self, timeout=None):
+ """Access the traceback of the exception raised by the computation.
+
+ This method may return immediately or may block.
+
+ Args:
+ timeout: The length of time in seconds to wait for the computation to
+ terminate or be cancelled, or None if this method should block until
+ the computation is terminated or is cancelled no matter how long that
+ takes.
+
+ Returns:
+ The traceback of the exception raised by the computation, or None if the
+ computation did not raise an exception.
+
+ Raises:
+ TimeoutError: If a timeout value is passed and the computation does not
+ terminate within the allotted time.
+ CancelledError: If the computation was cancelled.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_done_callback(self, fn):
+ """Adds a function to be called at completion of the computation.
+
+ The callback will be passed this Future object describing the outcome of
+ the computation.
+
+ If the computation has already completed, the callback will be called
+ immediately.
+
+ Args:
+ fn: A callable taking this Future object as its single parameter.
+ """
+ raise NotImplementedError()
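
Because the Future class above is purely abstract, the sketch below only shows how a caller might consume an arbitrary implementation of it; some_future is a hypothetical instance handed out by the framework, and the timeout value is arbitrary.

    from grpc.framework.foundation import future

    def wait_for_result(some_future, timeout=10.0):
        """Blocks up to `timeout` seconds and returns the computation's value."""
        try:
            return some_future.result(timeout=timeout)
        except future.TimeoutError:
            # Best-effort cancellation; cancel() may return False if the
            # computation has already begun.
            some_future.cancel()
            raise
        except future.CancelledError:
            print('computation was cancelled before producing a result')
            raise

    # Completion callbacks receive the Future itself once it is done:
    # some_future.add_done_callback(
    #     lambda f: print('done (cancelled=%s)' % f.cancelled()))
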
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/logging_pool.py b/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/logging_pool.py
index 421999fb1c..e3e3f4f23a 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/logging_pool.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/logging_pool.py
@@ -1,72 +1,72 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""A thread pool that logs exceptions raised by tasks executed within it."""
-
-import logging
-
-from concurrent import futures
-
+"""A thread pool that logs exceptions raised by tasks executed within it."""
+
+import logging
+
+from concurrent import futures
+
_LOGGER = logging.getLogger(__name__)
-
-
-def _wrap(behavior):
- """Wraps an arbitrary callable behavior in exception-logging."""
-
- def _wrapping(*args, **kwargs):
- try:
- return behavior(*args, **kwargs)
+
+
+def _wrap(behavior):
+ """Wraps an arbitrary callable behavior in exception-logging."""
+
+ def _wrapping(*args, **kwargs):
+ try:
+ return behavior(*args, **kwargs)
except Exception:
_LOGGER.exception(
- 'Unexpected exception from %s executed in logging pool!',
- behavior)
- raise
-
- return _wrapping
-
-
-class _LoggingPool(object):
- """An exception-logging futures.ThreadPoolExecutor-compatible thread pool."""
-
- def __init__(self, backing_pool):
- self._backing_pool = backing_pool
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- self._backing_pool.shutdown(wait=True)
-
- def submit(self, fn, *args, **kwargs):
- return self._backing_pool.submit(_wrap(fn), *args, **kwargs)
-
- def map(self, func, *iterables, **kwargs):
+ 'Unexpected exception from %s executed in logging pool!',
+ behavior)
+ raise
+
+ return _wrapping
+
+
+class _LoggingPool(object):
+ """An exception-logging futures.ThreadPoolExecutor-compatible thread pool."""
+
+ def __init__(self, backing_pool):
+ self._backing_pool = backing_pool
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self._backing_pool.shutdown(wait=True)
+
+ def submit(self, fn, *args, **kwargs):
+ return self._backing_pool.submit(_wrap(fn), *args, **kwargs)
+
+ def map(self, func, *iterables, **kwargs):
return self._backing_pool.map(_wrap(func),
*iterables,
timeout=kwargs.get('timeout', None))
-
- def shutdown(self, wait=True):
- self._backing_pool.shutdown(wait=wait)
-
-
-def pool(max_workers):
- """Creates a thread pool that logs exceptions raised by the tasks within it.
-
- Args:
- max_workers: The maximum number of worker threads to allow the pool.
-
- Returns:
- A futures.ThreadPoolExecutor-compatible thread pool that logs exceptions
- raised by the tasks executed within it.
- """
- return _LoggingPool(futures.ThreadPoolExecutor(max_workers))
+
+ def shutdown(self, wait=True):
+ self._backing_pool.shutdown(wait=wait)
+
+
+def pool(max_workers):
+ """Creates a thread pool that logs exceptions raised by the tasks within it.
+
+ Args:
+ max_workers: The maximum number of worker threads to allow the pool.
+
+ Returns:
+ A futures.ThreadPoolExecutor-compatible thread pool that logs exceptions
+ raised by the tasks executed within it.
+ """
+ return _LoggingPool(futures.ThreadPoolExecutor(max_workers))
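
A short sketch of how this pool is typically used; the worker count and the submitted lambda are arbitrary. Tasks behave like ordinary concurrent.futures tasks, except that any exception they raise is additionally logged by the _wrap helper above.

    from grpc.framework.foundation import logging_pool

    with logging_pool.pool(max_workers=4) as work_pool:
        task = work_pool.submit(lambda x: x * x, 7)
        print(task.result())  # 49; a raising task would also be logged by the pool
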
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/stream.py b/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/stream.py
index fd47977b89..cb11129aea 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/stream.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/stream.py
@@ -1,45 +1,45 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Interfaces related to streams of values or objects."""
-
-import abc
-
-import six
-
-
-class Consumer(six.with_metaclass(abc.ABCMeta)):
- """Interface for consumers of finite streams of values or objects."""
-
- @abc.abstractmethod
- def consume(self, value):
- """Accepts a value.
-
- Args:
- value: Any value accepted by this Consumer.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def terminate(self):
- """Indicates to this Consumer that no more values will be supplied."""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def consume_and_terminate(self, value):
- """Supplies a value and signals that no more values will be supplied.
-
- Args:
- value: Any value accepted by this Consumer.
- """
- raise NotImplementedError()
+"""Interfaces related to streams of values or objects."""
+
+import abc
+
+import six
+
+
+class Consumer(six.with_metaclass(abc.ABCMeta)):
+ """Interface for consumers of finite streams of values or objects."""
+
+ @abc.abstractmethod
+ def consume(self, value):
+ """Accepts a value.
+
+ Args:
+ value: Any value accepted by this Consumer.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def terminate(self):
+ """Indicates to this Consumer that no more values will be supplied."""
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def consume_and_terminate(self, value):
+ """Supplies a value and signals that no more values will be supplied.
+
+ Args:
+ value: Any value accepted by this Consumer.
+ """
+ raise NotImplementedError()
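
Since Consumer is an abstract interface, the sketch below defines a trivial concrete implementation; the PrintingConsumer name and the sample values are invented for illustration.

    from grpc.framework.foundation import stream

    class PrintingConsumer(stream.Consumer):
        """Prints each consumed value and notes termination of the stream."""

        def consume(self, value):
            print('got value: %s' % (value,))

        def terminate(self):
            print('stream terminated')

        def consume_and_terminate(self, value):
            self.consume(value)
            self.terminate()

    consumer = PrintingConsumer()
    consumer.consume('first')
    consumer.consume_and_terminate('last')
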
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/stream_util.py b/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/stream_util.py
index 1faaf29bd7..9b832ba804 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/stream_util.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/framework/foundation/stream_util.py
@@ -1,148 +1,148 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Helpful utilities related to the stream module."""
-
-import logging
-import threading
-
-from grpc.framework.foundation import stream
-
-_NO_VALUE = object()
+"""Helpful utilities related to the stream module."""
+
+import logging
+import threading
+
+from grpc.framework.foundation import stream
+
+_NO_VALUE = object()
_LOGGER = logging.getLogger(__name__)
-
-
-class TransformingConsumer(stream.Consumer):
- """A stream.Consumer that passes a transformation of its input to another."""
-
- def __init__(self, transformation, downstream):
- self._transformation = transformation
- self._downstream = downstream
-
+
+
+class TransformingConsumer(stream.Consumer):
+ """A stream.Consumer that passes a transformation of its input to another."""
+
+ def __init__(self, transformation, downstream):
+ self._transformation = transformation
+ self._downstream = downstream
+
+ def consume(self, value):
+ self._downstream.consume(self._transformation(value))
+
+ def terminate(self):
+ self._downstream.terminate()
+
+ def consume_and_terminate(self, value):
+ self._downstream.consume_and_terminate(self._transformation(value))
+
+
+class IterableConsumer(stream.Consumer):
+ """A Consumer that when iterated over emits the values it has consumed."""
+
+ def __init__(self):
+ self._condition = threading.Condition()
+ self._values = []
+ self._active = True
+
def consume(self, value):
- self._downstream.consume(self._transformation(value))
-
- def terminate(self):
- self._downstream.terminate()
-
- def consume_and_terminate(self, value):
- self._downstream.consume_and_terminate(self._transformation(value))
-
-
-class IterableConsumer(stream.Consumer):
- """A Consumer that when iterated over emits the values it has consumed."""
-
- def __init__(self):
- self._condition = threading.Condition()
- self._values = []
- self._active = True
-
- def consume(self, value):
- with self._condition:
- if self._active:
+ with self._condition:
+ if self._active:
self._values.append(value)
- self._condition.notify()
-
- def terminate(self):
- with self._condition:
- self._active = False
- self._condition.notify()
-
+ self._condition.notify()
+
+ def terminate(self):
+ with self._condition:
+ self._active = False
+ self._condition.notify()
+
def consume_and_terminate(self, value):
- with self._condition:
- if self._active:
+ with self._condition:
+ if self._active:
self._values.append(value)
- self._active = False
- self._condition.notify()
-
- def __iter__(self):
- return self
-
- def __next__(self):
- return self.next()
-
- def next(self):
- with self._condition:
- while self._active and not self._values:
- self._condition.wait()
- if self._values:
- return self._values.pop(0)
- else:
- raise StopIteration()
-
-
-class ThreadSwitchingConsumer(stream.Consumer):
- """A Consumer decorator that affords serialization and asynchrony."""
-
- def __init__(self, sink, pool):
- self._lock = threading.Lock()
- self._sink = sink
- self._pool = pool
- # True if self._spin has been submitted to the pool to be called once and
- # that call has not yet returned, False otherwise.
- self._spinning = False
- self._values = []
- self._active = True
-
- def _spin(self, sink, value, terminate):
- while True:
- try:
- if value is _NO_VALUE:
- sink.terminate()
- elif terminate:
- sink.consume_and_terminate(value)
- else:
- sink.consume(value)
- except Exception as e: # pylint:disable=broad-except
+ self._active = False
+ self._condition.notify()
+
+ def __iter__(self):
+ return self
+
+ def __next__(self):
+ return self.next()
+
+ def next(self):
+ with self._condition:
+ while self._active and not self._values:
+ self._condition.wait()
+ if self._values:
+ return self._values.pop(0)
+ else:
+ raise StopIteration()
+
+
+class ThreadSwitchingConsumer(stream.Consumer):
+ """A Consumer decorator that affords serialization and asynchrony."""
+
+ def __init__(self, sink, pool):
+ self._lock = threading.Lock()
+ self._sink = sink
+ self._pool = pool
+ # True if self._spin has been submitted to the pool to be called once and
+ # that call has not yet returned, False otherwise.
+ self._spinning = False
+ self._values = []
+ self._active = True
+
+ def _spin(self, sink, value, terminate):
+ while True:
+ try:
+ if value is _NO_VALUE:
+ sink.terminate()
+ elif terminate:
+ sink.consume_and_terminate(value)
+ else:
+ sink.consume(value)
+ except Exception as e: # pylint:disable=broad-except
_LOGGER.exception(e)
-
- with self._lock:
- if terminate:
- self._spinning = False
- return
- elif self._values:
- value = self._values.pop(0)
- terminate = not self._values and not self._active
- elif not self._active:
- value = _NO_VALUE
- terminate = True
- else:
- self._spinning = False
- return
-
- def consume(self, value):
- with self._lock:
- if self._active:
- if self._spinning:
- self._values.append(value)
- else:
- self._pool.submit(self._spin, self._sink, value, False)
- self._spinning = True
-
- def terminate(self):
- with self._lock:
- if self._active:
- self._active = False
- if not self._spinning:
- self._pool.submit(self._spin, self._sink, _NO_VALUE, True)
- self._spinning = True
-
- def consume_and_terminate(self, value):
- with self._lock:
- if self._active:
- self._active = False
- if self._spinning:
- self._values.append(value)
- else:
- self._pool.submit(self._spin, self._sink, value, True)
- self._spinning = True
+
+ with self._lock:
+ if terminate:
+ self._spinning = False
+ return
+ elif self._values:
+ value = self._values.pop(0)
+ terminate = not self._values and not self._active
+ elif not self._active:
+ value = _NO_VALUE
+ terminate = True
+ else:
+ self._spinning = False
+ return
+
+ def consume(self, value):
+ with self._lock:
+ if self._active:
+ if self._spinning:
+ self._values.append(value)
+ else:
+ self._pool.submit(self._spin, self._sink, value, False)
+ self._spinning = True
+
+ def terminate(self):
+ with self._lock:
+ if self._active:
+ self._active = False
+ if not self._spinning:
+ self._pool.submit(self._spin, self._sink, _NO_VALUE, True)
+ self._spinning = True
+
+ def consume_and_terminate(self, value):
+ with self._lock:
+ if self._active:
+ self._active = False
+ if self._spinning:
+ self._values.append(value)
+ else:
+ self._pool.submit(self._spin, self._sink, value, True)
+ self._spinning = True
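
A small sketch chaining two of the consumers defined above: values pushed through a TransformingConsumer land in an IterableConsumer and come back out uppercased. The sample strings and the str.upper transformation are arbitrary.

    from grpc.framework.foundation import stream_util

    sink = stream_util.IterableConsumer()
    upper = stream_util.TransformingConsumer(str.upper, sink)

    upper.consume('hello')
    upper.consume_and_terminate('world')

    # The sink has been terminated, so iteration ends instead of blocking.
    print(list(sink))  # ['HELLO', 'WORLD']
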
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/__init__.py b/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/__init__.py
index 5fb4f3c3cf..1841020f80 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/__init__.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/__init__.py
@@ -1,11 +1,11 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/base/__init__.py b/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/base/__init__.py
index 5fb4f3c3cf..1841020f80 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/base/__init__.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/base/__init__.py
@@ -1,11 +1,11 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/base/base.py b/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/base/base.py
index 82c44f9110..45ce9484bc 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/base/base.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/base/base.py
@@ -1,48 +1,48 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""The base interface of RPC Framework.
-
-Implementations of this interface support the conduct of "operations":
-exchanges between two distinct ends of an arbitrary number of data payloads
-and metadata such as a name for the operation, initial and terminal metadata
-in each direction, and flow control. These operations may be used for transfers
-of data, remote procedure calls, status indication, or anything else
-applications choose.
-"""
-
-# threading is referenced from specification in this module.
-import abc
-import enum
-import threading # pylint: disable=unused-import
-
-import six
-
-# abandonment is referenced from specification in this module.
-from grpc.framework.foundation import abandonment # pylint: disable=unused-import
-
+"""The base interface of RPC Framework.
+
+Implementations of this interface support the conduct of "operations":
+exchanges between two distinct ends of an arbitrary number of data payloads
+and metadata such as a name for the operation, initial and terminal metadata
+in each direction, and flow control. These operations may be used for transfers
+of data, remote procedure calls, status indication, or anything else
+applications choose.
+"""
+
+# threading is referenced from specification in this module.
+import abc
+import enum
+import threading # pylint: disable=unused-import
+
+import six
+
+# abandonment is referenced from specification in this module.
+from grpc.framework.foundation import abandonment # pylint: disable=unused-import
+
# pylint: disable=too-many-arguments
+
-
-class NoSuchMethodError(Exception):
- """Indicates that an unrecognized operation has been called.
-
+class NoSuchMethodError(Exception):
+ """Indicates that an unrecognized operation has been called.
+
Attributes:
- code: A code value to communicate to the other side of the operation
- along with indication of operation termination. May be None.
- details: A details value to communicate to the other side of the
- operation along with indication of operation termination. May be None.
- """
+ code: A code value to communicate to the other side of the operation
+ along with indication of operation termination. May be None.
+ details: A details value to communicate to the other side of the
+ operation along with indication of operation termination. May be None.
+ """
def __init__(self, code, details):
"""Constructor.
@@ -54,277 +54,277 @@ class NoSuchMethodError(Exception):
operation along with indication of operation termination. May be None.
"""
super(NoSuchMethodError, self).__init__()
- self.code = code
- self.details = details
-
-
-class Outcome(object):
- """The outcome of an operation.
-
- Attributes:
- kind: A Kind value coarsely identifying how the operation terminated.
- code: An application-specific code value or None if no such value was
- provided.
- details: An application-specific details value or None if no such value was
- provided.
- """
-
- @enum.unique
- class Kind(enum.Enum):
- """Ways in which an operation can terminate."""
-
- COMPLETED = 'completed'
- CANCELLED = 'cancelled'
- EXPIRED = 'expired'
- LOCAL_SHUTDOWN = 'local shutdown'
- REMOTE_SHUTDOWN = 'remote shutdown'
- RECEPTION_FAILURE = 'reception failure'
- TRANSMISSION_FAILURE = 'transmission failure'
- LOCAL_FAILURE = 'local failure'
- REMOTE_FAILURE = 'remote failure'
-
-
-class Completion(six.with_metaclass(abc.ABCMeta)):
- """An aggregate of the values exchanged upon operation completion.
-
- Attributes:
-    terminal_metadata: A terminal metadata value for the operation.
- code: A code value for the operation.
- message: A message value for the operation.
- """
-
-
-class OperationContext(six.with_metaclass(abc.ABCMeta)):
- """Provides operation-related information and action."""
-
- @abc.abstractmethod
- def outcome(self):
- """Indicates the operation's outcome (or that the operation is ongoing).
-
- Returns:
- None if the operation is still active or the Outcome value for the
- operation if it has terminated.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def add_termination_callback(self, callback):
- """Adds a function to be called upon operation termination.
-
- Args:
- callback: A callable to be passed an Outcome value on operation
- termination.
-
- Returns:
- None if the operation has not yet terminated and the passed callback will
- later be called when it does terminate, or if the operation has already
- terminated an Outcome value describing the operation termination and the
- passed callback will not be called as a result of this method call.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def time_remaining(self):
- """Describes the length of allowed time remaining for the operation.
-
- Returns:
- A nonnegative float indicating the length of allowed time in seconds
- remaining for the operation to complete before it is considered to have
- timed out. Zero is returned if the operation has terminated.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def cancel(self):
- """Cancels the operation if the operation has not yet terminated."""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def fail(self, exception):
- """Indicates that the operation has failed.
-
- Args:
- exception: An exception germane to the operation failure. May be None.
- """
- raise NotImplementedError()
-
-
-class Operator(six.with_metaclass(abc.ABCMeta)):
- """An interface through which to participate in an operation."""
-
- @abc.abstractmethod
- def advance(self,
- initial_metadata=None,
- payload=None,
- completion=None,
- allowance=None):
- """Progresses the operation.
-
- Args:
- initial_metadata: An initial metadata value. Only one may ever be
- communicated in each direction for an operation, and they must be
- communicated no later than either the first payload or the completion.
- payload: A payload value.
- completion: A Completion value. May only ever be non-None once in either
- direction, and no payloads may be passed after it has been communicated.
- allowance: A positive integer communicating the number of additional
- payloads allowed to be passed by the remote side of the operation.
- """
- raise NotImplementedError()
-
-
-class ProtocolReceiver(six.with_metaclass(abc.ABCMeta)):
- """A means of receiving protocol values during an operation."""
-
- @abc.abstractmethod
- def context(self, protocol_context):
- """Accepts the protocol context object for the operation.
-
- Args:
- protocol_context: The protocol context object for the operation.
- """
- raise NotImplementedError()
-
-
-class Subscription(six.with_metaclass(abc.ABCMeta)):
- """Describes customer code's interest in values from the other side.
-
- Attributes:
- kind: A Kind value describing the overall kind of this value.
- termination_callback: A callable to be passed the Outcome associated with
- the operation after it has terminated. Must be non-None if kind is
- Kind.TERMINATION_ONLY. Must be None otherwise.
- allowance: A callable behavior that accepts positive integers representing
- the number of additional payloads allowed to be passed to the other side
- of the operation. Must be None if kind is Kind.FULL. Must not be None
- otherwise.
- operator: An Operator to be passed values from the other side of the
- operation. Must be non-None if kind is Kind.FULL. Must be None otherwise.
- protocol_receiver: A ProtocolReceiver to be passed protocol objects as they
- become available during the operation. Must be non-None if kind is
- Kind.FULL.
- """
-
- @enum.unique
- class Kind(enum.Enum):
-
- NONE = 'none'
- TERMINATION_ONLY = 'termination only'
- FULL = 'full'
-
-
-class Servicer(six.with_metaclass(abc.ABCMeta)):
- """Interface for service implementations."""
-
- @abc.abstractmethod
- def service(self, group, method, context, output_operator):
- """Services an operation.
-
- Args:
- group: The group identifier of the operation to be serviced.
- method: The method identifier of the operation to be serviced.
- context: An OperationContext object affording contextual information and
- actions.
- output_operator: An Operator that will accept output values of the
- operation.
-
- Returns:
- A Subscription via which this object may or may not accept more values of
- the operation.
-
- Raises:
- NoSuchMethodError: If this Servicer does not handle operations with the
- given group and method.
- abandonment.Abandoned: If the operation has been aborted and there no
- longer is any reason to service the operation.
- """
- raise NotImplementedError()
-
-
-class End(six.with_metaclass(abc.ABCMeta)):
- """Common type for entry-point objects on both sides of an operation."""
-
- @abc.abstractmethod
- def start(self):
- """Starts this object's service of operations."""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def stop(self, grace):
- """Stops this object's service of operations.
-
- This object will refuse service of new operations as soon as this method is
- called but operations under way at the time of the call may be given a
- grace period during which they are allowed to finish.
-
- Args:
- grace: A duration of time in seconds to allow ongoing operations to
- terminate before being forcefully terminated by the stopping of this
- End. May be zero to terminate all ongoing operations and immediately
- stop.
-
- Returns:
- A threading.Event that will be set to indicate all operations having
- terminated and this End having completely stopped. The returned event
- may not be set until after the full grace period (if some ongoing
- operation continues for the full length of the period) or it may be set
- much sooner (if for example this End had no operations in progress at
- the time its stop method was called).
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def operate(self,
- group,
- method,
- subscription,
- timeout,
- initial_metadata=None,
- payload=None,
- completion=None,
- protocol_options=None):
- """Commences an operation.
-
- Args:
- group: The group identifier of the invoked operation.
- method: The method identifier of the invoked operation.
- subscription: A Subscription to which the results of the operation will be
- passed.
- timeout: A length of time in seconds to allow for the operation.
- initial_metadata: An initial metadata value to be sent to the other side
- of the operation. May be None if the initial metadata will be later
- passed via the returned operator or if there will be no initial metadata
- passed at all.
- payload: An initial payload for the operation.
- completion: A Completion value indicating the end of transmission to the
- other side of the operation.
- protocol_options: A value specified by the provider of a Base interface
- implementation affording custom state and behavior.
-
- Returns:
- A pair of objects affording information about the operation and action
- continuing the operation. The first element of the returned pair is an
- OperationContext for the operation and the second element of the
- returned pair is an Operator to which operation values not passed in
- this call should later be passed.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def operation_stats(self):
- """Reports the number of terminated operations broken down by outcome.
-
- Returns:
- A dictionary from Outcome.Kind value to an integer identifying the number
- of operations that terminated with that outcome kind.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def add_idle_action(self, action):
- """Adds an action to be called when this End has no ongoing operations.
-
- Args:
- action: A callable that accepts no arguments.
- """
- raise NotImplementedError()
+ self.code = code
+ self.details = details
+
+
+class Outcome(object):
+ """The outcome of an operation.
+
+ Attributes:
+ kind: A Kind value coarsely identifying how the operation terminated.
+ code: An application-specific code value or None if no such value was
+ provided.
+ details: An application-specific details value or None if no such value was
+ provided.
+ """
+
+ @enum.unique
+ class Kind(enum.Enum):
+ """Ways in which an operation can terminate."""
+
+ COMPLETED = 'completed'
+ CANCELLED = 'cancelled'
+ EXPIRED = 'expired'
+ LOCAL_SHUTDOWN = 'local shutdown'
+ REMOTE_SHUTDOWN = 'remote shutdown'
+ RECEPTION_FAILURE = 'reception failure'
+ TRANSMISSION_FAILURE = 'transmission failure'
+ LOCAL_FAILURE = 'local failure'
+ REMOTE_FAILURE = 'remote failure'
+
+
+class Completion(six.with_metaclass(abc.ABCMeta)):
+ """An aggregate of the values exchanged upon operation completion.
+
+ Attributes:
+    terminal_metadata: A terminal metadata value for the operation.
+ code: A code value for the operation.
+ message: A message value for the operation.
+ """
+
+
+class OperationContext(six.with_metaclass(abc.ABCMeta)):
+ """Provides operation-related information and action."""
+
+ @abc.abstractmethod
+ def outcome(self):
+ """Indicates the operation's outcome (or that the operation is ongoing).
+
+ Returns:
+ None if the operation is still active or the Outcome value for the
+ operation if it has terminated.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_termination_callback(self, callback):
+ """Adds a function to be called upon operation termination.
+
+ Args:
+ callback: A callable to be passed an Outcome value on operation
+ termination.
+
+ Returns:
+ None if the operation has not yet terminated and the passed callback will
+ later be called when it does terminate, or if the operation has already
+ terminated an Outcome value describing the operation termination and the
+ passed callback will not be called as a result of this method call.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def time_remaining(self):
+ """Describes the length of allowed time remaining for the operation.
+
+ Returns:
+ A nonnegative float indicating the length of allowed time in seconds
+ remaining for the operation to complete before it is considered to have
+ timed out. Zero is returned if the operation has terminated.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def cancel(self):
+ """Cancels the operation if the operation has not yet terminated."""
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def fail(self, exception):
+ """Indicates that the operation has failed.
+
+ Args:
+ exception: An exception germane to the operation failure. May be None.
+ """
+ raise NotImplementedError()
+
+
+class Operator(six.with_metaclass(abc.ABCMeta)):
+ """An interface through which to participate in an operation."""
+
+ @abc.abstractmethod
+ def advance(self,
+ initial_metadata=None,
+ payload=None,
+ completion=None,
+ allowance=None):
+ """Progresses the operation.
+
+ Args:
+ initial_metadata: An initial metadata value. Only one may ever be
+ communicated in each direction for an operation, and they must be
+ communicated no later than either the first payload or the completion.
+ payload: A payload value.
+ completion: A Completion value. May only ever be non-None once in either
+ direction, and no payloads may be passed after it has been communicated.
+ allowance: A positive integer communicating the number of additional
+ payloads allowed to be passed by the remote side of the operation.
+ """
+ raise NotImplementedError()
+
+
+class ProtocolReceiver(six.with_metaclass(abc.ABCMeta)):
+ """A means of receiving protocol values during an operation."""
+
+ @abc.abstractmethod
+ def context(self, protocol_context):
+ """Accepts the protocol context object for the operation.
+
+ Args:
+ protocol_context: The protocol context object for the operation.
+ """
+ raise NotImplementedError()
+
+
+class Subscription(six.with_metaclass(abc.ABCMeta)):
+ """Describes customer code's interest in values from the other side.
+
+ Attributes:
+ kind: A Kind value describing the overall kind of this value.
+ termination_callback: A callable to be passed the Outcome associated with
+ the operation after it has terminated. Must be non-None if kind is
+ Kind.TERMINATION_ONLY. Must be None otherwise.
+ allowance: A callable behavior that accepts positive integers representing
+ the number of additional payloads allowed to be passed to the other side
+ of the operation. Must be None if kind is Kind.FULL. Must not be None
+ otherwise.
+ operator: An Operator to be passed values from the other side of the
+ operation. Must be non-None if kind is Kind.FULL. Must be None otherwise.
+ protocol_receiver: A ProtocolReceiver to be passed protocol objects as they
+ become available during the operation. Must be non-None if kind is
+ Kind.FULL.
+ """
+
+ @enum.unique
+ class Kind(enum.Enum):
+
+ NONE = 'none'
+ TERMINATION_ONLY = 'termination only'
+ FULL = 'full'
+
+
+class Servicer(six.with_metaclass(abc.ABCMeta)):
+ """Interface for service implementations."""
+
+ @abc.abstractmethod
+ def service(self, group, method, context, output_operator):
+ """Services an operation.
+
+ Args:
+ group: The group identifier of the operation to be serviced.
+ method: The method identifier of the operation to be serviced.
+ context: An OperationContext object affording contextual information and
+ actions.
+ output_operator: An Operator that will accept output values of the
+ operation.
+
+ Returns:
+ A Subscription via which this object may or may not accept more values of
+ the operation.
+
+ Raises:
+ NoSuchMethodError: If this Servicer does not handle operations with the
+ given group and method.
+ abandonment.Abandoned: If the operation has been aborted and there no
+ longer is any reason to service the operation.
+ """
+ raise NotImplementedError()
+
+
+class End(six.with_metaclass(abc.ABCMeta)):
+ """Common type for entry-point objects on both sides of an operation."""
+
+ @abc.abstractmethod
+ def start(self):
+ """Starts this object's service of operations."""
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def stop(self, grace):
+ """Stops this object's service of operations.
+
+ This object will refuse service of new operations as soon as this method is
+ called but operations under way at the time of the call may be given a
+ grace period during which they are allowed to finish.
+
+ Args:
+ grace: A duration of time in seconds to allow ongoing operations to
+ terminate before being forcefully terminated by the stopping of this
+ End. May be zero to terminate all ongoing operations and immediately
+ stop.
+
+ Returns:
+ A threading.Event that will be set to indicate all operations having
+ terminated and this End having completely stopped. The returned event
+ may not be set until after the full grace period (if some ongoing
+ operation continues for the full length of the period) or it may be set
+ much sooner (if for example this End had no operations in progress at
+ the time its stop method was called).
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def operate(self,
+ group,
+ method,
+ subscription,
+ timeout,
+ initial_metadata=None,
+ payload=None,
+ completion=None,
+ protocol_options=None):
+ """Commences an operation.
+
+ Args:
+ group: The group identifier of the invoked operation.
+ method: The method identifier of the invoked operation.
+ subscription: A Subscription to which the results of the operation will be
+ passed.
+ timeout: A length of time in seconds to allow for the operation.
+ initial_metadata: An initial metadata value to be sent to the other side
+ of the operation. May be None if the initial metadata will be later
+ passed via the returned operator or if there will be no initial metadata
+ passed at all.
+ payload: An initial payload for the operation.
+ completion: A Completion value indicating the end of transmission to the
+ other side of the operation.
+ protocol_options: A value specified by the provider of a Base interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ A pair of objects affording information about the operation and action
+ continuing the operation. The first element of the returned pair is an
+ OperationContext for the operation and the second element of the
+ returned pair is an Operator to which operation values not passed in
+ this call should later be passed.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def operation_stats(self):
+ """Reports the number of terminated operations broken down by outcome.
+
+ Returns:
+ A dictionary from Outcome.Kind value to an integer identifying the number
+ of operations that terminated with that outcome kind.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_idle_action(self, action):
+ """Adds an action to be called when this End has no ongoing operations.
+
+ Args:
+ action: A callable that accepts no arguments.
+ """
+ raise NotImplementedError()
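
The interfaces above are abstract; as one concrete illustration, the sketch below outlines a Servicer that recognizes a single operation and raises NoSuchMethodError for everything else. The group and method names are invented, and a real implementation would return a base.Subscription (the utilities module in the next hunk provides a convenient constructor) instead of raising NotImplementedError.

    from grpc.framework.interfaces.base import base

    class SingleMethodServicer(base.Servicer):
        """Services exactly one (group, method) pair and rejects all others."""

        def service(self, group, method, context, output_operator):
            if (group, method) != ('example.Echo', 'UnaryEcho'):
                # Unknown operation: report it with no particular code or details.
                raise base.NoSuchMethodError(None, None)
            # A full implementation would build and return a base.Subscription
            # describing its interest in values from the other side.
            raise NotImplementedError('illustration only')
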
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/base/utilities.py b/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/base/utilities.py
index 281db62b5d..ad803164ff 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/base/utilities.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/base/utilities.py
@@ -1,71 +1,71 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Utilities for use with the base interface of RPC Framework."""
-
-import collections
-
-from grpc.framework.interfaces.base import base
-
-
-class _Completion(base.Completion,
+"""Utilities for use with the base interface of RPC Framework."""
+
+import collections
+
+from grpc.framework.interfaces.base import base
+
+
+class _Completion(base.Completion,
collections.namedtuple('_Completion', (
'terminal_metadata',
'code',
'message',
))):
- """A trivial implementation of base.Completion."""
-
-
-class _Subscription(base.Subscription,
- collections.namedtuple('_Subscription', (
+ """A trivial implementation of base.Completion."""
+
+
+class _Subscription(base.Subscription,
+ collections.namedtuple('_Subscription', (
'kind',
'termination_callback',
'allowance',
'operator',
'protocol_receiver',
))):
- """A trivial implementation of base.Subscription."""
-
-
-_NONE_SUBSCRIPTION = _Subscription(base.Subscription.Kind.NONE, None, None,
- None, None)
-
-
-def completion(terminal_metadata, code, message):
- """Creates a base.Completion aggregating the given operation values.
-
- Args:
-    terminal_metadata: A terminal metadata value for an operation.
- code: A code value for an operation.
- message: A message value for an operation.
-
- Returns:
- A base.Completion aggregating the given operation values.
- """
- return _Completion(terminal_metadata, code, message)
-
-
-def full_subscription(operator, protocol_receiver):
- """Creates a "full" base.Subscription for the given base.Operator.
-
- Args:
- operator: A base.Operator to be used in an operation.
- protocol_receiver: A base.ProtocolReceiver to be used in an operation.
-
- Returns:
- A base.Subscription of kind base.Subscription.Kind.FULL wrapping the given
- base.Operator and base.ProtocolReceiver.
- """
- return _Subscription(base.Subscription.Kind.FULL, None, None, operator,
- protocol_receiver)
+ """A trivial implementation of base.Subscription."""
+
+
+_NONE_SUBSCRIPTION = _Subscription(base.Subscription.Kind.NONE, None, None,
+ None, None)
+
+
+def completion(terminal_metadata, code, message):
+ """Creates a base.Completion aggregating the given operation values.
+
+ Args:
+    terminal_metadata: A terminal metadata value for an operation.
+ code: A code value for an operation.
+ message: A message value for an operation.
+
+ Returns:
+ A base.Completion aggregating the given operation values.
+ """
+ return _Completion(terminal_metadata, code, message)
+
+
+def full_subscription(operator, protocol_receiver):
+ """Creates a "full" base.Subscription for the given base.Operator.
+
+ Args:
+ operator: A base.Operator to be used in an operation.
+ protocol_receiver: A base.ProtocolReceiver to be used in an operation.
+
+ Returns:
+ A base.Subscription of kind base.Subscription.Kind.FULL wrapping the given
+ base.Operator and base.ProtocolReceiver.
+ """
+ return _Subscription(base.Subscription.Kind.FULL, None, None, operator,
+ protocol_receiver)
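
A brief sketch of the two constructors in this module; the trace-id metadata, the no-op Operator and the no-op ProtocolReceiver are stand-ins for whatever application code would supply.

    from grpc.framework.interfaces.base import base
    from grpc.framework.interfaces.base import utilities

    class _NoOpOperator(base.Operator):

        def advance(self,
                    initial_metadata=None,
                    payload=None,
                    completion=None,
                    allowance=None):
            pass

    class _NoOpReceiver(base.ProtocolReceiver):

        def context(self, protocol_context):
            pass

    done = utilities.completion((('trace-id', 'abc123'),), None, 'OK')
    subscription = utilities.full_subscription(_NoOpOperator(), _NoOpReceiver())
    assert subscription.kind is base.Subscription.Kind.FULL
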
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/face/__init__.py b/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/face/__init__.py
index 5fb4f3c3cf..1841020f80 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/face/__init__.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/face/__init__.py
@@ -1,11 +1,11 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/face/face.py b/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/face/face.py
index 5b47f11d0d..fdca20fc2c 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/face/face.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/face/face.py
@@ -1,62 +1,62 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Interfaces defining the Face layer of RPC Framework."""
-
-import abc
-import collections
-import enum
-
-import six
-
-# cardinality, style, abandonment, future, and stream are
-# referenced from specification in this module.
-from grpc.framework.common import cardinality # pylint: disable=unused-import
-from grpc.framework.common import style # pylint: disable=unused-import
-from grpc.framework.foundation import abandonment # pylint: disable=unused-import
-from grpc.framework.foundation import future # pylint: disable=unused-import
-from grpc.framework.foundation import stream # pylint: disable=unused-import
-
+"""Interfaces defining the Face layer of RPC Framework."""
+
+import abc
+import collections
+import enum
+
+import six
+
+# cardinality, style, abandonment, future, and stream are
+# referenced from specification in this module.
+from grpc.framework.common import cardinality # pylint: disable=unused-import
+from grpc.framework.common import style # pylint: disable=unused-import
+from grpc.framework.foundation import abandonment # pylint: disable=unused-import
+from grpc.framework.foundation import future # pylint: disable=unused-import
+from grpc.framework.foundation import stream # pylint: disable=unused-import
+
# pylint: disable=too-many-arguments
-
-
-class NoSuchMethodError(Exception):
- """Raised by customer code to indicate an unrecognized method.
-
- Attributes:
- group: The group of the unrecognized method.
-    method: The name of the unrecognized method.
- """
-
- def __init__(self, group, method):
- """Constructor.
-
- Args:
- group: The group identifier of the unrecognized RPC name.
- method: The method identifier of the unrecognized RPC name.
- """
- super(NoSuchMethodError, self).__init__()
- self.group = group
- self.method = method
-
- def __repr__(self):
+
+
+class NoSuchMethodError(Exception):
+ """Raised by customer code to indicate an unrecognized method.
+
+ Attributes:
+ group: The group of the unrecognized method.
+    method: The name of the unrecognized method.
+ """
+
+ def __init__(self, group, method):
+ """Constructor.
+
+ Args:
+ group: The group identifier of the unrecognized RPC name.
+ method: The method identifier of the unrecognized RPC name.
+ """
+ super(NoSuchMethodError, self).__init__()
+ self.group = group
+ self.method = method
+
+ def __repr__(self):
return 'face.NoSuchMethodError(%s, %s)' % (
self.group,
self.method,
)
-
-
-class Abortion(
+
+
+class Abortion(
collections.namedtuple('Abortion', (
'kind',
'initial_metadata',
@@ -64,989 +64,989 @@ class Abortion(
'code',
'details',
))):
- """A value describing RPC abortion.
-
- Attributes:
- kind: A Kind value identifying how the RPC failed.
- initial_metadata: The initial metadata from the other side of the RPC or
- None if no initial metadata value was received.
- terminal_metadata: The terminal metadata from the other side of the RPC or
- None if no terminal metadata value was received.
- code: The code value from the other side of the RPC or None if no code value
- was received.
- details: The details value from the other side of the RPC or None if no
- details value was received.
- """
-
- @enum.unique
- class Kind(enum.Enum):
- """Types of RPC abortion."""
-
- CANCELLED = 'cancelled'
- EXPIRED = 'expired'
- LOCAL_SHUTDOWN = 'local shutdown'
- REMOTE_SHUTDOWN = 'remote shutdown'
- NETWORK_FAILURE = 'network failure'
- LOCAL_FAILURE = 'local failure'
- REMOTE_FAILURE = 'remote failure'
-
-
-class AbortionError(six.with_metaclass(abc.ABCMeta, Exception)):
- """Common super type for exceptions indicating RPC abortion.
-
- initial_metadata: The initial metadata from the other side of the RPC or
- None if no initial metadata value was received.
- terminal_metadata: The terminal metadata from the other side of the RPC or
- None if no terminal metadata value was received.
- code: The code value from the other side of the RPC or None if no code value
- was received.
- details: The details value from the other side of the RPC or None if no
- details value was received.
- """
-
- def __init__(self, initial_metadata, terminal_metadata, code, details):
- super(AbortionError, self).__init__()
- self.initial_metadata = initial_metadata
- self.terminal_metadata = terminal_metadata
- self.code = code
- self.details = details
-
- def __str__(self):
- return '%s(code=%s, details="%s")' % (self.__class__.__name__,
- self.code, self.details)
-
-
-class CancellationError(AbortionError):
- """Indicates that an RPC has been cancelled."""
-
-
-class ExpirationError(AbortionError):
- """Indicates that an RPC has expired ("timed out")."""
-
-
-class LocalShutdownError(AbortionError):
- """Indicates that an RPC has terminated due to local shutdown of RPCs."""
-
-
-class RemoteShutdownError(AbortionError):
- """Indicates that an RPC has terminated due to remote shutdown of RPCs."""
-
-
-class NetworkError(AbortionError):
- """Indicates that some error occurred on the network."""
-
-
-class LocalError(AbortionError):
- """Indicates that an RPC has terminated due to a local defect."""
-
-
-class RemoteError(AbortionError):
- """Indicates that an RPC has terminated due to a remote defect."""
-
-
-class RpcContext(six.with_metaclass(abc.ABCMeta)):
- """Provides RPC-related information and action."""
-
- @abc.abstractmethod
- def is_active(self):
- """Describes whether the RPC is active or has terminated."""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def time_remaining(self):
- """Describes the length of allowed time remaining for the RPC.
-
- Returns:
- A nonnegative float indicating the length of allowed time in seconds
- remaining for the RPC to complete before it is considered to have timed
- out.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def add_abortion_callback(self, abortion_callback):
- """Registers a callback to be called if the RPC is aborted.
-
- Args:
- abortion_callback: A callable to be called and passed an Abortion value
- in the event of RPC abortion.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def cancel(self):
- """Cancels the RPC.
-
- Idempotent and has no effect if the RPC has already terminated.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def protocol_context(self):
- """Accesses a custom object specified by an implementation provider.
-
- Returns:
- A value specified by the provider of a Face interface implementation
- affording custom state and behavior.
- """
- raise NotImplementedError()
-
-
-class Call(six.with_metaclass(abc.ABCMeta, RpcContext)):
- """Invocation-side utility object for an RPC."""
-
- @abc.abstractmethod
- def initial_metadata(self):
- """Accesses the initial metadata from the service-side of the RPC.
-
- This method blocks until the value is available or is known not to have been
- emitted from the service-side of the RPC.
-
- Returns:
- The initial metadata object emitted by the service-side of the RPC, or
- None if there was no such value.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def terminal_metadata(self):
- """Accesses the terminal metadata from the service-side of the RPC.
-
- This method blocks until the value is available or is known not to have been
- emitted from the service-side of the RPC.
-
- Returns:
- The terminal metadata object emitted by the service-side of the RPC, or
- None if there was no such value.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def code(self):
- """Accesses the code emitted by the service-side of the RPC.
-
- This method blocks until the value is available or is known not to have been
- emitted from the service-side of the RPC.
-
- Returns:
- The code object emitted by the service-side of the RPC, or None if there
- was no such value.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def details(self):
- """Accesses the details value emitted by the service-side of the RPC.
-
- This method blocks until the value is available or is known not to have been
- emitted from the service-side of the RPC.
-
- Returns:
- The details value emitted by the service-side of the RPC, or None if there
- was no such value.
- """
- raise NotImplementedError()
-
-
-class ServicerContext(six.with_metaclass(abc.ABCMeta, RpcContext)):
- """A context object passed to method implementations."""
-
- @abc.abstractmethod
- def invocation_metadata(self):
- """Accesses the metadata from the invocation-side of the RPC.
-
- This method blocks until the value is available or is known not to have been
- emitted from the invocation-side of the RPC.
-
- Returns:
- The metadata object emitted by the invocation-side of the RPC, or None if
- there was no such value.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def initial_metadata(self, initial_metadata):
- """Accepts the service-side initial metadata value of the RPC.
-
- This method need not be called by method implementations if they have no
- service-side initial metadata to transmit.
-
- Args:
- initial_metadata: The service-side initial metadata value of the RPC to
- be transmitted to the invocation side of the RPC.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def terminal_metadata(self, terminal_metadata):
- """Accepts the service-side terminal metadata value of the RPC.
-
- This method need not be called by method implementations if they have no
- service-side terminal metadata to transmit.
-
- Args:
- terminal_metadata: The service-side terminal metadata value of the RPC to
- be transmitted to the invocation side of the RPC.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def code(self, code):
- """Accepts the service-side code of the RPC.
-
- This method need not be called by method implementations if they have no
- code to transmit.
-
- Args:
- code: The code of the RPC to be transmitted to the invocation side of the
- RPC.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def details(self, details):
- """Accepts the service-side details of the RPC.
-
- This method need not be called by method implementations if they have no
- service-side details to transmit.
-
- Args:
- details: The service-side details value of the RPC to be transmitted to
- the invocation side of the RPC.
- """
- raise NotImplementedError()
-
-
-class ResponseReceiver(six.with_metaclass(abc.ABCMeta)):
- """Invocation-side object used to accept the output of an RPC."""
-
- @abc.abstractmethod
- def initial_metadata(self, initial_metadata):
- """Receives the initial metadata from the service-side of the RPC.
-
- Args:
- initial_metadata: The initial metadata object emitted from the
- service-side of the RPC.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def response(self, response):
- """Receives a response from the service-side of the RPC.
-
- Args:
- response: A response object emitted from the service-side of the RPC.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def complete(self, terminal_metadata, code, details):
- """Receives the completion values emitted from the service-side of the RPC.
-
- Args:
- terminal_metadata: The terminal metadata object emitted from the
- service-side of the RPC.
- code: The code object emitted from the service-side of the RPC.
- details: The details object emitted from the service-side of the RPC.
- """
- raise NotImplementedError()
-
-
-class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
- """Affords invoking a unary-unary RPC in any call style."""
-
- @abc.abstractmethod
- def __call__(self,
- request,
- timeout,
- metadata=None,
- with_call=False,
- protocol_options=None):
- """Synchronously invokes the underlying RPC.
-
- Args:
- request: The request value for the RPC.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of
- the RPC.
-      with_call: Whether or not to also return a Call for the RPC in addition
- to the response.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- The response value for the RPC, and a Call for the RPC if with_call was
- set to True at invocation.
-
- Raises:
- AbortionError: Indicating that the RPC was aborted.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def future(self, request, timeout, metadata=None, protocol_options=None):
- """Asynchronously invokes the underlying RPC.
-
- Args:
- request: The request value for the RPC.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of
- the RPC.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- An object that is both a Call for the RPC and a future.Future. In the
-      event of RPC completion, the returned Future's result value will be the
- response value of the RPC. In the event of RPC abortion, the returned
- Future's exception value will be an AbortionError.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def event(self,
- request,
- receiver,
- abortion_callback,
- timeout,
- metadata=None,
- protocol_options=None):
- """Asynchronously invokes the underlying RPC.
-
- Args:
- request: The request value for the RPC.
- receiver: A ResponseReceiver to be passed the response data of the RPC.
- abortion_callback: A callback to be called and passed an Abortion value
- in the event of RPC abortion.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of
- the RPC.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- A Call for the RPC.
- """
- raise NotImplementedError()
-
-
-class UnaryStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
- """Affords invoking a unary-stream RPC in any call style."""
-
- @abc.abstractmethod
- def __call__(self, request, timeout, metadata=None, protocol_options=None):
- """Invokes the underlying RPC.
-
- Args:
- request: The request value for the RPC.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of
- the RPC.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- An object that is both a Call for the RPC and an iterator of response
- values. Drawing response values from the returned iterator may raise
- AbortionError indicating abortion of the RPC.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def event(self,
- request,
- receiver,
- abortion_callback,
- timeout,
- metadata=None,
- protocol_options=None):
- """Asynchronously invokes the underlying RPC.
-
- Args:
- request: The request value for the RPC.
- receiver: A ResponseReceiver to be passed the response data of the RPC.
- abortion_callback: A callback to be called and passed an Abortion value
- in the event of RPC abortion.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of
- the RPC.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- A Call object for the RPC.
- """
- raise NotImplementedError()
-
-
-class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
- """Affords invoking a stream-unary RPC in any call style."""
-
- @abc.abstractmethod
- def __call__(self,
- request_iterator,
- timeout,
- metadata=None,
- with_call=False,
- protocol_options=None):
- """Synchronously invokes the underlying RPC.
-
- Args:
- request_iterator: An iterator that yields request values for the RPC.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of
- the RPC.
-      with_call: Whether or not to also return a Call for the RPC in addition
- to the response.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- The response value for the RPC, and a Call for the RPC if with_call was
- set to True at invocation.
-
- Raises:
- AbortionError: Indicating that the RPC was aborted.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def future(self,
- request_iterator,
- timeout,
- metadata=None,
- protocol_options=None):
- """Asynchronously invokes the underlying RPC.
-
- Args:
- request_iterator: An iterator that yields request values for the RPC.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of
- the RPC.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- An object that is both a Call for the RPC and a future.Future. In the
-      event of RPC completion, the returned Future's result value will be the
- response value of the RPC. In the event of RPC abortion, the returned
- Future's exception value will be an AbortionError.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def event(self,
- receiver,
- abortion_callback,
- timeout,
- metadata=None,
- protocol_options=None):
- """Asynchronously invokes the underlying RPC.
-
- Args:
- receiver: A ResponseReceiver to be passed the response data of the RPC.
- abortion_callback: A callback to be called and passed an Abortion value
- in the event of RPC abortion.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of
- the RPC.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- A single object that is both a Call object for the RPC and a
- stream.Consumer to which the request values of the RPC should be passed.
- """
- raise NotImplementedError()
-
-
-class StreamStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
- """Affords invoking a stream-stream RPC in any call style."""
-
- @abc.abstractmethod
- def __call__(self,
- request_iterator,
- timeout,
- metadata=None,
- protocol_options=None):
- """Invokes the underlying RPC.
-
- Args:
- request_iterator: An iterator that yields request values for the RPC.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of
- the RPC.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- An object that is both a Call for the RPC and an iterator of response
- values. Drawing response values from the returned iterator may raise
- AbortionError indicating abortion of the RPC.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def event(self,
- receiver,
- abortion_callback,
- timeout,
- metadata=None,
- protocol_options=None):
- """Asynchronously invokes the underlying RPC.
-
- Args:
- receiver: A ResponseReceiver to be passed the response data of the RPC.
- abortion_callback: A callback to be called and passed an Abortion value
- in the event of RPC abortion.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of
- the RPC.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- A single object that is both a Call object for the RPC and a
- stream.Consumer to which the request values of the RPC should be passed.
- """
- raise NotImplementedError()
-
-
-class MethodImplementation(six.with_metaclass(abc.ABCMeta)):
- """A sum type that describes a method implementation.
-
- Attributes:
- cardinality: A cardinality.Cardinality value.
- style: A style.Service value.
- unary_unary_inline: The implementation of the method as a callable value
- that takes a request value and a ServicerContext object and returns a
- response value. Only non-None if cardinality is
- cardinality.Cardinality.UNARY_UNARY and style is style.Service.INLINE.
- unary_stream_inline: The implementation of the method as a callable value
- that takes a request value and a ServicerContext object and returns an
- iterator of response values. Only non-None if cardinality is
- cardinality.Cardinality.UNARY_STREAM and style is style.Service.INLINE.
- stream_unary_inline: The implementation of the method as a callable value
- that takes an iterator of request values and a ServicerContext object and
- returns a response value. Only non-None if cardinality is
- cardinality.Cardinality.STREAM_UNARY and style is style.Service.INLINE.
- stream_stream_inline: The implementation of the method as a callable value
- that takes an iterator of request values and a ServicerContext object and
- returns an iterator of response values. Only non-None if cardinality is
- cardinality.Cardinality.STREAM_STREAM and style is style.Service.INLINE.
- unary_unary_event: The implementation of the method as a callable value that
- takes a request value, a response callback to which to pass the response
- value of the RPC, and a ServicerContext. Only non-None if cardinality is
- cardinality.Cardinality.UNARY_UNARY and style is style.Service.EVENT.
- unary_stream_event: The implementation of the method as a callable value
- that takes a request value, a stream.Consumer to which to pass the
- response values of the RPC, and a ServicerContext. Only non-None if
- cardinality is cardinality.Cardinality.UNARY_STREAM and style is
- style.Service.EVENT.
- stream_unary_event: The implementation of the method as a callable value
- that takes a response callback to which to pass the response value of the
- RPC and a ServicerContext and returns a stream.Consumer to which the
- request values of the RPC should be passed. Only non-None if cardinality
- is cardinality.Cardinality.STREAM_UNARY and style is style.Service.EVENT.
- stream_stream_event: The implementation of the method as a callable value
- that takes a stream.Consumer to which to pass the response values of the
- RPC and a ServicerContext and returns a stream.Consumer to which the
- request values of the RPC should be passed. Only non-None if cardinality
- is cardinality.Cardinality.STREAM_STREAM and style is
- style.Service.EVENT.
- """
-
-
-class MultiMethodImplementation(six.with_metaclass(abc.ABCMeta)):
- """A general type able to service many methods."""
-
- @abc.abstractmethod
- def service(self, group, method, response_consumer, context):
- """Services an RPC.
-
- Args:
- group: The group identifier of the RPC.
- method: The method identifier of the RPC.
- response_consumer: A stream.Consumer to be called to accept the response
- values of the RPC.
- context: a ServicerContext object.
-
- Returns:
- A stream.Consumer with which to accept the request values of the RPC. The
- consumer returned from this method may or may not be invoked to
- completion: in the case of RPC abortion, RPC Framework will simply stop
- passing values to this object. Implementations must not assume that this
- object will be called to completion of the request stream or even called
- at all.
-
- Raises:
- abandonment.Abandoned: May or may not be raised when the RPC has been
- aborted.
- NoSuchMethodError: If this MultiMethod does not recognize the given group
- and name for the RPC and is not able to service the RPC.
- """
- raise NotImplementedError()
-
-
-class GenericStub(six.with_metaclass(abc.ABCMeta)):
- """Affords RPC invocation via generic methods."""
-
- @abc.abstractmethod
- def blocking_unary_unary(self,
- group,
- method,
- request,
- timeout,
- metadata=None,
- with_call=False,
- protocol_options=None):
- """Invokes a unary-request-unary-response method.
-
- This method blocks until either returning the response value of the RPC
- (in the event of RPC completion) or raising an exception (in the event of
- RPC abortion).
-
- Args:
- group: The group identifier of the RPC.
- method: The method identifier of the RPC.
- request: The request value for the RPC.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of the RPC.
-      with_call: Whether or not to also return a Call for the RPC in addition
- to the response.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- The response value for the RPC, and a Call for the RPC if with_call was
- set to True at invocation.
-
- Raises:
- AbortionError: Indicating that the RPC was aborted.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def future_unary_unary(self,
- group,
- method,
- request,
- timeout,
- metadata=None,
- protocol_options=None):
- """Invokes a unary-request-unary-response method.
-
- Args:
- group: The group identifier of the RPC.
- method: The method identifier of the RPC.
- request: The request value for the RPC.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of the RPC.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- An object that is both a Call for the RPC and a future.Future. In the
-      event of RPC completion, the returned Future's result value will be the
- response value of the RPC. In the event of RPC abortion, the returned
- Future's exception value will be an AbortionError.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def inline_unary_stream(self,
- group,
- method,
- request,
- timeout,
- metadata=None,
- protocol_options=None):
- """Invokes a unary-request-stream-response method.
-
- Args:
- group: The group identifier of the RPC.
- method: The method identifier of the RPC.
- request: The request value for the RPC.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of the RPC.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- An object that is both a Call for the RPC and an iterator of response
- values. Drawing response values from the returned iterator may raise
- AbortionError indicating abortion of the RPC.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def blocking_stream_unary(self,
- group,
- method,
- request_iterator,
- timeout,
- metadata=None,
- with_call=False,
- protocol_options=None):
- """Invokes a stream-request-unary-response method.
-
- This method blocks until either returning the response value of the RPC
- (in the event of RPC completion) or raising an exception (in the event of
- RPC abortion).
-
- Args:
- group: The group identifier of the RPC.
- method: The method identifier of the RPC.
- request_iterator: An iterator that yields request values for the RPC.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of the RPC.
-      with_call: Whether or not to also return a Call for the RPC in addition
- to the response.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- The response value for the RPC, and a Call for the RPC if with_call was
- set to True at invocation.
-
- Raises:
- AbortionError: Indicating that the RPC was aborted.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def future_stream_unary(self,
- group,
- method,
- request_iterator,
- timeout,
- metadata=None,
- protocol_options=None):
- """Invokes a stream-request-unary-response method.
-
- Args:
- group: The group identifier of the RPC.
- method: The method identifier of the RPC.
- request_iterator: An iterator that yields request values for the RPC.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of the RPC.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- An object that is both a Call for the RPC and a future.Future. In the
-      event of RPC completion, the returned Future's result value will be the
- response value of the RPC. In the event of RPC abortion, the returned
- Future's exception value will be an AbortionError.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def inline_stream_stream(self,
- group,
- method,
- request_iterator,
- timeout,
- metadata=None,
- protocol_options=None):
- """Invokes a stream-request-stream-response method.
-
- Args:
- group: The group identifier of the RPC.
- method: The method identifier of the RPC.
- request_iterator: An iterator that yields request values for the RPC.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of the RPC.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- An object that is both a Call for the RPC and an iterator of response
- values. Drawing response values from the returned iterator may raise
- AbortionError indicating abortion of the RPC.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def event_unary_unary(self,
- group,
- method,
- request,
- receiver,
- abortion_callback,
- timeout,
- metadata=None,
- protocol_options=None):
- """Event-driven invocation of a unary-request-unary-response method.
-
- Args:
- group: The group identifier of the RPC.
- method: The method identifier of the RPC.
- request: The request value for the RPC.
- receiver: A ResponseReceiver to be passed the response data of the RPC.
- abortion_callback: A callback to be called and passed an Abortion value
- in the event of RPC abortion.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of the RPC.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- A Call for the RPC.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def event_unary_stream(self,
- group,
- method,
- request,
- receiver,
- abortion_callback,
- timeout,
- metadata=None,
- protocol_options=None):
- """Event-driven invocation of a unary-request-stream-response method.
-
- Args:
- group: The group identifier of the RPC.
- method: The method identifier of the RPC.
- request: The request value for the RPC.
- receiver: A ResponseReceiver to be passed the response data of the RPC.
- abortion_callback: A callback to be called and passed an Abortion value
- in the event of RPC abortion.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of the RPC.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- A Call for the RPC.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def event_stream_unary(self,
- group,
- method,
- receiver,
- abortion_callback,
- timeout,
- metadata=None,
- protocol_options=None):
-    """Event-driven invocation of a stream-request-unary-response method.
-
- Args:
- group: The group identifier of the RPC.
- method: The method identifier of the RPC.
- receiver: A ResponseReceiver to be passed the response data of the RPC.
- abortion_callback: A callback to be called and passed an Abortion value
- in the event of RPC abortion.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of the RPC.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- A pair of a Call object for the RPC and a stream.Consumer to which the
- request values of the RPC should be passed.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def event_stream_stream(self,
- group,
- method,
- receiver,
- abortion_callback,
- timeout,
- metadata=None,
- protocol_options=None):
-    """Event-driven invocation of a stream-request-stream-response method.
-
- Args:
- group: The group identifier of the RPC.
- method: The method identifier of the RPC.
- receiver: A ResponseReceiver to be passed the response data of the RPC.
- abortion_callback: A callback to be called and passed an Abortion value
- in the event of RPC abortion.
- timeout: A duration of time in seconds to allow for the RPC.
- metadata: A metadata value to be passed to the service-side of the RPC.
- protocol_options: A value specified by the provider of a Face interface
- implementation affording custom state and behavior.
-
- Returns:
- A pair of a Call object for the RPC and a stream.Consumer to which the
- request values of the RPC should be passed.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def unary_unary(self, group, method):
- """Creates a UnaryUnaryMultiCallable for a unary-unary method.
-
- Args:
- group: The group identifier of the RPC.
- method: The method identifier of the RPC.
-
- Returns:
- A UnaryUnaryMultiCallable value for the named unary-unary method.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def unary_stream(self, group, method):
- """Creates a UnaryStreamMultiCallable for a unary-stream method.
-
- Args:
- group: The group identifier of the RPC.
- method: The method identifier of the RPC.
-
- Returns:
-      A UnaryStreamMultiCallable value for the named unary-stream method.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def stream_unary(self, group, method):
- """Creates a StreamUnaryMultiCallable for a stream-unary method.
-
- Args:
- group: The group identifier of the RPC.
- method: The method identifier of the RPC.
-
- Returns:
- A StreamUnaryMultiCallable value for the named stream-unary method.
- """
- raise NotImplementedError()
-
- @abc.abstractmethod
- def stream_stream(self, group, method):
- """Creates a StreamStreamMultiCallable for a stream-stream method.
-
- Args:
- group: The group identifier of the RPC.
- method: The method identifier of the RPC.
-
- Returns:
- A StreamStreamMultiCallable value for the named stream-stream method.
- """
- raise NotImplementedError()
-
-
-class DynamicStub(six.with_metaclass(abc.ABCMeta)):
- """Affords RPC invocation via attributes corresponding to afforded methods.
-
- Instances of this type may be scoped to a single group so that attribute
- access is unambiguous.
-
- Instances of this type respond to attribute access as follows: if the
- requested attribute is the name of a unary-unary method, the value of the
- attribute will be a UnaryUnaryMultiCallable with which to invoke an RPC; if
- the requested attribute is the name of a unary-stream method, the value of the
- attribute will be a UnaryStreamMultiCallable with which to invoke an RPC; if
- the requested attribute is the name of a stream-unary method, the value of the
- attribute will be a StreamUnaryMultiCallable with which to invoke an RPC; and
- if the requested attribute is the name of a stream-stream method, the value of
- the attribute will be a StreamStreamMultiCallable with which to invoke an RPC.
- """
+ """A value describing RPC abortion.
+
+ Attributes:
+ kind: A Kind value identifying how the RPC failed.
+ initial_metadata: The initial metadata from the other side of the RPC or
+ None if no initial metadata value was received.
+ terminal_metadata: The terminal metadata from the other side of the RPC or
+ None if no terminal metadata value was received.
+ code: The code value from the other side of the RPC or None if no code value
+ was received.
+ details: The details value from the other side of the RPC or None if no
+ details value was received.
+ """
+
+ @enum.unique
+ class Kind(enum.Enum):
+ """Types of RPC abortion."""
+
+ CANCELLED = 'cancelled'
+ EXPIRED = 'expired'
+ LOCAL_SHUTDOWN = 'local shutdown'
+ REMOTE_SHUTDOWN = 'remote shutdown'
+ NETWORK_FAILURE = 'network failure'
+ LOCAL_FAILURE = 'local failure'
+ REMOTE_FAILURE = 'remote failure'
+
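
A sketch of an abortion callback of the sort the event-style invocation methods below accept; it only inspects the fields and Kind values documented above:

from grpc.framework.interfaces.face import face

def on_abortion(abortion):
    # abortion is a face.Abortion value; its kind is one of face.Abortion.Kind.
    if abortion.kind is face.Abortion.Kind.EXPIRED:
        print('RPC timed out: %s' % (abortion.details,))
    elif abortion.kind is face.Abortion.Kind.CANCELLED:
        print('RPC was cancelled')
    else:
        print('RPC aborted (%s), code=%s' % (abortion.kind, abortion.code))
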
+
+class AbortionError(six.with_metaclass(abc.ABCMeta, Exception)):
+ """Common super type for exceptions indicating RPC abortion.
+
+ initial_metadata: The initial metadata from the other side of the RPC or
+ None if no initial metadata value was received.
+ terminal_metadata: The terminal metadata from the other side of the RPC or
+ None if no terminal metadata value was received.
+ code: The code value from the other side of the RPC or None if no code value
+ was received.
+ details: The details value from the other side of the RPC or None if no
+ details value was received.
+ """
+
+ def __init__(self, initial_metadata, terminal_metadata, code, details):
+ super(AbortionError, self).__init__()
+ self.initial_metadata = initial_metadata
+ self.terminal_metadata = terminal_metadata
+ self.code = code
+ self.details = details
+
+ def __str__(self):
+ return '%s(code=%s, details="%s")' % (self.__class__.__name__,
+ self.code, self.details)
+
+
+class CancellationError(AbortionError):
+ """Indicates that an RPC has been cancelled."""
+
+
+class ExpirationError(AbortionError):
+ """Indicates that an RPC has expired ("timed out")."""
+
+
+class LocalShutdownError(AbortionError):
+ """Indicates that an RPC has terminated due to local shutdown of RPCs."""
+
+
+class RemoteShutdownError(AbortionError):
+ """Indicates that an RPC has terminated due to remote shutdown of RPCs."""
+
+
+class NetworkError(AbortionError):
+ """Indicates that some error occurred on the network."""
+
+
+class LocalError(AbortionError):
+ """Indicates that an RPC has terminated due to a local defect."""
+
+
+class RemoteError(AbortionError):
+ """Indicates that an RPC has terminated due to a remote defect."""
+
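
A sketch of invocation-side error handling built only on the exception hierarchy above; multi_callable stands for any of the blocking callables defined later in this module, and the retry policy is purely illustrative:

from grpc.framework.interfaces.face import face

def call_with_one_retry(multi_callable, request, timeout):
    try:
        return multi_callable(request, timeout)
    except face.ExpirationError:
        # Retry once with a doubled deadline; an illustrative policy only.
        return multi_callable(request, timeout * 2)
    except face.AbortionError as error:
        # Every abortion exception carries the attributes documented above.
        raise RuntimeError(
            'RPC aborted: code=%s details=%s' % (error.code, error.details))
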
+
+class RpcContext(six.with_metaclass(abc.ABCMeta)):
+ """Provides RPC-related information and action."""
+
+ @abc.abstractmethod
+ def is_active(self):
+ """Describes whether the RPC is active or has terminated."""
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def time_remaining(self):
+ """Describes the length of allowed time remaining for the RPC.
+
+ Returns:
+ A nonnegative float indicating the length of allowed time in seconds
+ remaining for the RPC to complete before it is considered to have timed
+ out.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def add_abortion_callback(self, abortion_callback):
+ """Registers a callback to be called if the RPC is aborted.
+
+ Args:
+ abortion_callback: A callable to be called and passed an Abortion value
+ in the event of RPC abortion.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def cancel(self):
+ """Cancels the RPC.
+
+ Idempotent and has no effect if the RPC has already terminated.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def protocol_context(self):
+ """Accesses a custom object specified by an implementation provider.
+
+ Returns:
+ A value specified by the provider of a Face interface implementation
+ affording custom state and behavior.
+ """
+ raise NotImplementedError()
+
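
A small polling helper sketched against the RpcContext methods above; the one-second threshold and poll interval are arbitrary illustrative choices:

import time

def wait_or_cancel(rpc_context, poll_interval=0.25):
    # Polls any RpcContext; cancels the RPC (idempotently, per the docstring
    # above) if less than a second of allowed time remains.
    while rpc_context.is_active():
        if rpc_context.time_remaining() < 1.0:
            rpc_context.cancel()
            return False
        time.sleep(poll_interval)
    return True
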
+
+class Call(six.with_metaclass(abc.ABCMeta, RpcContext)):
+ """Invocation-side utility object for an RPC."""
+
+ @abc.abstractmethod
+ def initial_metadata(self):
+ """Accesses the initial metadata from the service-side of the RPC.
+
+ This method blocks until the value is available or is known not to have been
+ emitted from the service-side of the RPC.
+
+ Returns:
+ The initial metadata object emitted by the service-side of the RPC, or
+ None if there was no such value.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def terminal_metadata(self):
+ """Accesses the terminal metadata from the service-side of the RPC.
+
+ This method blocks until the value is available or is known not to have been
+ emitted from the service-side of the RPC.
+
+ Returns:
+ The terminal metadata object emitted by the service-side of the RPC, or
+ None if there was no such value.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def code(self):
+ """Accesses the code emitted by the service-side of the RPC.
+
+ This method blocks until the value is available or is known not to have been
+ emitted from the service-side of the RPC.
+
+ Returns:
+ The code object emitted by the service-side of the RPC, or None if there
+ was no such value.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def details(self):
+ """Accesses the details value emitted by the service-side of the RPC.
+
+ This method blocks until the value is available or is known not to have been
+ emitted from the service-side of the RPC.
+
+ Returns:
+ The details value emitted by the service-side of the RPC, or None if there
+ was no such value.
+ """
+ raise NotImplementedError()
+
+
+class ServicerContext(six.with_metaclass(abc.ABCMeta, RpcContext)):
+ """A context object passed to method implementations."""
+
+ @abc.abstractmethod
+ def invocation_metadata(self):
+ """Accesses the metadata from the invocation-side of the RPC.
+
+ This method blocks until the value is available or is known not to have been
+ emitted from the invocation-side of the RPC.
+
+ Returns:
+ The metadata object emitted by the invocation-side of the RPC, or None if
+ there was no such value.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def initial_metadata(self, initial_metadata):
+ """Accepts the service-side initial metadata value of the RPC.
+
+ This method need not be called by method implementations if they have no
+ service-side initial metadata to transmit.
+
+ Args:
+ initial_metadata: The service-side initial metadata value of the RPC to
+ be transmitted to the invocation side of the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def terminal_metadata(self, terminal_metadata):
+ """Accepts the service-side terminal metadata value of the RPC.
+
+ This method need not be called by method implementations if they have no
+ service-side terminal metadata to transmit.
+
+ Args:
+ terminal_metadata: The service-side terminal metadata value of the RPC to
+ be transmitted to the invocation side of the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def code(self, code):
+ """Accepts the service-side code of the RPC.
+
+ This method need not be called by method implementations if they have no
+ code to transmit.
+
+ Args:
+ code: The code of the RPC to be transmitted to the invocation side of the
+ RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def details(self, details):
+ """Accepts the service-side details of the RPC.
+
+ This method need not be called by method implementations if they have no
+ service-side details to transmit.
+
+ Args:
+ details: The service-side details value of the RPC to be transmitted to
+ the invocation side of the RPC.
+ """
+ raise NotImplementedError()
+
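
A sketch of an inline unary-unary method behavior (see MethodImplementation further below) exercising the ServicerContext methods above; the metadata key and details text are hypothetical:

def echo_behavior(request, context):
    # context is a ServicerContext; each call below is optional per the
    # docstrings above.
    _ = context.invocation_metadata()  # may block until client metadata arrives
    context.initial_metadata((('echo-server-version', '1'),))  # hypothetical key
    context.details('request echoed unchanged')
    return request
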
+
+class ResponseReceiver(six.with_metaclass(abc.ABCMeta)):
+ """Invocation-side object used to accept the output of an RPC."""
+
+ @abc.abstractmethod
+ def initial_metadata(self, initial_metadata):
+ """Receives the initial metadata from the service-side of the RPC.
+
+ Args:
+ initial_metadata: The initial metadata object emitted from the
+ service-side of the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def response(self, response):
+ """Receives a response from the service-side of the RPC.
+
+ Args:
+ response: A response object emitted from the service-side of the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def complete(self, terminal_metadata, code, details):
+ """Receives the completion values emitted from the service-side of the RPC.
+
+ Args:
+ terminal_metadata: The terminal metadata object emitted from the
+ service-side of the RPC.
+ code: The code object emitted from the service-side of the RPC.
+ details: The details object emitted from the service-side of the RPC.
+ """
+ raise NotImplementedError()
+
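
A minimal concrete ResponseReceiver, sketched directly from the three methods above, that simply records everything it is handed (reused in the call-style sketch that follows):

from grpc.framework.interfaces.face import face

class CollectingReceiver(face.ResponseReceiver):

    def __init__(self):
        self.received_initial_metadata = None
        self.responses = []
        self.completion = None

    def initial_metadata(self, initial_metadata):
        self.received_initial_metadata = initial_metadata

    def response(self, response):
        self.responses.append(response)

    def complete(self, terminal_metadata, code, details):
        self.completion = (terminal_metadata, code, details)
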
+
+class UnaryUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
+ """Affords invoking a unary-unary RPC in any call style."""
+
+ @abc.abstractmethod
+ def __call__(self,
+ request,
+ timeout,
+ metadata=None,
+ with_call=False,
+ protocol_options=None):
+ """Synchronously invokes the underlying RPC.
+
+ Args:
+ request: The request value for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+      with_call: Whether or not to also return a Call for the RPC in addition
+ to the response.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ The response value for the RPC, and a Call for the RPC if with_call was
+ set to True at invocation.
+
+ Raises:
+ AbortionError: Indicating that the RPC was aborted.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def future(self, request, timeout, metadata=None, protocol_options=None):
+ """Asynchronously invokes the underlying RPC.
+
+ Args:
+ request: The request value for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ An object that is both a Call for the RPC and a future.Future. In the
+      event of RPC completion, the returned Future's result value will be the
+ response value of the RPC. In the event of RPC abortion, the returned
+ Future's exception value will be an AbortionError.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def event(self,
+ request,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Asynchronously invokes the underlying RPC.
+
+ Args:
+ request: The request value for the RPC.
+ receiver: A ResponseReceiver to be passed the response data of the RPC.
+ abortion_callback: A callback to be called and passed an Abortion value
+ in the event of RPC abortion.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ A Call for the RPC.
+ """
+ raise NotImplementedError()
+
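
A sketch contrasting the three call styles of the multi-callable above; multi_callable is assumed to come from a stub (for example GenericStub.unary_unary below), and receiver/on_abortion are objects like the ones sketched earlier:

def demonstrate_unary_unary_styles(multi_callable, request, receiver, on_abortion):
    # Blocking style: returns the response, plus the Call when with_call=True.
    response, call = multi_callable(request, 10, with_call=True)

    # Future style: the returned object is both a Call and a future.Future.
    rpc_future = multi_callable.future(request, 10)
    future_response = rpc_future.result()  # an abortion surfaces as AbortionError

    # Event style: responses flow to the receiver; only a Call is returned.
    event_call = multi_callable.event(request, receiver, on_abortion, 10)
    return response, future_response, event_call
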
+
+class UnaryStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
+ """Affords invoking a unary-stream RPC in any call style."""
+
+ @abc.abstractmethod
+ def __call__(self, request, timeout, metadata=None, protocol_options=None):
+ """Invokes the underlying RPC.
+
+ Args:
+ request: The request value for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ An object that is both a Call for the RPC and an iterator of response
+ values. Drawing response values from the returned iterator may raise
+ AbortionError indicating abortion of the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def event(self,
+ request,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Asynchronously invokes the underlying RPC.
+
+ Args:
+ request: The request value for the RPC.
+ receiver: A ResponseReceiver to be passed the response data of the RPC.
+ abortion_callback: A callback to be called and passed an Abortion value
+ in the event of RPC abortion.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ A Call object for the RPC.
+ """
+ raise NotImplementedError()
+
+
+class StreamUnaryMultiCallable(six.with_metaclass(abc.ABCMeta)):
+ """Affords invoking a stream-unary RPC in any call style."""
+
+ @abc.abstractmethod
+ def __call__(self,
+ request_iterator,
+ timeout,
+ metadata=None,
+ with_call=False,
+ protocol_options=None):
+ """Synchronously invokes the underlying RPC.
+
+ Args:
+ request_iterator: An iterator that yields request values for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+      with_call: Whether or not to also return a Call for the RPC in addition
+ to the response.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ The response value for the RPC, and a Call for the RPC if with_call was
+ set to True at invocation.
+
+ Raises:
+ AbortionError: Indicating that the RPC was aborted.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def future(self,
+ request_iterator,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Asynchronously invokes the underlying RPC.
+
+ Args:
+ request_iterator: An iterator that yields request values for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ An object that is both a Call for the RPC and a future.Future. In the
+      event of RPC completion, the returned Future's result value will be the
+ response value of the RPC. In the event of RPC abortion, the returned
+ Future's exception value will be an AbortionError.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def event(self,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Asynchronously invokes the underlying RPC.
+
+ Args:
+ receiver: A ResponseReceiver to be passed the response data of the RPC.
+ abortion_callback: A callback to be called and passed an Abortion value
+ in the event of RPC abortion.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ A single object that is both a Call object for the RPC and a
+ stream.Consumer to which the request values of the RPC should be passed.
+ """
+ raise NotImplementedError()
+
+
+class StreamStreamMultiCallable(six.with_metaclass(abc.ABCMeta)):
+ """Affords invoking a stream-stream RPC in any call style."""
+
+ @abc.abstractmethod
+ def __call__(self,
+ request_iterator,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Invokes the underlying RPC.
+
+ Args:
+ request_iterator: An iterator that yields request values for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ An object that is both a Call for the RPC and an iterator of response
+ values. Drawing response values from the returned iterator may raise
+ AbortionError indicating abortion of the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def event(self,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Asynchronously invokes the underlying RPC.
+
+ Args:
+ receiver: A ResponseReceiver to be passed the response data of the RPC.
+ abortion_callback: A callback to be called and passed an Abortion value
+ in the event of RPC abortion.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of
+ the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ A single object that is both a Call object for the RPC and a
+ stream.Consumer to which the request values of the RPC should be passed.
+ """
+ raise NotImplementedError()
+
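
A sketch of the event call style above, where the returned object doubles as the stream.Consumer into which request values are pushed; consume and terminate are assumed here to be the stream.Consumer methods from the foundation stream module imported at the top of this file:

def send_requests_event_style(stream_stream_callable, requests, receiver, on_abortion):
    # The return value is both a Call and a stream.Consumer.
    call_and_consumer = stream_stream_callable.event(receiver, on_abortion, 30)
    for request in requests:
        call_and_consumer.consume(request)
    call_and_consumer.terminate()
    return call_and_consumer
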
+
+class MethodImplementation(six.with_metaclass(abc.ABCMeta)):
+ """A sum type that describes a method implementation.
+
+ Attributes:
+ cardinality: A cardinality.Cardinality value.
+ style: A style.Service value.
+ unary_unary_inline: The implementation of the method as a callable value
+ that takes a request value and a ServicerContext object and returns a
+ response value. Only non-None if cardinality is
+ cardinality.Cardinality.UNARY_UNARY and style is style.Service.INLINE.
+ unary_stream_inline: The implementation of the method as a callable value
+ that takes a request value and a ServicerContext object and returns an
+ iterator of response values. Only non-None if cardinality is
+ cardinality.Cardinality.UNARY_STREAM and style is style.Service.INLINE.
+ stream_unary_inline: The implementation of the method as a callable value
+ that takes an iterator of request values and a ServicerContext object and
+ returns a response value. Only non-None if cardinality is
+ cardinality.Cardinality.STREAM_UNARY and style is style.Service.INLINE.
+ stream_stream_inline: The implementation of the method as a callable value
+ that takes an iterator of request values and a ServicerContext object and
+ returns an iterator of response values. Only non-None if cardinality is
+ cardinality.Cardinality.STREAM_STREAM and style is style.Service.INLINE.
+ unary_unary_event: The implementation of the method as a callable value that
+ takes a request value, a response callback to which to pass the response
+ value of the RPC, and a ServicerContext. Only non-None if cardinality is
+ cardinality.Cardinality.UNARY_UNARY and style is style.Service.EVENT.
+ unary_stream_event: The implementation of the method as a callable value
+ that takes a request value, a stream.Consumer to which to pass the
+ response values of the RPC, and a ServicerContext. Only non-None if
+ cardinality is cardinality.Cardinality.UNARY_STREAM and style is
+ style.Service.EVENT.
+ stream_unary_event: The implementation of the method as a callable value
+ that takes a response callback to which to pass the response value of the
+ RPC and a ServicerContext and returns a stream.Consumer to which the
+ request values of the RPC should be passed. Only non-None if cardinality
+ is cardinality.Cardinality.STREAM_UNARY and style is style.Service.EVENT.
+ stream_stream_event: The implementation of the method as a callable value
+ that takes a stream.Consumer to which to pass the response values of the
+ RPC and a ServicerContext and returns a stream.Consumer to which the
+ request values of the RPC should be passed. Only non-None if cardinality
+ is cardinality.Cardinality.STREAM_STREAM and style is
+ style.Service.EVENT.
+ """
+
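
A sketch of one concrete MethodImplementation populated exactly as the attribute docstring above prescribes for an inline unary-unary method; only the matching attribute is non-None:

from grpc.framework.common import cardinality
from grpc.framework.common import style
from grpc.framework.interfaces.face import face

class _InlineUnaryUnaryMethod(face.MethodImplementation):

    def __init__(self, behavior):
        self.cardinality = cardinality.Cardinality.UNARY_UNARY
        self.style = style.Service.INLINE
        self.unary_unary_inline = behavior
        self.unary_stream_inline = None
        self.stream_unary_inline = None
        self.stream_stream_inline = None
        self.unary_unary_event = None
        self.unary_stream_event = None
        self.stream_unary_event = None
        self.stream_stream_event = None

echo_method = _InlineUnaryUnaryMethod(lambda request, context: request)
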
+
+class MultiMethodImplementation(six.with_metaclass(abc.ABCMeta)):
+ """A general type able to service many methods."""
+
+ @abc.abstractmethod
+ def service(self, group, method, response_consumer, context):
+ """Services an RPC.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ response_consumer: A stream.Consumer to be called to accept the response
+ values of the RPC.
+ context: a ServicerContext object.
+
+ Returns:
+ A stream.Consumer with which to accept the request values of the RPC. The
+ consumer returned from this method may or may not be invoked to
+ completion: in the case of RPC abortion, RPC Framework will simply stop
+ passing values to this object. Implementations must not assume that this
+ object will be called to completion of the request stream or even called
+ at all.
+
+ Raises:
+ abandonment.Abandoned: May or may not be raised when the RPC has been
+ aborted.
+ NoSuchMethodError: If this MultiMethod does not recognize the given group
+ and name for the RPC and is not able to service the RPC.
+ """
+ raise NotImplementedError()
+
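
A sketch of a MultiMethodImplementation that services a single known method and reports everything else with the NoSuchMethodError defined earlier; the handler is a hypothetical callable expected to return the stream.Consumer described above:

from grpc.framework.interfaces.face import face

class SingleMethodService(face.MultiMethodImplementation):

    def __init__(self, group, method, handler):
        self._group = group
        self._method = method
        self._handler = handler  # handler(response_consumer, context) -> stream.Consumer

    def service(self, group, method, response_consumer, context):
        if (group, method) != (self._group, self._method):
            raise face.NoSuchMethodError(group, method)
        return self._handler(response_consumer, context)
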
+
+class GenericStub(six.with_metaclass(abc.ABCMeta)):
+ """Affords RPC invocation via generic methods."""
+
+ @abc.abstractmethod
+ def blocking_unary_unary(self,
+ group,
+ method,
+ request,
+ timeout,
+ metadata=None,
+ with_call=False,
+ protocol_options=None):
+ """Invokes a unary-request-unary-response method.
+
+ This method blocks until either returning the response value of the RPC
+ (in the event of RPC completion) or raising an exception (in the event of
+ RPC abortion).
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ request: The request value for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+ with_call: Whether or not to also return a Call for the RPC in addition
+ to the response.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ The response value for the RPC, and a Call for the RPC if with_call was
+ set to True at invocation.
+
+ Raises:
+ AbortionError: Indicating that the RPC was aborted.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def future_unary_unary(self,
+ group,
+ method,
+ request,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Invokes a unary-request-unary-response method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ request: The request value for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ An object that is both a Call for the RPC and a future.Future. In the
+ event of RPC completion, the returned Future's result value will be the
+ response value of the RPC. In the event of RPC abortion, the returned
+ Future's exception value will be an AbortionError.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def inline_unary_stream(self,
+ group,
+ method,
+ request,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Invokes a unary-request-stream-response method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ request: The request value for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ An object that is both a Call for the RPC and an iterator of response
+ values. Drawing response values from the returned iterator may raise
+ AbortionError indicating abortion of the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def blocking_stream_unary(self,
+ group,
+ method,
+ request_iterator,
+ timeout,
+ metadata=None,
+ with_call=False,
+ protocol_options=None):
+ """Invokes a stream-request-unary-response method.
+
+ This method blocks until either returning the response value of the RPC
+ (in the event of RPC completion) or raising an exception (in the event of
+ RPC abortion).
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ request_iterator: An iterator that yields request values for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+ with_call: Whether or not to also return a Call for the RPC in addition
+ to the response.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ The response value for the RPC, and a Call for the RPC if with_call was
+ set to True at invocation.
+
+ Raises:
+ AbortionError: Indicating that the RPC was aborted.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def future_stream_unary(self,
+ group,
+ method,
+ request_iterator,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Invokes a stream-request-unary-response method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ request_iterator: An iterator that yields request values for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ An object that is both a Call for the RPC and a future.Future. In the
+ event of RPC completion, the returned Future's result value will be the
+ response value of the RPC. In the event of RPC abortion, the returned
+ Future's exception value will be an AbortionError.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def inline_stream_stream(self,
+ group,
+ method,
+ request_iterator,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Invokes a stream-request-stream-response method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ request_iterator: An iterator that yields request values for the RPC.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ An object that is both a Call for the RPC and an iterator of response
+ values. Drawing response values from the returned iterator may raise
+ AbortionError indicating abortion of the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def event_unary_unary(self,
+ group,
+ method,
+ request,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Event-driven invocation of a unary-request-unary-response method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ request: The request value for the RPC.
+ receiver: A ResponseReceiver to be passed the response data of the RPC.
+ abortion_callback: A callback to be called and passed an Abortion value
+ in the event of RPC abortion.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ A Call for the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def event_unary_stream(self,
+ group,
+ method,
+ request,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Event-driven invocation of a unary-request-stream-response method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ request: The request value for the RPC.
+ receiver: A ResponseReceiver to be passed the response data of the RPC.
+ abortion_callback: A callback to be called and passed an Abortion value
+ in the event of RPC abortion.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ A Call for the RPC.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def event_stream_unary(self,
+ group,
+ method,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Event-driven invocation of a unary-request-unary-response method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ receiver: A ResponseReceiver to be passed the response data of the RPC.
+ abortion_callback: A callback to be called and passed an Abortion value
+ in the event of RPC abortion.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ A pair of a Call object for the RPC and a stream.Consumer to which the
+ request values of the RPC should be passed.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def event_stream_stream(self,
+ group,
+ method,
+ receiver,
+ abortion_callback,
+ timeout,
+ metadata=None,
+ protocol_options=None):
+ """Event-driven invocation of a unary-request-stream-response method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+ receiver: A ResponseReceiver to be passed the response data of the RPC.
+ abortion_callback: A callback to be called and passed an Abortion value
+ in the event of RPC abortion.
+ timeout: A duration of time in seconds to allow for the RPC.
+ metadata: A metadata value to be passed to the service-side of the RPC.
+ protocol_options: A value specified by the provider of a Face interface
+ implementation affording custom state and behavior.
+
+ Returns:
+ A pair of a Call object for the RPC and a stream.Consumer to which the
+ request values of the RPC should be passed.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def unary_unary(self, group, method):
+ """Creates a UnaryUnaryMultiCallable for a unary-unary method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+
+ Returns:
+ A UnaryUnaryMultiCallable value for the named unary-unary method.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def unary_stream(self, group, method):
+ """Creates a UnaryStreamMultiCallable for a unary-stream method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+
+ Returns:
+ A UnaryStreamMultiCallable value for the named unary-stream method.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def stream_unary(self, group, method):
+ """Creates a StreamUnaryMultiCallable for a stream-unary method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+
+ Returns:
+ A StreamUnaryMultiCallable value for the named stream-unary method.
+ """
+ raise NotImplementedError()
+
+ @abc.abstractmethod
+ def stream_stream(self, group, method):
+ """Creates a StreamStreamMultiCallable for a stream-stream method.
+
+ Args:
+ group: The group identifier of the RPC.
+ method: The method identifier of the RPC.
+
+ Returns:
+ A StreamStreamMultiCallable value for the named stream-stream method.
+ """
+ raise NotImplementedError()
+
+
+class DynamicStub(six.with_metaclass(abc.ABCMeta)):
+ """Affords RPC invocation via attributes corresponding to afforded methods.
+
+ Instances of this type may be scoped to a single group so that attribute
+ access is unambiguous.
+
+ Instances of this type respond to attribute access as follows: if the
+ requested attribute is the name of a unary-unary method, the value of the
+ attribute will be a UnaryUnaryMultiCallable with which to invoke an RPC; if
+ the requested attribute is the name of a unary-stream method, the value of the
+ attribute will be a UnaryStreamMultiCallable with which to invoke an RPC; if
+ the requested attribute is the name of a stream-unary method, the value of the
+ attribute will be a StreamUnaryMultiCallable with which to invoke an RPC; and
+ if the requested attribute is the name of a stream-stream method, the value of
+ the attribute will be a StreamStreamMultiCallable with which to invoke an RPC.
+ """
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/face/utilities.py b/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/face/utilities.py
index f27bd67615..ca018850fe 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/face/utilities.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc/framework/interfaces/face/utilities.py
@@ -1,168 +1,168 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-"""Utilities for RPC Framework's Face interface."""
-
-import collections
-
-# stream is referenced from specification in this module.
-from grpc.framework.common import cardinality
-from grpc.framework.common import style
-from grpc.framework.foundation import stream # pylint: disable=unused-import
-from grpc.framework.interfaces.face import face
-
-
-class _MethodImplementation(face.MethodImplementation,
- collections.namedtuple('_MethodImplementation', [
- 'cardinality',
- 'style',
- 'unary_unary_inline',
- 'unary_stream_inline',
- 'stream_unary_inline',
- 'stream_stream_inline',
- 'unary_unary_event',
- 'unary_stream_event',
- 'stream_unary_event',
- 'stream_stream_event',
- ])):
- pass
-
-
-def unary_unary_inline(behavior):
- """Creates an face.MethodImplementation for the given behavior.
-
- Args:
- behavior: The implementation of a unary-unary RPC method as a callable value
- that takes a request value and an face.ServicerContext object and
- returns a response value.
-
- Returns:
- An face.MethodImplementation derived from the given behavior.
- """
- return _MethodImplementation(cardinality.Cardinality.UNARY_UNARY,
- style.Service.INLINE, behavior, None, None,
- None, None, None, None, None)
-
-
-def unary_stream_inline(behavior):
- """Creates an face.MethodImplementation for the given behavior.
-
- Args:
- behavior: The implementation of a unary-stream RPC method as a callable
- value that takes a request value and an face.ServicerContext object and
- returns an iterator of response values.
-
- Returns:
- An face.MethodImplementation derived from the given behavior.
- """
- return _MethodImplementation(cardinality.Cardinality.UNARY_STREAM,
- style.Service.INLINE, None, behavior, None,
- None, None, None, None, None)
-
-
-def stream_unary_inline(behavior):
- """Creates an face.MethodImplementation for the given behavior.
-
- Args:
- behavior: The implementation of a stream-unary RPC method as a callable
- value that takes an iterator of request values and an
- face.ServicerContext object and returns a response value.
-
- Returns:
- An face.MethodImplementation derived from the given behavior.
- """
- return _MethodImplementation(cardinality.Cardinality.STREAM_UNARY,
- style.Service.INLINE, None, None, behavior,
- None, None, None, None, None)
-
-
-def stream_stream_inline(behavior):
- """Creates an face.MethodImplementation for the given behavior.
-
- Args:
- behavior: The implementation of a stream-stream RPC method as a callable
- value that takes an iterator of request values and an
- face.ServicerContext object and returns an iterator of response values.
-
- Returns:
- An face.MethodImplementation derived from the given behavior.
- """
- return _MethodImplementation(cardinality.Cardinality.STREAM_STREAM,
- style.Service.INLINE, None, None, None,
- behavior, None, None, None, None)
-
-
-def unary_unary_event(behavior):
- """Creates an face.MethodImplementation for the given behavior.
-
- Args:
- behavior: The implementation of a unary-unary RPC method as a callable
- value that takes a request value, a response callback to which to pass
- the response value of the RPC, and an face.ServicerContext.
-
- Returns:
- An face.MethodImplementation derived from the given behavior.
- """
- return _MethodImplementation(cardinality.Cardinality.UNARY_UNARY,
- style.Service.EVENT, None, None, None, None,
- behavior, None, None, None)
-
-
-def unary_stream_event(behavior):
- """Creates an face.MethodImplementation for the given behavior.
-
- Args:
- behavior: The implementation of a unary-stream RPC method as a callable
- value that takes a request value, a stream.Consumer to which to pass the
- the response values of the RPC, and an face.ServicerContext.
-
- Returns:
- An face.MethodImplementation derived from the given behavior.
- """
- return _MethodImplementation(cardinality.Cardinality.UNARY_STREAM,
- style.Service.EVENT, None, None, None, None,
- None, behavior, None, None)
-
-
-def stream_unary_event(behavior):
- """Creates an face.MethodImplementation for the given behavior.
-
- Args:
- behavior: The implementation of a stream-unary RPC method as a callable
- value that takes a response callback to which to pass the response value
- of the RPC and an face.ServicerContext and returns a stream.Consumer to
- which the request values of the RPC should be passed.
-
- Returns:
- An face.MethodImplementation derived from the given behavior.
- """
- return _MethodImplementation(cardinality.Cardinality.STREAM_UNARY,
- style.Service.EVENT, None, None, None, None,
- None, None, behavior, None)
-
-
-def stream_stream_event(behavior):
- """Creates an face.MethodImplementation for the given behavior.
-
- Args:
- behavior: The implementation of a stream-stream RPC method as a callable
- value that takes a stream.Consumer to which to pass the response values
- of the RPC and an face.ServicerContext and returns a stream.Consumer to
- which the request values of the RPC should be passed.
-
- Returns:
- An face.MethodImplementation derived from the given behavior.
- """
- return _MethodImplementation(cardinality.Cardinality.STREAM_STREAM,
- style.Service.EVENT, None, None, None, None,
- None, None, None, behavior)
+"""Utilities for RPC Framework's Face interface."""
+
+import collections
+
+# stream is referenced from specification in this module.
+from grpc.framework.common import cardinality
+from grpc.framework.common import style
+from grpc.framework.foundation import stream # pylint: disable=unused-import
+from grpc.framework.interfaces.face import face
+
+
+class _MethodImplementation(face.MethodImplementation,
+ collections.namedtuple('_MethodImplementation', [
+ 'cardinality',
+ 'style',
+ 'unary_unary_inline',
+ 'unary_stream_inline',
+ 'stream_unary_inline',
+ 'stream_stream_inline',
+ 'unary_unary_event',
+ 'unary_stream_event',
+ 'stream_unary_event',
+ 'stream_stream_event',
+ ])):
+ pass
+
+
+def unary_unary_inline(behavior):
+ """Creates an face.MethodImplementation for the given behavior.
+
+ Args:
+ behavior: The implementation of a unary-unary RPC method as a callable value
+ that takes a request value and a face.ServicerContext object and
+ returns a response value.
+
+ Returns:
+ A face.MethodImplementation derived from the given behavior.
+ """
+ return _MethodImplementation(cardinality.Cardinality.UNARY_UNARY,
+ style.Service.INLINE, behavior, None, None,
+ None, None, None, None, None)
+
+
+def unary_stream_inline(behavior):
+ """Creates an face.MethodImplementation for the given behavior.
+
+ Args:
+ behavior: The implementation of a unary-stream RPC method as a callable
+ value that takes a request value and a face.ServicerContext object and
+ returns an iterator of response values.
+
+ Returns:
+ A face.MethodImplementation derived from the given behavior.
+ """
+ return _MethodImplementation(cardinality.Cardinality.UNARY_STREAM,
+ style.Service.INLINE, None, behavior, None,
+ None, None, None, None, None)
+
+
+def stream_unary_inline(behavior):
+ """Creates an face.MethodImplementation for the given behavior.
+
+ Args:
+ behavior: The implementation of a stream-unary RPC method as a callable
+ value that takes an iterator of request values and a
+ face.ServicerContext object and returns a response value.
+
+ Returns:
+ A face.MethodImplementation derived from the given behavior.
+ """
+ return _MethodImplementation(cardinality.Cardinality.STREAM_UNARY,
+ style.Service.INLINE, None, None, behavior,
+ None, None, None, None, None)
+
+
+def stream_stream_inline(behavior):
+ """Creates an face.MethodImplementation for the given behavior.
+
+ Args:
+ behavior: The implementation of a stream-stream RPC method as a callable
+ value that takes an iterator of request values and a
+ face.ServicerContext object and returns an iterator of response values.
+
+ Returns:
+ A face.MethodImplementation derived from the given behavior.
+ """
+ return _MethodImplementation(cardinality.Cardinality.STREAM_STREAM,
+ style.Service.INLINE, None, None, None,
+ behavior, None, None, None, None)
+
+
+def unary_unary_event(behavior):
+ """Creates an face.MethodImplementation for the given behavior.
+
+ Args:
+ behavior: The implementation of a unary-unary RPC method as a callable
+ value that takes a request value, a response callback to which to pass
+ the response value of the RPC, and a face.ServicerContext.
+
+ Returns:
+ A face.MethodImplementation derived from the given behavior.
+ """
+ return _MethodImplementation(cardinality.Cardinality.UNARY_UNARY,
+ style.Service.EVENT, None, None, None, None,
+ behavior, None, None, None)
+
+
+def unary_stream_event(behavior):
+ """Creates an face.MethodImplementation for the given behavior.
+
+ Args:
+ behavior: The implementation of a unary-stream RPC method as a callable
+ value that takes a request value, a stream.Consumer to which to pass
+ the response values of the RPC, and a face.ServicerContext.
+
+ Returns:
+ A face.MethodImplementation derived from the given behavior.
+ """
+ return _MethodImplementation(cardinality.Cardinality.UNARY_STREAM,
+ style.Service.EVENT, None, None, None, None,
+ None, behavior, None, None)
+
+
+def stream_unary_event(behavior):
+ """Creates an face.MethodImplementation for the given behavior.
+
+ Args:
+ behavior: The implementation of a stream-unary RPC method as a callable
+ value that takes a response callback to which to pass the response value
+ of the RPC and a face.ServicerContext and returns a stream.Consumer to
+ which the request values of the RPC should be passed.
+
+ Returns:
+ A face.MethodImplementation derived from the given behavior.
+ """
+ return _MethodImplementation(cardinality.Cardinality.STREAM_UNARY,
+ style.Service.EVENT, None, None, None, None,
+ None, None, behavior, None)
+
+
+def stream_stream_event(behavior):
+ """Creates an face.MethodImplementation for the given behavior.
+
+ Args:
+ behavior: The implementation of a stream-stream RPC method as a callable
+ value that takes a stream.Consumer to which to pass the response values
+ of the RPC and a face.ServicerContext and returns a stream.Consumer to
+ which the request values of the RPC should be passed.
+
+ Returns:
+ A face.MethodImplementation derived from the given behavior.
+ """
+ return _MethodImplementation(cardinality.Cardinality.STREAM_STREAM,
+ style.Service.EVENT, None, None, None, None,
+ None, None, None, behavior)
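As a usage note, a short sketch of how these constructors pair with the Face interfaces; the behavior function and its echo logic are illustrative only:

from grpc.framework.interfaces.face import utilities

def _handle_request(request, servicer_context):
    # Inline unary-unary behavior: one request value in, one response value out.
    return request

# Wraps the behavior in a face.MethodImplementation whose cardinality is
# UNARY_UNARY and whose style is INLINE; the other eight implementation slots
# of the namedtuple stay None.
method_implementation = utilities.unary_unary_inline(_handle_request)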
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc_core_dependencies.py b/contrib/libs/grpc/src/python/grpcio/grpc_core_dependencies.py
index 67f985e1f4..27bbc873bb 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc_core_dependencies.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc_core_dependencies.py
@@ -1,20 +1,20 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc_core_dependencies.py.template`!!!
-
-CORE_SOURCE_FILES = [
+
+# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc_core_dependencies.py.template`!!!
+
+CORE_SOURCE_FILES = [
'src/core/ext/filters/census/grpc_context.cc',
'src/core/ext/filters/client_channel/backend_metric.cc',
'src/core/ext/filters/client_channel/backup_poller.cc',
@@ -931,7 +931,7 @@ CORE_SOURCE_FILES = [
'third_party/zlib/trees.c',
'third_party/zlib/uncompr.c',
'third_party/zlib/zutil.c',
-]
+]
ASM_SOURCE_FILES = {
'crypto_ios_aarch64': [
diff --git a/contrib/libs/grpc/src/python/grpcio/grpc_version.py b/contrib/libs/grpc/src/python/grpcio/grpc_version.py
index 9237f14e2f..86deb1034e 100644
--- a/contrib/libs/grpc/src/python/grpcio/grpc_version.py
+++ b/contrib/libs/grpc/src/python/grpcio/grpc_version.py
@@ -1,17 +1,17 @@
# Copyright 2015 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc_version.py.template`!!!
-
+
+# AUTO-GENERATED FROM `$REPO_ROOT/templates/src/python/grpcio/grpc_version.py.template`!!!
+
VERSION = '1.33.2'
diff --git a/contrib/libs/grpc/src/python/grpcio/support.py b/contrib/libs/grpc/src/python/grpcio/support.py
index 217f3cb9ed..c6dc992db0 100644
--- a/contrib/libs/grpc/src/python/grpcio/support.py
+++ b/contrib/libs/grpc/src/python/grpcio/support.py
@@ -1,118 +1,118 @@
# Copyright 2016 gRPC authors.
-#
+#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
-#
+#
# http://www.apache.org/licenses/LICENSE-2.0
-#
+#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-
-import os
-import os.path
-import shutil
-import sys
-import tempfile
-
-from distutils import errors
-
-import commands
-
-C_PYTHON_DEV = """
-#include <Python.h>
-int main(int argc, char **argv) { return 0; }
-"""
-C_PYTHON_DEV_ERROR_MESSAGE = """
-Could not find <Python.h>. This could mean the following:
+
+import os
+import os.path
+import shutil
+import sys
+import tempfile
+
+from distutils import errors
+
+import commands
+
+C_PYTHON_DEV = """
+#include <Python.h>
+int main(int argc, char **argv) { return 0; }
+"""
+C_PYTHON_DEV_ERROR_MESSAGE = """
+Could not find <Python.h>. This could mean the following:
* You're on Ubuntu and haven't run `apt-get install <PY_REPR>-dev`.
* You're on RHEL/Fedora and haven't run `yum install <PY_REPR>-devel` or
`dnf install <PY_REPR>-devel` (make sure you also have redhat-rpm-config
- installed)
- * You're on Mac OS X and the usual Python framework was somehow corrupted
- (check your environment variables or try re-installing?)
- * You're on Windows and your Python installation was somehow corrupted
- (check your environment variables or try re-installing?)
-"""
+ installed)
+ * You're on Mac OS X and the usual Python framework was somehow corrupted
+ (check your environment variables or try re-installing?)
+ * You're on Windows and your Python installation was somehow corrupted
+ (check your environment variables or try re-installing?)
+"""
if sys.version_info[0] == 2:
PYTHON_REPRESENTATION = 'python'
elif sys.version_info[0] == 3:
PYTHON_REPRESENTATION = 'python3'
else:
raise NotImplementedError('Unsupported Python version: %s' % sys.version)
-
-C_CHECKS = {
+
+C_CHECKS = {
C_PYTHON_DEV:
C_PYTHON_DEV_ERROR_MESSAGE.replace('<PY_REPR>', PYTHON_REPRESENTATION),
-}
-
-
-def _compile(compiler, source_string):
- tempdir = tempfile.mkdtemp()
- cpath = os.path.join(tempdir, 'a.c')
- with open(cpath, 'w') as cfile:
- cfile.write(source_string)
- try:
- compiler.compile([cpath])
- except errors.CompileError as error:
- return error
- finally:
- shutil.rmtree(tempdir)
-
-
-def _expect_compile(compiler, source_string, error_message):
- if _compile(compiler, source_string) is not None:
- sys.stderr.write(error_message)
- raise commands.CommandError(
+}
+
+
+def _compile(compiler, source_string):
+ tempdir = tempfile.mkdtemp()
+ cpath = os.path.join(tempdir, 'a.c')
+ with open(cpath, 'w') as cfile:
+ cfile.write(source_string)
+ try:
+ compiler.compile([cpath])
+ except errors.CompileError as error:
+ return error
+ finally:
+ shutil.rmtree(tempdir)
+
+
+def _expect_compile(compiler, source_string, error_message):
+ if _compile(compiler, source_string) is not None:
+ sys.stderr.write(error_message)
+ raise commands.CommandError(
"Diagnostics found a compilation environment issue:\n{}".format(
error_message))
-
-
-def diagnose_compile_error(build_ext, error):
- """Attempt to diagnose an error during compilation."""
- for c_check, message in C_CHECKS.items():
- _expect_compile(build_ext.compiler, c_check, message)
- python_sources = [
- source for source in build_ext.get_source_files()
- if source.startswith('./src/python') and source.endswith('c')
- ]
- for source in python_sources:
- if not os.path.isfile(source):
- raise commands.CommandError((
- "Diagnostics found a missing Python extension source file:\n{}\n\n"
- "This is usually because the Cython sources haven't been transpiled "
- "into C yet and you're building from source.\n"
- "Try setting the environment variable "
- "`GRPC_PYTHON_BUILD_WITH_CYTHON=1` when invoking `setup.py` or "
- "when using `pip`, e.g.:\n\n"
- "pip install -rrequirements.txt\n"
- "GRPC_PYTHON_BUILD_WITH_CYTHON=1 pip install .").format(source))
-
-
-def diagnose_attribute_error(build_ext, error):
- if any('_needs_stub' in arg for arg in error.args):
- raise commands.CommandError(
- "We expect a missing `_needs_stub` attribute from older versions of "
- "setuptools. Consider upgrading setuptools.")
-
-
-_ERROR_DIAGNOSES = {
- errors.CompileError: diagnose_compile_error,
+
+
+def diagnose_compile_error(build_ext, error):
+ """Attempt to diagnose an error during compilation."""
+ for c_check, message in C_CHECKS.items():
+ _expect_compile(build_ext.compiler, c_check, message)
+ python_sources = [
+ source for source in build_ext.get_source_files()
+ if source.startswith('./src/python') and source.endswith('c')
+ ]
+ for source in python_sources:
+ if not os.path.isfile(source):
+ raise commands.CommandError((
+ "Diagnostics found a missing Python extension source file:\n{}\n\n"
+ "This is usually because the Cython sources haven't been transpiled "
+ "into C yet and you're building from source.\n"
+ "Try setting the environment variable "
+ "`GRPC_PYTHON_BUILD_WITH_CYTHON=1` when invoking `setup.py` or "
+ "when using `pip`, e.g.:\n\n"
+ "pip install -rrequirements.txt\n"
+ "GRPC_PYTHON_BUILD_WITH_CYTHON=1 pip install .").format(source))
+
+
+def diagnose_attribute_error(build_ext, error):
+ if any('_needs_stub' in arg for arg in error.args):
+ raise commands.CommandError(
+ "We expect a missing `_needs_stub` attribute from older versions of "
+ "setuptools. Consider upgrading setuptools.")
+
+
+_ERROR_DIAGNOSES = {
+ errors.CompileError: diagnose_compile_error,
AttributeError: diagnose_attribute_error,
-}
-
-
-def diagnose_build_ext_error(build_ext, error, formatted):
- diagnostic = _ERROR_DIAGNOSES.get(type(error))
- if diagnostic is None:
- raise commands.CommandError(
+}
+
+
+def diagnose_build_ext_error(build_ext, error, formatted):
+ diagnostic = _ERROR_DIAGNOSES.get(type(error))
+ if diagnostic is None:
+ raise commands.CommandError(
"\n\nWe could not diagnose your build failure. If you are unable to "
"proceed, please file an issue at http://www.github.com/grpc/grpc "
"with `[Python install]` in the title; please attach the whole log "
"(including everything that may have appeared above the Python "
"backtrace).\n\n{}".format(formatted))
- else:
- diagnostic(build_ext, error)
+ else:
+ diagnostic(build_ext, error)
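For context, a sketch of how these diagnostics might be wired into an extension build; the DiagnosingBuildExt class is hypothetical and only illustrates handing a failure to diagnose_build_ext_error:

import traceback

from setuptools.command.build_ext import build_ext

import support

class DiagnosingBuildExt(build_ext):

    def build_extensions(self):
        try:
            build_ext.build_extensions(self)
        except Exception as error:
            # Let the diagnostics above turn the raw failure into a more
            # actionable message before propagating it.
            support.diagnose_build_ext_error(self, error,
                                             traceback.format_exc())
            raise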
diff --git a/contrib/libs/grpc/src/python/grpcio/ya.make b/contrib/libs/grpc/src/python/grpcio/ya.make
index 5cdb0230c4..eec8e3dc3f 100644
--- a/contrib/libs/grpc/src/python/grpcio/ya.make
+++ b/contrib/libs/grpc/src/python/grpcio/ya.make
@@ -1,5 +1,5 @@
PY23_LIBRARY()
-
+
LICENSE(Apache-2.0)
LICENSE_TEXTS(.yandex_meta/licenses.list.txt)
@@ -9,12 +9,12 @@ OWNER(
g:contrib
g:cpp-contrib
)
-
-PEERDIR(
+
+PEERDIR(
contrib/libs/grpc/grpc
- contrib/python/six
-)
-
+ contrib/python/six
+)
+
IF (PYTHON2)
PEERDIR(
contrib/python/enum34
@@ -27,7 +27,7 @@ ADDINCL(
contrib/libs/grpc
contrib/libs/grpc/include
)
-
+
IF (SANITIZER_TYPE == undefined)
# https://github.com/grpc/grpc/blob/v1.15.1/tools/bazel.rc#L43
CXXFLAGS(-fno-sanitize=function)
@@ -36,16 +36,16 @@ ENDIF()
NO_LINT()
NO_COMPILER_WARNINGS()
-
-PY_SRCS(
- TOP_LEVEL
+
+PY_SRCS(
+ TOP_LEVEL
grpc/__init__.py
- grpc/_auth.py
- grpc/_channel.py
- grpc/_common.py
+ grpc/_auth.py
+ grpc/_channel.py
+ grpc/_common.py
grpc/_compression.py
grpc/_cython/__init__.py
- grpc/_cython/_cygrpc/__init__.py
+ grpc/_cython/_cygrpc/__init__.py
grpc/_cython/cygrpc.pyx
grpc/_grpcio_metadata.py
grpc/_interceptor.py
@@ -65,24 +65,24 @@ PY_SRCS(
grpc/experimental/session_cache.py
grpc/framework/__init__.py
grpc/framework/common/__init__.py
- grpc/framework/common/cardinality.py
- grpc/framework/common/style.py
+ grpc/framework/common/cardinality.py
+ grpc/framework/common/style.py
grpc/framework/foundation/__init__.py
- grpc/framework/foundation/abandonment.py
- grpc/framework/foundation/callable_util.py
- grpc/framework/foundation/future.py
- grpc/framework/foundation/logging_pool.py
- grpc/framework/foundation/stream.py
- grpc/framework/foundation/stream_util.py
+ grpc/framework/foundation/abandonment.py
+ grpc/framework/foundation/callable_util.py
+ grpc/framework/foundation/future.py
+ grpc/framework/foundation/logging_pool.py
+ grpc/framework/foundation/stream.py
+ grpc/framework/foundation/stream_util.py
grpc/framework/interfaces/__init__.py
grpc/framework/interfaces/base/__init__.py
- grpc/framework/interfaces/base/base.py
- grpc/framework/interfaces/base/utilities.py
+ grpc/framework/interfaces/base/base.py
+ grpc/framework/interfaces/base/utilities.py
grpc/framework/interfaces/face/__init__.py
- grpc/framework/interfaces/face/face.py
- grpc/framework/interfaces/face/utilities.py
-)
-
+ grpc/framework/interfaces/face/face.py
+ grpc/framework/interfaces/face/utilities.py
+)
+
IF (PYTHON3)
PY_SRCS(
TOP_LEVEL
@@ -102,4 +102,4 @@ IF (PYTHON3)
)
ENDIF()
-END()
+END()