aboutsummaryrefslogtreecommitdiffstats
path: root/contrib/python
diff options
context:
space:
mode:
authoriddqd <iddqd@yandex-team.com>2024-06-11 10:12:13 +0300
committeriddqd <iddqd@yandex-team.com>2024-06-11 10:22:43 +0300
commit07f57e35443ab7f09471caf2dbf1afbcced4d9f7 (patch)
treea4a7b66ead62e83fa988a2ec2ce6576311c1f4b1 /contrib/python
parent6db3b8ca95e44179e48306a58656fb1f9317d9c3 (diff)
downloadydb-07f57e35443ab7f09471caf2dbf1afbcced4d9f7.tar.gz
add contrib/python/yandexcloud to import
03b7d3cad2237366b55b393e18d4dc5eb222798c
Diffstat (limited to 'contrib/python')
-rw-r--r--contrib/python/PyJWT/py2/.dist-info/METADATA115
-rw-r--r--contrib/python/PyJWT/py2/.dist-info/entry_points.txt3
-rw-r--r--contrib/python/PyJWT/py2/.dist-info/top_level.txt1
-rw-r--r--contrib/python/PyJWT/py2/jwt/__init__.py31
-rw-r--r--contrib/python/PyJWT/py2/jwt/__main__.py168
-rw-r--r--contrib/python/PyJWT/py2/jwt/algorithms.py403
-rw-r--r--contrib/python/PyJWT/py2/jwt/api_jws.py242
-rw-r--r--contrib/python/PyJWT/py2/jwt/api_jwt.py222
-rw-r--r--contrib/python/PyJWT/py2/jwt/compat.py68
-rw-r--r--contrib/python/PyJWT/py2/jwt/contrib/__init__.py0
-rw-r--r--contrib/python/PyJWT/py2/jwt/contrib/algorithms/__init__.py0
-rw-r--r--contrib/python/PyJWT/py2/jwt/contrib/algorithms/py_ecdsa.py60
-rw-r--r--contrib/python/PyJWT/py2/jwt/contrib/algorithms/pycrypto.py46
-rw-r--r--contrib/python/PyJWT/py2/jwt/exceptions.py59
-rw-r--r--contrib/python/PyJWT/py2/jwt/help.py61
-rw-r--r--contrib/python/PyJWT/py2/jwt/utils.py113
-rw-r--r--contrib/python/PyJWT/py2/ya.make43
-rw-r--r--contrib/python/yandexcloud/.dist-info/METADATA203
-rw-r--r--contrib/python/yandexcloud/.dist-info/top_level.txt2
-rw-r--r--contrib/python/yandexcloud/AUTHORS4
-rw-r--r--contrib/python/yandexcloud/LICENSE21
-rw-r--r--contrib/python/yandexcloud/README.md177
-rw-r--r--contrib/python/yandexcloud/ya.make46
-rw-r--r--contrib/python/yandexcloud/yandexcloud/__init__.py13
-rw-r--r--contrib/python/yandexcloud/yandexcloud/_auth_fabric.py145
-rw-r--r--contrib/python/yandexcloud/yandexcloud/_auth_plugin.py96
-rw-r--r--contrib/python/yandexcloud/yandexcloud/_backoff.py45
-rw-r--r--contrib/python/yandexcloud/yandexcloud/_channels.py114
-rw-r--r--contrib/python/yandexcloud/yandexcloud/_helpers.py69
-rw-r--r--contrib/python/yandexcloud/yandexcloud/_operation_waiter.py128
-rw-r--r--contrib/python/yandexcloud/yandexcloud/_retry_interceptor.py208
-rw-r--r--contrib/python/yandexcloud/yandexcloud/_sdk.py193
-rw-r--r--contrib/python/yandexcloud/yandexcloud/_wrappers/__init__.py15
-rw-r--r--contrib/python/yandexcloud/yandexcloud/_wrappers/dataproc/__init__.py748
-rw-r--r--contrib/python/yandexcloud/yandexcloud/auth.py34
-rw-r--r--contrib/python/yandexcloud/yandexcloud/operations.py27
-rw-r--r--contrib/python/yandexcloud/yandexcloud/py.typed0
37 files changed, 3923 insertions, 0 deletions
diff --git a/contrib/python/PyJWT/py2/.dist-info/METADATA b/contrib/python/PyJWT/py2/.dist-info/METADATA
new file mode 100644
index 0000000000..47ee558907
--- /dev/null
+++ b/contrib/python/PyJWT/py2/.dist-info/METADATA
@@ -0,0 +1,115 @@
+Metadata-Version: 2.1
+Name: PyJWT
+Version: 1.7.1
+Summary: JSON Web Token implementation in Python
+Home-page: http://github.com/jpadilla/pyjwt
+Author: Jose Padilla
+Author-email: hello@jpadilla.com
+License: MIT
+Keywords: jwt json web token security signing
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Topic :: Utilities
+Provides-Extra: crypto
+Requires-Dist: cryptography (>=1.4) ; extra == 'crypto'
+Provides-Extra: flake8
+Requires-Dist: flake8 ; extra == 'flake8'
+Requires-Dist: flake8-import-order ; extra == 'flake8'
+Requires-Dist: pep8-naming ; extra == 'flake8'
+Provides-Extra: test
+Requires-Dist: pytest (<5.0.0,>=4.0.1) ; extra == 'test'
+Requires-Dist: pytest-cov (<3.0.0,>=2.6.0) ; extra == 'test'
+Requires-Dist: pytest-runner (<5.0.0,>=4.2) ; extra == 'test'
+
+PyJWT
+=====
+
+.. image:: https://travis-ci.com/jpadilla/pyjwt.svg?branch=master
+ :target: http://travis-ci.com/jpadilla/pyjwt?branch=master
+
+.. image:: https://ci.appveyor.com/api/projects/status/h8nt70aqtwhht39t?svg=true
+ :target: https://ci.appveyor.com/project/jpadilla/pyjwt
+
+.. image:: https://img.shields.io/pypi/v/pyjwt.svg
+ :target: https://pypi.python.org/pypi/pyjwt
+
+.. image:: https://coveralls.io/repos/jpadilla/pyjwt/badge.svg?branch=master
+ :target: https://coveralls.io/r/jpadilla/pyjwt?branch=master
+
+.. image:: https://readthedocs.org/projects/pyjwt/badge/?version=latest
+ :target: https://pyjwt.readthedocs.io
+
+A Python implementation of `RFC 7519 <https://tools.ietf.org/html/rfc7519>`_. Original implementation was written by `@progrium <https://github.com/progrium>`_.
+
+Sponsor
+-------
+
++--------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+| |auth0-logo| | If you want to quickly add secure token-based authentication to Python projects, feel free to check Auth0's Python SDK and free plan at `auth0.com/overview <https://auth0.com/overview?utm_source=GHsponsor&utm_medium=GHsponsor&utm_campaign=pyjwt&utm_content=auth>`_. |
++--------------+-----------------------------------------------------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
+
+.. |auth0-logo| image:: https://user-images.githubusercontent.com/83319/31722733-de95bbde-b3ea-11e7-96bf-4f4e8f915588.png
+
+Installing
+----------
+
+Install with **pip**:
+
+.. code-block:: sh
+
+ $ pip install PyJWT
+
+
+Usage
+-----
+
+.. code:: python
+
+ >>> import jwt
+ >>> encoded = jwt.encode({'some': 'payload'}, 'secret', algorithm='HS256')
+ 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzb21lIjoicGF5bG9hZCJ9.4twFt5NiznN84AWoo1d7KO1T_yoc0Z6XOpOVswacPZg'
+
+ >>> jwt.decode(encoded, 'secret', algorithms=['HS256'])
+ {'some': 'payload'}
+
+
+Command line
+------------
+
+Usage::
+
+ pyjwt [options] INPUT
+
+Decoding examples::
+
+ pyjwt --key=secret decode TOKEN
+ pyjwt decode --no-verify TOKEN
+
+See more options executing ``pyjwt --help``.
+
+
+Documentation
+-------------
+
+View the full docs online at https://pyjwt.readthedocs.io/en/latest/
+
+
+Tests
+-----
+
+You can run tests from the project root after cloning with:
+
+.. code-block:: sh
+
+ $ python setup.py test
+
+
diff --git a/contrib/python/PyJWT/py2/.dist-info/entry_points.txt b/contrib/python/PyJWT/py2/.dist-info/entry_points.txt
new file mode 100644
index 0000000000..78717b2661
--- /dev/null
+++ b/contrib/python/PyJWT/py2/.dist-info/entry_points.txt
@@ -0,0 +1,3 @@
+[console_scripts]
+pyjwt = jwt.__main__:main
+
diff --git a/contrib/python/PyJWT/py2/.dist-info/top_level.txt b/contrib/python/PyJWT/py2/.dist-info/top_level.txt
new file mode 100644
index 0000000000..27ccc9bc3a
--- /dev/null
+++ b/contrib/python/PyJWT/py2/.dist-info/top_level.txt
@@ -0,0 +1 @@
+jwt
diff --git a/contrib/python/PyJWT/py2/jwt/__init__.py b/contrib/python/PyJWT/py2/jwt/__init__.py
new file mode 100644
index 0000000000..946983f022
--- /dev/null
+++ b/contrib/python/PyJWT/py2/jwt/__init__.py
@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+# flake8: noqa
+
+"""
+JSON Web Token implementation
+
+Minimum implementation based on this spec:
+http://self-issued.info/docs/draft-jones-json-web-token-01.html
+"""
+
+
+__title__ = 'pyjwt'
+__version__ = '1.7.1'
+__author__ = 'José Padilla'
+__license__ = 'MIT'
+__copyright__ = 'Copyright 2015-2018 José Padilla'
+
+
+from .api_jwt import (
+ encode, decode, register_algorithm, unregister_algorithm,
+ get_unverified_header, PyJWT
+)
+from .api_jws import PyJWS
+from .exceptions import (
+ InvalidTokenError, DecodeError, InvalidAlgorithmError,
+ InvalidAudienceError, ExpiredSignatureError, ImmatureSignatureError,
+ InvalidIssuedAtError, InvalidIssuerError, ExpiredSignature,
+ InvalidAudience, InvalidIssuer, MissingRequiredClaimError,
+ InvalidSignatureError,
+ PyJWTError,
+)
diff --git a/contrib/python/PyJWT/py2/jwt/__main__.py b/contrib/python/PyJWT/py2/jwt/__main__.py
new file mode 100644
index 0000000000..bf50aabf4a
--- /dev/null
+++ b/contrib/python/PyJWT/py2/jwt/__main__.py
@@ -0,0 +1,168 @@
+#!/usr/bin/env python
+
+from __future__ import absolute_import, print_function
+
+import argparse
+import json
+import sys
+import time
+
+from . import DecodeError, __version__, decode, encode
+
+
def encode_payload(args):
    """Encode the ``key=value`` pairs from *args.payload* into a JWT string.

    Values are coerced where possible: digits to int, otherwise float, and
    the literals ``true``/``false``/``null`` to their Python equivalents.
    ``exp=+N`` is special-cased as "N seconds from now".

    Raises:
        ValueError: if no signing key was supplied.
    """
    # Try to encode
    if args.key is None:
        raise ValueError('Key is required when encoding. See --help for usage.')

    # Build payload object to encode
    payload = {}

    for arg in args.payload:
        k, v = arg.split('=', 1)

        # exp +offset special case: convert a relative offset into an
        # absolute Unix timestamp.  startswith() avoids the IndexError the
        # previous `v[0] == '+'` check raised on an empty value ("exp=").
        if k == 'exp' and v.startswith('+') and len(v) > 1:
            v = str(int(time.time() + int(v[1:])))

        # Cast to integer?
        if v.isdigit():
            v = int(v)
        else:
            # Cast to float?
            try:
                v = float(v)
            except ValueError:
                pass

        # Cast to true, false, or null?
        constants = {'true': True, 'false': False, 'null': None}

        if v in constants:
            v = constants[v]

        payload[k] = v

    token = encode(
        payload,
        key=args.key,
        algorithm=args.algorithm
    )

    # encode() returns bytes; the CLI prints text.
    return token.decode('utf-8')
+
+
def decode_payload(args):
    """Decode a JWT (from the CLI argument or stdin) into a JSON string."""
    try:
        if args.token:
            token = args.token
        elif sys.stdin.isatty():
            # No token argument: read a single line interactively.
            token = sys.stdin.readline().strip()
        else:
            raise IOError('Cannot read from stdin: terminal not a TTY')

        data = decode(token.encode('utf-8'), key=args.key, verify=args.verify)
        return json.dumps(data)

    except DecodeError as e:
        # Re-raise with a CLI-friendly message.
        raise DecodeError('There was an error decoding the token: %s' % e)
+
+
def build_argparser():
    """Build and return the ArgumentParser for the ``pyjwt`` CLI.

    Global options (``--key``, ``--alg``) live on the root parser; the
    ``encode``/``decode`` subcommands each bind ``func`` to the handler
    that main() dispatches to.
    """

    usage = '''
    Encodes or decodes JSON Web Tokens based on input.

    %(prog)s [options] <command> [options] input

    Decoding examples:

    %(prog)s --key=secret decode json.web.token
    %(prog)s decode --no-verify json.web.token

    Encoding requires the key option and takes space separated key/value pairs
    separated by equals (=) as input. Examples:

    %(prog)s --key=secret encode iss=me exp=1302049071
    %(prog)s --key=secret encode foo=bar exp=+10

    The exp key is special and can take an offset to current Unix time.
    '''

    arg_parser = argparse.ArgumentParser(
        prog='pyjwt',
        usage=usage
    )

    arg_parser.add_argument(
        '-v', '--version',
        action='version',
        version='%(prog)s ' + __version__
    )

    # Shared options: the signing key and algorithm apply to both subcommands.
    arg_parser.add_argument(
        '--key',
        dest='key',
        metavar='KEY',
        default=None,
        help='set the secret key to sign with'
    )

    arg_parser.add_argument(
        '--alg',
        dest='algorithm',
        metavar='ALG',
        default='HS256',
        help='set crypto algorithm to sign with. default=HS256'
    )

    subparsers = arg_parser.add_subparsers(
        title='PyJWT subcommands',
        description='valid subcommands',
        help='additional help'
    )

    # Encode subcommand
    encode_parser = subparsers.add_parser('encode', help='use to encode a supplied payload')

    payload_help = """Payload to encode. Must be a space separated list of key/value
    pairs separated by equals (=) sign."""

    encode_parser.add_argument('payload', nargs='+', help=payload_help)
    encode_parser.set_defaults(func=encode_payload)

    # Decode subcommand; the token is optional because it may come from stdin.
    decode_parser = subparsers.add_parser('decode', help='use to decode a supplied JSON web token')
    decode_parser.add_argument(
        'token',
        help='JSON web token to decode.',
        nargs='?')

    decode_parser.add_argument(
        '-n', '--no-verify',
        action='store_false',
        dest='verify',
        default=True,
        help='ignore signature and claims verification on decode'
    )

    decode_parser.set_defaults(func=decode_payload)

    return arg_parser
+
+
def main():
    """CLI entry point: parse arguments and dispatch to encode/decode."""
    arg_parser = build_argparser()

    try:
        arguments = arg_parser.parse_args(sys.argv[1:])

        output = arguments.func(arguments)

        print(output)
    except Exception as e:
        # Broad catch is deliberate: this is the top-level CLI boundary, so
        # show the error message plus usage instead of a traceback.
        # (Typo fixed: "unforseen" -> "unforeseen".)
        print('There was an unforeseen error: ', e)
        arg_parser.print_help()
diff --git a/contrib/python/PyJWT/py2/jwt/algorithms.py b/contrib/python/PyJWT/py2/jwt/algorithms.py
new file mode 100644
index 0000000000..1343688341
--- /dev/null
+++ b/contrib/python/PyJWT/py2/jwt/algorithms.py
@@ -0,0 +1,403 @@
+import hashlib
+import hmac
+import json
+
+
+from .compat import constant_time_compare, string_types
+from .exceptions import InvalidKeyError
+from .utils import (
+ base64url_decode, base64url_encode, der_to_raw_signature,
+ force_bytes, force_unicode, from_base64url_uint, raw_to_der_signature,
+ to_base64url_uint
+)
+
+try:
+ from cryptography.hazmat.primitives import hashes
+ from cryptography.hazmat.primitives.serialization import (
+ load_pem_private_key, load_pem_public_key, load_ssh_public_key
+ )
+ from cryptography.hazmat.primitives.asymmetric.rsa import (
+ RSAPrivateKey, RSAPublicKey, RSAPrivateNumbers, RSAPublicNumbers,
+ rsa_recover_prime_factors, rsa_crt_dmp1, rsa_crt_dmq1, rsa_crt_iqmp
+ )
+ from cryptography.hazmat.primitives.asymmetric.ec import (
+ EllipticCurvePrivateKey, EllipticCurvePublicKey
+ )
+ from cryptography.hazmat.primitives.asymmetric import ec, padding
+ from cryptography.hazmat.backends import default_backend
+ from cryptography.exceptions import InvalidSignature
+
+ has_crypto = True
+except ImportError:
+ has_crypto = False
+
+requires_cryptography = set(['RS256', 'RS384', 'RS512', 'ES256', 'ES384',
+ 'ES521', 'ES512', 'PS256', 'PS384', 'PS512'])
+
+
def get_default_algorithms():
    """
    Returns the algorithms that are implemented by the library.
    """
    # Symmetric + "none" algorithms are always available.
    algorithms = {
        'none': NoneAlgorithm(),
        'HS256': HMACAlgorithm(HMACAlgorithm.SHA256),
        'HS384': HMACAlgorithm(HMACAlgorithm.SHA384),
        'HS512': HMACAlgorithm(HMACAlgorithm.SHA512),
    }

    if not has_crypto:
        return algorithms

    # Asymmetric algorithms require the `cryptography` package.
    algorithms['RS256'] = RSAAlgorithm(RSAAlgorithm.SHA256)
    algorithms['RS384'] = RSAAlgorithm(RSAAlgorithm.SHA384)
    algorithms['RS512'] = RSAAlgorithm(RSAAlgorithm.SHA512)
    algorithms['ES256'] = ECAlgorithm(ECAlgorithm.SHA256)
    algorithms['ES384'] = ECAlgorithm(ECAlgorithm.SHA384)
    algorithms['ES521'] = ECAlgorithm(ECAlgorithm.SHA512)
    algorithms['ES512'] = ECAlgorithm(ECAlgorithm.SHA512)  # Backward compat for #219 fix
    algorithms['PS256'] = RSAPSSAlgorithm(RSAPSSAlgorithm.SHA256)
    algorithms['PS384'] = RSAPSSAlgorithm(RSAPSSAlgorithm.SHA384)
    algorithms['PS512'] = RSAPSSAlgorithm(RSAPSSAlgorithm.SHA512)

    return algorithms
+
+
class Algorithm(object):
    """Abstract base class defining the contract every JWT signing
    algorithm implementation must fulfil."""

    def prepare_key(self, key):
        """Validate *key* and convert it to the form that sign() and
        verify() expect; subclasses must override."""
        raise NotImplementedError

    def sign(self, msg, key):
        """Return a digital signature for *msg* computed with *key*;
        subclasses must override."""
        raise NotImplementedError

    def verify(self, msg, key, sig):
        """Return True when *sig* is a valid signature of *msg* under
        *key*; subclasses must override."""
        raise NotImplementedError

    @staticmethod
    def to_jwk(key_obj):
        """Serialize *key_obj* into a JWK (JSON) string; subclasses must
        override."""
        raise NotImplementedError

    @staticmethod
    def from_jwk(jwk):
        """Deserialize a JWK string back into a public/private key object;
        subclasses must override."""
        raise NotImplementedError
+
+
class NoneAlgorithm(Algorithm):
    """
    Placeholder for use when no signing or verification
    operations are required ("alg": "none").
    """

    def prepare_key(self, key):
        # Treat the empty string the same as "no key"; any other key
        # material is rejected because unsigned tokens must not carry one.
        normalized = None if key == '' else key

        if normalized is not None:
            raise InvalidKeyError('When alg = "none", key value must be None.')

        return normalized

    def sign(self, msg, key):
        # Unsigned tokens have an empty signature segment.
        return b''

    def verify(self, msg, key, sig):
        # A "none" signature can never verify as valid.
        return False
+
+
class HMACAlgorithm(Algorithm):
    """
    Performs signing and verification operations using HMAC
    and the specified hash function.
    """
    SHA256 = hashlib.sha256
    SHA384 = hashlib.sha384
    SHA512 = hashlib.sha512

    def __init__(self, hash_alg):
        # hash_alg: one of the hashlib constructors declared above.
        self.hash_alg = hash_alg

    def prepare_key(self, key):
        key = force_bytes(key)

        # Refuse keys that look like asymmetric key material: using a
        # public key as an HMAC secret enables algorithm-confusion attacks
        # (signing with the server's public RSA key as the HMAC secret).
        invalid_strings = [
            b'-----BEGIN PUBLIC KEY-----',
            b'-----BEGIN CERTIFICATE-----',
            b'-----BEGIN RSA PUBLIC KEY-----',
            b'ssh-rsa'
        ]

        if any([string_value in key for string_value in invalid_strings]):
            raise InvalidKeyError(
                'The specified key is an asymmetric key or x509 certificate and'
                ' should not be used as an HMAC secret.')

        return key

    @staticmethod
    def to_jwk(key_obj):
        # Symmetric secrets serialize as an "oct" (octet sequence) JWK.
        return json.dumps({
            'k': force_unicode(base64url_encode(force_bytes(key_obj))),
            'kty': 'oct'
        })

    @staticmethod
    def from_jwk(jwk):
        obj = json.loads(jwk)

        if obj.get('kty') != 'oct':
            raise InvalidKeyError('Not an HMAC key')

        return base64url_decode(obj['k'])

    def sign(self, msg, key):
        return hmac.new(key, msg, self.hash_alg).digest()

    def verify(self, msg, key, sig):
        # Constant-time comparison prevents timing side channels.
        return constant_time_compare(sig, self.sign(msg, key))
+
+
+if has_crypto:
+
    class RSAAlgorithm(Algorithm):
        """
        Performs signing and verification operations using
        RSASSA-PKCS-v1_5 and the specified hash function.
        """
        SHA256 = hashes.SHA256
        SHA384 = hashes.SHA384
        SHA512 = hashes.SHA512

        def __init__(self, hash_alg):
            # hash_alg: one of the cryptography hashes.* classes above.
            self.hash_alg = hash_alg

        def prepare_key(self, key):
            # Already-loaded cryptography key objects pass through untouched.
            if isinstance(key, RSAPrivateKey) or \
               isinstance(key, RSAPublicKey):
                return key

            if isinstance(key, string_types):
                key = force_bytes(key)

                # Try private-key PEM first; a ValueError falls back to
                # public-key PEM.  OpenSSH-format public keys are detected
                # by their "ssh-rsa" prefix.
                try:
                    if key.startswith(b'ssh-rsa'):
                        key = load_ssh_public_key(key, backend=default_backend())
                    else:
                        key = load_pem_private_key(key, password=None, backend=default_backend())
                except ValueError:
                    key = load_pem_public_key(key, backend=default_backend())
            else:
                raise TypeError('Expecting a PEM-formatted key.')

            return key

        @staticmethod
        def to_jwk(key_obj):
            obj = None

            # Private keys expose private_numbers(); that distinguishes
            # them from public keys here.
            if getattr(key_obj, 'private_numbers', None):
                # Private key
                numbers = key_obj.private_numbers()

                obj = {
                    'kty': 'RSA',
                    'key_ops': ['sign'],
                    'n': force_unicode(to_base64url_uint(numbers.public_numbers.n)),
                    'e': force_unicode(to_base64url_uint(numbers.public_numbers.e)),
                    'd': force_unicode(to_base64url_uint(numbers.d)),
                    'p': force_unicode(to_base64url_uint(numbers.p)),
                    'q': force_unicode(to_base64url_uint(numbers.q)),
                    'dp': force_unicode(to_base64url_uint(numbers.dmp1)),
                    'dq': force_unicode(to_base64url_uint(numbers.dmq1)),
                    'qi': force_unicode(to_base64url_uint(numbers.iqmp))
                }

            elif getattr(key_obj, 'verify', None):
                # Public key
                numbers = key_obj.public_numbers()

                obj = {
                    'kty': 'RSA',
                    'key_ops': ['verify'],
                    'n': force_unicode(to_base64url_uint(numbers.n)),
                    'e': force_unicode(to_base64url_uint(numbers.e))
                }
            else:
                raise InvalidKeyError('Not a public or private key')

            return json.dumps(obj)

        @staticmethod
        def from_jwk(jwk):
            try:
                obj = json.loads(jwk)
            except ValueError:
                raise InvalidKeyError('Key is not valid JSON')

            if obj.get('kty') != 'RSA':
                raise InvalidKeyError('Not an RSA key')

            if 'd' in obj and 'e' in obj and 'n' in obj:
                # Private key
                if 'oth' in obj:
                    raise InvalidKeyError('Unsupported RSA private key: > 2 primes not supported')

                # The CRT parameters are all-or-nothing: a partial set
                # cannot produce a consistent private key.
                other_props = ['p', 'q', 'dp', 'dq', 'qi']
                props_found = [prop in obj for prop in other_props]
                any_props_found = any(props_found)

                if any_props_found and not all(props_found):
                    raise InvalidKeyError('RSA key must include all parameters if any are present besides d')

                public_numbers = RSAPublicNumbers(
                    from_base64url_uint(obj['e']), from_base64url_uint(obj['n'])
                )

                if any_props_found:
                    numbers = RSAPrivateNumbers(
                        d=from_base64url_uint(obj['d']),
                        p=from_base64url_uint(obj['p']),
                        q=from_base64url_uint(obj['q']),
                        dmp1=from_base64url_uint(obj['dp']),
                        dmq1=from_base64url_uint(obj['dq']),
                        iqmp=from_base64url_uint(obj['qi']),
                        public_numbers=public_numbers
                    )
                else:
                    # Only n, e, d present: recover p/q and derive the CRT
                    # parameters (expensive, but spec-permitted JWK form).
                    d = from_base64url_uint(obj['d'])
                    p, q = rsa_recover_prime_factors(
                        public_numbers.n, d, public_numbers.e
                    )

                    numbers = RSAPrivateNumbers(
                        d=d,
                        p=p,
                        q=q,
                        dmp1=rsa_crt_dmp1(d, p),
                        dmq1=rsa_crt_dmq1(d, q),
                        iqmp=rsa_crt_iqmp(p, q),
                        public_numbers=public_numbers
                    )

                return numbers.private_key(default_backend())
            elif 'n' in obj and 'e' in obj:
                # Public key
                numbers = RSAPublicNumbers(
                    from_base64url_uint(obj['e']), from_base64url_uint(obj['n'])
                )

                return numbers.public_key(default_backend())
            else:
                raise InvalidKeyError('Not a public or private key')

        def sign(self, msg, key):
            # PKCS#1 v1.5 padding per RFC 7518 for the RS* family.
            return key.sign(msg, padding.PKCS1v15(), self.hash_alg())

        def verify(self, msg, key, sig):
            try:
                key.verify(sig, msg, padding.PKCS1v15(), self.hash_alg())
                return True
            except InvalidSignature:
                return False
+
    class ECAlgorithm(Algorithm):
        """
        Performs signing and verification operations using
        ECDSA and the specified hash function
        """
        SHA256 = hashes.SHA256
        SHA384 = hashes.SHA384
        SHA512 = hashes.SHA512

        def __init__(self, hash_alg):
            # hash_alg: one of the cryptography hashes.* classes above.
            self.hash_alg = hash_alg

        def prepare_key(self, key):
            # Already-loaded cryptography key objects pass through untouched.
            if isinstance(key, EllipticCurvePrivateKey) or \
               isinstance(key, EllipticCurvePublicKey):
                return key

            if isinstance(key, string_types):
                key = force_bytes(key)

                # Attempt to load key. We don't know if it's
                # a Signing Key or a Verifying Key, so we try
                # the Verifying Key first.
                try:
                    if key.startswith(b'ecdsa-sha2-'):
                        key = load_ssh_public_key(key, backend=default_backend())
                    else:
                        key = load_pem_public_key(key, backend=default_backend())
                except ValueError:
                    key = load_pem_private_key(key, password=None, backend=default_backend())

            else:
                raise TypeError('Expecting a PEM-formatted key.')

            return key

        def sign(self, msg, key):
            # JWS uses the raw (r || s) signature encoding, so convert the
            # DER output produced by cryptography.
            der_sig = key.sign(msg, ec.ECDSA(self.hash_alg()))

            return der_to_raw_signature(der_sig, key.curve)

        def verify(self, msg, key, sig):
            # A malformed raw signature cannot be valid.
            try:
                der_sig = raw_to_der_signature(sig, key.curve)
            except ValueError:
                return False

            try:
                key.verify(der_sig, msg, ec.ECDSA(self.hash_alg()))
                return True
            except InvalidSignature:
                return False
+
    class RSAPSSAlgorithm(RSAAlgorithm):
        """
        Performs a signature using RSASSA-PSS with MGF1
        (the PS* family; key handling is inherited from RSAAlgorithm).
        """

        def sign(self, msg, key):
            # Salt length equals the hash digest size, per RFC 7518.
            return key.sign(
                msg,
                padding.PSS(
                    mgf=padding.MGF1(self.hash_alg()),
                    salt_length=self.hash_alg.digest_size
                ),
                self.hash_alg()
            )

        def verify(self, msg, key, sig):
            try:
                key.verify(
                    sig,
                    msg,
                    padding.PSS(
                        mgf=padding.MGF1(self.hash_alg()),
                        salt_length=self.hash_alg.digest_size
                    ),
                    self.hash_alg()
                )
                return True
            except InvalidSignature:
                return False
diff --git a/contrib/python/PyJWT/py2/jwt/api_jws.py b/contrib/python/PyJWT/py2/jwt/api_jws.py
new file mode 100644
index 0000000000..a9354adb06
--- /dev/null
+++ b/contrib/python/PyJWT/py2/jwt/api_jws.py
@@ -0,0 +1,242 @@
+import binascii
+import json
+import warnings
+try:
+ # import required by mypy to perform type checking, not used for normal execution
+ from typing import Callable, Dict, List, Optional, Union # NOQA
+except ImportError:
+ pass
+
+from .algorithms import (
+ Algorithm, get_default_algorithms, has_crypto, requires_cryptography # NOQA
+)
+from .compat import Mapping, binary_type, string_types, text_type
+from .exceptions import (
+ DecodeError, InvalidAlgorithmError, InvalidSignatureError,
+ InvalidTokenError
+)
+from .utils import base64url_decode, base64url_encode, force_bytes, merge_dict
+
+
class PyJWS(object):
    """Low-level JSON Web Signature (JWS) encoder/decoder.

    Handles the header.payload.signature segment plumbing; the payload is
    treated as opaque bytes.  JWT-specific JSON claim handling is layered
    on top by the PyJWT subclass.
    """
    header_typ = 'JWT'

    def __init__(self, algorithms=None, options=None):
        """*algorithms* is an optional whitelist of algorithm names;
        *options* overrides the defaults from _get_default_options()."""
        self._algorithms = get_default_algorithms()
        self._valid_algs = (set(algorithms) if algorithms is not None
                            else set(self._algorithms))

        # Remove algorithms that aren't on the whitelist
        for key in list(self._algorithms.keys()):
            if key not in self._valid_algs:
                del self._algorithms[key]

        if not options:
            options = {}

        self.options = merge_dict(self._get_default_options(), options)

    @staticmethod
    def _get_default_options():
        return {
            'verify_signature': True
        }

    def register_algorithm(self, alg_id, alg_obj):
        """
        Registers a new Algorithm for use when creating and verifying tokens.
        """
        if alg_id in self._algorithms:
            raise ValueError('Algorithm already has a handler.')

        if not isinstance(alg_obj, Algorithm):
            raise TypeError('Object is not of type `Algorithm`')

        self._algorithms[alg_id] = alg_obj
        self._valid_algs.add(alg_id)

    def unregister_algorithm(self, alg_id):
        """
        Unregisters an Algorithm for use when creating and verifying tokens
        Throws KeyError if algorithm is not registered.
        """
        if alg_id not in self._algorithms:
            raise KeyError('The specified algorithm could not be removed'
                           ' because it is not registered.')

        del self._algorithms[alg_id]
        self._valid_algs.remove(alg_id)

    def get_algorithms(self):
        """
        Returns a list of supported values for the 'alg' parameter.
        """
        return list(self._valid_algs)

    def encode(self,
               payload,  # type: Union[Dict, bytes]
               key,  # type: str
               algorithm='HS256',  # type: str
               headers=None,  # type: Optional[Dict]
               json_encoder=None  # type: Optional[Callable]
               ):
        """Serialize and sign *payload*, returning compact JWS bytes.

        Raises NotImplementedError for an unknown algorithm, or one that
        needs the (missing) `cryptography` package.
        """
        segments = []

        if algorithm is None:
            algorithm = 'none'

        # Unknown algorithms are reported via the KeyError handler around
        # self._algorithms[algorithm] below.  (A dead
        # `if algorithm not in self._valid_algs: pass` was removed here.)

        # Header
        header = {'typ': self.header_typ, 'alg': algorithm}

        if headers:
            self._validate_headers(headers)
            header.update(headers)

        json_header = force_bytes(
            json.dumps(
                header,
                separators=(',', ':'),  # compact encoding, no whitespace
                cls=json_encoder
            )
        )

        segments.append(base64url_encode(json_header))
        segments.append(base64url_encode(payload))

        # Segments
        signing_input = b'.'.join(segments)
        try:
            alg_obj = self._algorithms[algorithm]
            key = alg_obj.prepare_key(key)
            signature = alg_obj.sign(signing_input, key)

        except KeyError:
            if not has_crypto and algorithm in requires_cryptography:
                raise NotImplementedError(
                    "Algorithm '%s' could not be found. Do you have cryptography "
                    "installed?" % algorithm
                )
            else:
                raise NotImplementedError('Algorithm not supported')

        segments.append(base64url_encode(signature))

        return b'.'.join(segments)

    def decode(self,
               jwt,  # type: str
               key='',  # type: str
               verify=True,  # type: bool
               algorithms=None,  # type: List[str]
               options=None,  # type: Dict
               **kwargs):
        """Split and (optionally) verify *jwt*; return the raw payload bytes.

        Verification runs unless disabled via the deprecated *verify*
        argument or options['verify_signature'].
        """

        merged_options = merge_dict(self.options, options)
        verify_signature = merged_options['verify_signature']

        if verify_signature and not algorithms:
            warnings.warn(
                'It is strongly recommended that you pass in a ' +
                'value for the "algorithms" argument when calling decode(). ' +
                'This argument will be mandatory in a future version.',
                DeprecationWarning
            )

        payload, signing_input, header, signature = self._load(jwt)

        if not verify:
            warnings.warn('The verify parameter is deprecated. '
                          'Please use verify_signature in options instead.',
                          DeprecationWarning, stacklevel=2)
        elif verify_signature:
            self._verify_signature(payload, signing_input, header, signature,
                                   key, algorithms)

        return payload

    def get_unverified_header(self, jwt):
        """Returns back the JWT header parameters as a dict()

        Note: The signature is not verified so the header parameters
        should not be fully trusted until signature verification is complete
        """
        headers = self._load(jwt)[2]
        self._validate_headers(headers)

        return headers

    def _load(self, jwt):
        """Split *jwt* into (payload, signing_input, header, signature),
        base64url-decoding each segment and JSON-decoding the header.

        Raises DecodeError for any malformed segment.
        """
        if isinstance(jwt, text_type):
            jwt = jwt.encode('utf-8')

        if not issubclass(type(jwt), binary_type):
            raise DecodeError("Invalid token type. Token must be a {0}".format(
                binary_type))

        try:
            # rsplit keeps any '.' inside header/payload intact.
            signing_input, crypto_segment = jwt.rsplit(b'.', 1)
            header_segment, payload_segment = signing_input.split(b'.', 1)
        except ValueError:
            raise DecodeError('Not enough segments')

        try:
            header_data = base64url_decode(header_segment)
        except (TypeError, binascii.Error):
            raise DecodeError('Invalid header padding')

        try:
            header = json.loads(header_data.decode('utf-8'))
        except ValueError as e:
            raise DecodeError('Invalid header string: %s' % e)

        if not isinstance(header, Mapping):
            raise DecodeError('Invalid header string: must be a json object')

        try:
            payload = base64url_decode(payload_segment)
        except (TypeError, binascii.Error):
            raise DecodeError('Invalid payload padding')

        try:
            signature = base64url_decode(crypto_segment)
        except (TypeError, binascii.Error):
            raise DecodeError('Invalid crypto padding')

        return (payload, signing_input, header, signature)

    def _verify_signature(self, payload, signing_input, header, signature,
                          key='', algorithms=None):
        """Verify *signature* over *signing_input* using the header's alg.

        Raises InvalidAlgorithmError for a disallowed/unknown algorithm and
        InvalidSignatureError when verification fails.
        """

        alg = header.get('alg')

        # Enforce the caller's whitelist before touching the key: never let
        # the token itself choose an unexpected algorithm.
        if algorithms is not None and alg not in algorithms:
            raise InvalidAlgorithmError('The specified alg value is not allowed')

        try:
            alg_obj = self._algorithms[alg]
            key = alg_obj.prepare_key(key)

            if not alg_obj.verify(signing_input, key, signature):
                raise InvalidSignatureError('Signature verification failed')

        except KeyError:
            raise InvalidAlgorithmError('Algorithm not supported')

    def _validate_headers(self, headers):
        if 'kid' in headers:
            self._validate_kid(headers['kid'])

    def _validate_kid(self, kid):
        if not isinstance(kid, string_types):
            raise InvalidTokenError('Key ID header parameter must be a string')
+
+
+_jws_global_obj = PyJWS()
+encode = _jws_global_obj.encode
+decode = _jws_global_obj.decode
+register_algorithm = _jws_global_obj.register_algorithm
+unregister_algorithm = _jws_global_obj.unregister_algorithm
+get_unverified_header = _jws_global_obj.get_unverified_header
diff --git a/contrib/python/PyJWT/py2/jwt/api_jwt.py b/contrib/python/PyJWT/py2/jwt/api_jwt.py
new file mode 100644
index 0000000000..85504acf93
--- /dev/null
+++ b/contrib/python/PyJWT/py2/jwt/api_jwt.py
@@ -0,0 +1,222 @@
+import json
+import warnings
+from calendar import timegm
+from datetime import datetime, timedelta
+try:
+ # import required by mypy to perform type checking, not used for normal execution
+ from typing import Callable, Dict, List, Optional, Union # NOQA
+except ImportError:
+ pass
+
+from .api_jws import PyJWS
+from .algorithms import Algorithm, get_default_algorithms # NOQA
+from .compat import Iterable, Mapping, string_types
+from .exceptions import (
+ DecodeError, ExpiredSignatureError, ImmatureSignatureError,
+ InvalidAudienceError, InvalidIssuedAtError,
+ InvalidIssuerError, MissingRequiredClaimError
+)
+from .utils import merge_dict
+
+
+class PyJWT(PyJWS):
+ header_type = 'JWT'
+
+ @staticmethod
+ def _get_default_options():
+ # type: () -> Dict[str, bool]
+ return {
+ 'verify_signature': True,
+ 'verify_exp': True,
+ 'verify_nbf': True,
+ 'verify_iat': True,
+ 'verify_aud': True,
+ 'verify_iss': True,
+ 'require_exp': False,
+ 'require_iat': False,
+ 'require_nbf': False
+ }
+
+ def encode(self,
+ payload, # type: Union[Dict, bytes]
+ key, # type: str
+ algorithm='HS256', # type: str
+ headers=None, # type: Optional[Dict]
+ json_encoder=None # type: Optional[Callable]
+ ):
+ # Check that we get a mapping
+ if not isinstance(payload, Mapping):
+ raise TypeError('Expecting a mapping object, as JWT only supports '
+ 'JSON objects as payloads.')
+
+ # Payload
+ for time_claim in ['exp', 'iat', 'nbf']:
+ # Convert datetime to a intDate value in known time-format claims
+ if isinstance(payload.get(time_claim), datetime):
+ payload[time_claim] = timegm(payload[time_claim].utctimetuple()) # type: ignore
+
+ json_payload = json.dumps(
+ payload,
+ separators=(',', ':'),
+ cls=json_encoder
+ ).encode('utf-8')
+
+ return super(PyJWT, self).encode(
+ json_payload, key, algorithm, headers, json_encoder
+ )
+
+ def decode(self,
+ jwt, # type: str
+ key='', # type: str
+ verify=True, # type: bool
+ algorithms=None, # type: List[str]
+ options=None, # type: Dict
+ **kwargs):
+
+ if verify and not algorithms:
+ warnings.warn(
+ 'It is strongly recommended that you pass in a ' +
+ 'value for the "algorithms" argument when calling decode(). ' +
+ 'This argument will be mandatory in a future version.',
+ DeprecationWarning
+ )
+
+ payload, _, _, _ = self._load(jwt)
+
+ if options is None:
+ options = {'verify_signature': verify}
+ else:
+ options.setdefault('verify_signature', verify)
+
+ decoded = super(PyJWT, self).decode(
+ jwt, key=key, algorithms=algorithms, options=options, **kwargs
+ )
+
+ try:
+ payload = json.loads(decoded.decode('utf-8'))
+ except ValueError as e:
+ raise DecodeError('Invalid payload string: %s' % e)
+ if not isinstance(payload, Mapping):
+ raise DecodeError('Invalid payload string: must be a json object')
+
+ if verify:
+ merged_options = merge_dict(self.options, options)
+ self._validate_claims(payload, merged_options, **kwargs)
+
+ return payload
+
+ def _validate_claims(self, payload, options, audience=None, issuer=None,
+ leeway=0, **kwargs):
+
+ if 'verify_expiration' in kwargs:
+ options['verify_exp'] = kwargs.get('verify_expiration', True)
+ warnings.warn('The verify_expiration parameter is deprecated. '
+ 'Please use verify_exp in options instead.',
+ DeprecationWarning)
+
+ if isinstance(leeway, timedelta):
+ leeway = leeway.total_seconds()
+
+ if not isinstance(audience, (string_types, type(None), Iterable)):
+ raise TypeError('audience must be a string, iterable, or None')
+
+ self._validate_required_claims(payload, options)
+
+ now = timegm(datetime.utcnow().utctimetuple())
+
+ if 'iat' in payload and options.get('verify_iat'):
+ self._validate_iat(payload, now, leeway)
+
+ if 'nbf' in payload and options.get('verify_nbf'):
+ self._validate_nbf(payload, now, leeway)
+
+ if 'exp' in payload and options.get('verify_exp'):
+ self._validate_exp(payload, now, leeway)
+
+ if options.get('verify_iss'):
+ self._validate_iss(payload, issuer)
+
+ if options.get('verify_aud'):
+ self._validate_aud(payload, audience)
+
+ def _validate_required_claims(self, payload, options):
+ if options.get('require_exp') and payload.get('exp') is None:
+ raise MissingRequiredClaimError('exp')
+
+ if options.get('require_iat') and payload.get('iat') is None:
+ raise MissingRequiredClaimError('iat')
+
+ if options.get('require_nbf') and payload.get('nbf') is None:
+ raise MissingRequiredClaimError('nbf')
+
+ def _validate_iat(self, payload, now, leeway):
+ try:
+ int(payload['iat'])
+ except ValueError:
+ raise InvalidIssuedAtError('Issued At claim (iat) must be an integer.')
+
+ def _validate_nbf(self, payload, now, leeway):
+ try:
+ nbf = int(payload['nbf'])
+ except ValueError:
+ raise DecodeError('Not Before claim (nbf) must be an integer.')
+
+ if nbf > (now + leeway):
+ raise ImmatureSignatureError('The token is not yet valid (nbf)')
+
+ def _validate_exp(self, payload, now, leeway):
+ try:
+ exp = int(payload['exp'])
+ except ValueError:
+ raise DecodeError('Expiration Time claim (exp) must be an'
+ ' integer.')
+
+ if exp < (now - leeway):
+ raise ExpiredSignatureError('Signature has expired')
+
+ def _validate_aud(self, payload, audience):
+ if audience is None and 'aud' not in payload:
+ return
+
+ if audience is not None and 'aud' not in payload:
+ # Application specified an audience, but it could not be
+ # verified since the token does not contain a claim.
+ raise MissingRequiredClaimError('aud')
+
+ if audience is None and 'aud' in payload:
+ # Application did not specify an audience, but
+ # the token has the 'aud' claim
+ raise InvalidAudienceError('Invalid audience')
+
+ audience_claims = payload['aud']
+
+ if isinstance(audience_claims, string_types):
+ audience_claims = [audience_claims]
+ if not isinstance(audience_claims, list):
+ raise InvalidAudienceError('Invalid claim format in token')
+ if any(not isinstance(c, string_types) for c in audience_claims):
+ raise InvalidAudienceError('Invalid claim format in token')
+
+ if isinstance(audience, string_types):
+ audience = [audience]
+
+ if not any(aud in audience_claims for aud in audience):
+ raise InvalidAudienceError('Invalid audience')
+
+ def _validate_iss(self, payload, issuer):
+ if issuer is None:
+ return
+
+ if 'iss' not in payload:
+ raise MissingRequiredClaimError('iss')
+
+ if payload['iss'] != issuer:
+ raise InvalidIssuerError('Invalid issuer')
+
+
+_jwt_global_obj = PyJWT()
+encode = _jwt_global_obj.encode
+decode = _jwt_global_obj.decode
+register_algorithm = _jwt_global_obj.register_algorithm
+unregister_algorithm = _jwt_global_obj.unregister_algorithm
+get_unverified_header = _jwt_global_obj.get_unverified_header
diff --git a/contrib/python/PyJWT/py2/jwt/compat.py b/contrib/python/PyJWT/py2/jwt/compat.py
new file mode 100644
index 0000000000..e79e258e56
--- /dev/null
+++ b/contrib/python/PyJWT/py2/jwt/compat.py
@@ -0,0 +1,68 @@
+"""
+The `compat` module provides support for backwards compatibility with older
+versions of python, and compatibility wrappers around optional packages.
+"""
+# flake8: noqa
+import hmac
+import struct
+import sys
+
+
+PY3 = sys.version_info[0] == 3
+
+
+if PY3:
+ text_type = str
+ binary_type = bytes
+else:
+ text_type = unicode
+ binary_type = str
+
+string_types = (text_type, binary_type)
+
+try:
+ # Importing ABCs from collections will be removed in PY3.8
+ from collections.abc import Iterable, Mapping
+except ImportError:
+ from collections import Iterable, Mapping
+
+try:
+ constant_time_compare = hmac.compare_digest
+except AttributeError:
+ # Fallback for Python < 2.7
+ def constant_time_compare(val1, val2):
+ """
+ Returns True if the two strings are equal, False otherwise.
+
+ The time taken is independent of the number of characters that match.
+ """
+ if len(val1) != len(val2):
+ return False
+
+ result = 0
+
+ for x, y in zip(val1, val2):
+ result |= ord(x) ^ ord(y)
+
+ return result == 0
+
+# Use int.to_bytes if it exists (Python 3)
+if getattr(int, 'to_bytes', None):
+ def bytes_from_int(val):
+ remaining = val
+ byte_length = 0
+
+ while remaining != 0:
+ remaining = remaining >> 8
+ byte_length += 1
+
+ return val.to_bytes(byte_length, 'big', signed=False)
+else:
+ def bytes_from_int(val):
+ buf = []
+ while val:
+ val, remainder = divmod(val, 256)
+ buf.append(remainder)
+
+ buf.reverse()
+ return struct.pack('%sB' % len(buf), *buf)
diff --git a/contrib/python/PyJWT/py2/jwt/contrib/__init__.py b/contrib/python/PyJWT/py2/jwt/contrib/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/PyJWT/py2/jwt/contrib/__init__.py
diff --git a/contrib/python/PyJWT/py2/jwt/contrib/algorithms/__init__.py b/contrib/python/PyJWT/py2/jwt/contrib/algorithms/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/PyJWT/py2/jwt/contrib/algorithms/__init__.py
diff --git a/contrib/python/PyJWT/py2/jwt/contrib/algorithms/py_ecdsa.py b/contrib/python/PyJWT/py2/jwt/contrib/algorithms/py_ecdsa.py
new file mode 100644
index 0000000000..bf0dea5ae2
--- /dev/null
+++ b/contrib/python/PyJWT/py2/jwt/contrib/algorithms/py_ecdsa.py
@@ -0,0 +1,60 @@
+# Note: This file is named py_ecdsa.py because import behavior in Python 2
+# would cause ecdsa.py to squash the ecdsa library that it depends upon.
+
+import hashlib
+
+import ecdsa
+
+from jwt.algorithms import Algorithm
+from jwt.compat import string_types, text_type
+
+
+class ECAlgorithm(Algorithm):
+ """
+ Performs signing and verification operations using
+ ECDSA and the specified hash function
+
+ This class requires the ecdsa package to be installed.
+
+ This is based off of the implementation in PyJWT 0.3.2
+ """
+ SHA256 = hashlib.sha256
+ SHA384 = hashlib.sha384
+ SHA512 = hashlib.sha512
+
+ def __init__(self, hash_alg):
+ self.hash_alg = hash_alg
+
+ def prepare_key(self, key):
+
+ if isinstance(key, ecdsa.SigningKey) or \
+ isinstance(key, ecdsa.VerifyingKey):
+ return key
+
+ if isinstance(key, string_types):
+ if isinstance(key, text_type):
+ key = key.encode('utf-8')
+
+ # Attempt to load key. We don't know if it's
+ # a Signing Key or a Verifying Key, so we try
+ # the Verifying Key first.
+ try:
+ key = ecdsa.VerifyingKey.from_pem(key)
+ except ecdsa.der.UnexpectedDER:
+ key = ecdsa.SigningKey.from_pem(key)
+
+ else:
+ raise TypeError('Expecting a PEM-formatted key.')
+
+ return key
+
+ def sign(self, msg, key):
+ return key.sign(msg, hashfunc=self.hash_alg,
+ sigencode=ecdsa.util.sigencode_string)
+
+ def verify(self, msg, key, sig):
+ try:
+ return key.verify(sig, msg, hashfunc=self.hash_alg,
+ sigdecode=ecdsa.util.sigdecode_string)
+ except AssertionError:
+ return False
diff --git a/contrib/python/PyJWT/py2/jwt/contrib/algorithms/pycrypto.py b/contrib/python/PyJWT/py2/jwt/contrib/algorithms/pycrypto.py
new file mode 100644
index 0000000000..e49cdbfe40
--- /dev/null
+++ b/contrib/python/PyJWT/py2/jwt/contrib/algorithms/pycrypto.py
@@ -0,0 +1,46 @@
+import Crypto.Hash.SHA256
+import Crypto.Hash.SHA384
+import Crypto.Hash.SHA512
+from Crypto.PublicKey import RSA
+from Crypto.Signature import PKCS1_v1_5
+
+from jwt.algorithms import Algorithm
+from jwt.compat import string_types, text_type
+
+
+class RSAAlgorithm(Algorithm):
+ """
+ Performs signing and verification operations using
+ RSASSA-PKCS-v1_5 and the specified hash function.
+
+ This class requires PyCrypto package to be installed.
+
+ This is based off of the implementation in PyJWT 0.3.2
+ """
+ SHA256 = Crypto.Hash.SHA256
+ SHA384 = Crypto.Hash.SHA384
+ SHA512 = Crypto.Hash.SHA512
+
+ def __init__(self, hash_alg):
+ self.hash_alg = hash_alg
+
+ def prepare_key(self, key):
+
+ if isinstance(key, RSA._RSAobj):
+ return key
+
+ if isinstance(key, string_types):
+ if isinstance(key, text_type):
+ key = key.encode('utf-8')
+
+ key = RSA.importKey(key)
+ else:
+ raise TypeError('Expecting a PEM- or RSA-formatted key.')
+
+ return key
+
+ def sign(self, msg, key):
+ return PKCS1_v1_5.new(key).sign(self.hash_alg.new(msg))
+
+ def verify(self, msg, key, sig):
+ return PKCS1_v1_5.new(key).verify(self.hash_alg.new(msg), sig)
diff --git a/contrib/python/PyJWT/py2/jwt/exceptions.py b/contrib/python/PyJWT/py2/jwt/exceptions.py
new file mode 100644
index 0000000000..2a6aa596ba
--- /dev/null
+++ b/contrib/python/PyJWT/py2/jwt/exceptions.py
@@ -0,0 +1,59 @@
+class PyJWTError(Exception):
+ """
+ Base class for all exceptions
+ """
+ pass
+
+
+class InvalidTokenError(PyJWTError):
+ pass
+
+
+class DecodeError(InvalidTokenError):
+ pass
+
+
+class InvalidSignatureError(DecodeError):
+ pass
+
+
+class ExpiredSignatureError(InvalidTokenError):
+ pass
+
+
+class InvalidAudienceError(InvalidTokenError):
+ pass
+
+
+class InvalidIssuerError(InvalidTokenError):
+ pass
+
+
+class InvalidIssuedAtError(InvalidTokenError):
+ pass
+
+
+class ImmatureSignatureError(InvalidTokenError):
+ pass
+
+
+class InvalidKeyError(PyJWTError):
+ pass
+
+
+class InvalidAlgorithmError(InvalidTokenError):
+ pass
+
+
+class MissingRequiredClaimError(InvalidTokenError):
+ def __init__(self, claim):
+ self.claim = claim
+
+ def __str__(self):
+ return 'Token is missing the "%s" claim' % self.claim
+
+
+# Compatibility aliases (deprecated)
+ExpiredSignature = ExpiredSignatureError
+InvalidAudience = InvalidAudienceError
+InvalidIssuer = InvalidIssuerError
diff --git a/contrib/python/PyJWT/py2/jwt/help.py b/contrib/python/PyJWT/py2/jwt/help.py
new file mode 100644
index 0000000000..55e39ebb27
--- /dev/null
+++ b/contrib/python/PyJWT/py2/jwt/help.py
@@ -0,0 +1,61 @@
+from __future__ import print_function
+
+import json
+import platform
+import sys
+
+from . import __version__ as pyjwt_version
+
+try:
+ import cryptography
+except ImportError:
+ cryptography = None
+
+try:
+ import ecdsa
+except ImportError:
+ ecdsa = None
+
+
+def info():
+ """
+ Generate information for a bug report.
+ Based on the requests package help utility module.
+ """
+ try:
+ platform_info = {"system": platform.system(), "release": platform.release()}
+ except IOError:
+ platform_info = {"system": "Unknown", "release": "Unknown"}
+
+ implementation = platform.python_implementation()
+
+ if implementation == "CPython":
+ implementation_version = platform.python_version()
+ elif implementation == "PyPy":
+ implementation_version = "%s.%s.%s" % (
+ sys.pypy_version_info.major,
+ sys.pypy_version_info.minor,
+ sys.pypy_version_info.micro,
+ )
+ if sys.pypy_version_info.releaselevel != "final":
+ implementation_version = "".join(
+ [implementation_version, sys.pypy_version_info.releaselevel]
+ )
+ else:
+ implementation_version = "Unknown"
+
+ return {
+ "platform": platform_info,
+ "implementation": {"name": implementation, "version": implementation_version},
+ "cryptography": {"version": getattr(cryptography, "__version__", "")},
+ "pyjwt": {"version": pyjwt_version},
+ }
+
+
+def main():
+ """Pretty-print the bug information as JSON."""
+ print(json.dumps(info(), sort_keys=True, indent=2))
+
+
+if __name__ == "__main__":
+ main()
diff --git a/contrib/python/PyJWT/py2/jwt/utils.py b/contrib/python/PyJWT/py2/jwt/utils.py
new file mode 100644
index 0000000000..b33c7a2d45
--- /dev/null
+++ b/contrib/python/PyJWT/py2/jwt/utils.py
@@ -0,0 +1,113 @@
+import base64
+import binascii
+import struct
+
+from .compat import binary_type, bytes_from_int, text_type
+
+try:
+ from cryptography.hazmat.primitives.asymmetric.utils import (
+ decode_dss_signature, encode_dss_signature
+ )
+except ImportError:
+ pass
+
+
+def force_unicode(value):
+ if isinstance(value, binary_type):
+ return value.decode('utf-8')
+ elif isinstance(value, text_type):
+ return value
+ else:
+ raise TypeError('Expected a string value')
+
+
+def force_bytes(value):
+ if isinstance(value, text_type):
+ return value.encode('utf-8')
+ elif isinstance(value, binary_type):
+ return value
+ else:
+ raise TypeError('Expected a string value')
+
+
+def base64url_decode(input):
+ if isinstance(input, text_type):
+ input = input.encode('ascii')
+
+ rem = len(input) % 4
+
+ if rem > 0:
+ input += b'=' * (4 - rem)
+
+ return base64.urlsafe_b64decode(input)
+
+
+def base64url_encode(input):
+ return base64.urlsafe_b64encode(input).replace(b'=', b'')
+
+
+def to_base64url_uint(val):
+ if val < 0:
+ raise ValueError('Must be a positive integer')
+
+ int_bytes = bytes_from_int(val)
+
+ if len(int_bytes) == 0:
+ int_bytes = b'\x00'
+
+ return base64url_encode(int_bytes)
+
+
+def from_base64url_uint(val):
+ if isinstance(val, text_type):
+ val = val.encode('ascii')
+
+ data = base64url_decode(val)
+
+ buf = struct.unpack('%sB' % len(data), data)
+ return int(''.join(["%02x" % byte for byte in buf]), 16)
+
+
+def merge_dict(original, updates):
+ if not updates:
+ return original
+
+ try:
+ merged_options = original.copy()
+ merged_options.update(updates)
+ except (AttributeError, ValueError) as e:
+ raise TypeError('original and updates must be a dictionary: %s' % e)
+
+ return merged_options
+
+
+def number_to_bytes(num, num_bytes):
+ padded_hex = '%0*x' % (2 * num_bytes, num)
+ big_endian = binascii.a2b_hex(padded_hex.encode('ascii'))
+ return big_endian
+
+
+def bytes_to_number(string):
+ return int(binascii.b2a_hex(string), 16)
+
+
+def der_to_raw_signature(der_sig, curve):
+ num_bits = curve.key_size
+ num_bytes = (num_bits + 7) // 8
+
+ r, s = decode_dss_signature(der_sig)
+
+ return number_to_bytes(r, num_bytes) + number_to_bytes(s, num_bytes)
+
+
+def raw_to_der_signature(raw_sig, curve):
+ num_bits = curve.key_size
+ num_bytes = (num_bits + 7) // 8
+
+ if len(raw_sig) != 2 * num_bytes:
+ raise ValueError('Invalid signature')
+
+ r = bytes_to_number(raw_sig[:num_bytes])
+ s = bytes_to_number(raw_sig[num_bytes:])
+
+ return encode_dss_signature(r, s)
diff --git a/contrib/python/PyJWT/py2/ya.make b/contrib/python/PyJWT/py2/ya.make
new file mode 100644
index 0000000000..57a9352fba
--- /dev/null
+++ b/contrib/python/PyJWT/py2/ya.make
@@ -0,0 +1,43 @@
+# Generated by devtools/yamaker (pypi).
+
+PY2_LIBRARY()
+
+VERSION(1.7.1)
+
+LICENSE(MIT)
+
+PEERDIR(
+ contrib/python/cryptography
+)
+
+NO_LINT()
+
+NO_CHECK_IMPORTS(
+ jwt.contrib.*
+)
+
+PY_SRCS(
+ TOP_LEVEL
+ jwt/__init__.py
+ jwt/__main__.py
+ jwt/algorithms.py
+ jwt/api_jws.py
+ jwt/api_jwt.py
+ jwt/compat.py
+ jwt/contrib/__init__.py
+ jwt/contrib/algorithms/__init__.py
+ jwt/contrib/algorithms/py_ecdsa.py
+ jwt/contrib/algorithms/pycrypto.py
+ jwt/exceptions.py
+ jwt/help.py
+ jwt/utils.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/python/PyJWT/py2/
+ .dist-info/METADATA
+ .dist-info/entry_points.txt
+ .dist-info/top_level.txt
+)
+
+END()
diff --git a/contrib/python/yandexcloud/.dist-info/METADATA b/contrib/python/yandexcloud/.dist-info/METADATA
new file mode 100644
index 0000000000..dacbaf4ee3
--- /dev/null
+++ b/contrib/python/yandexcloud/.dist-info/METADATA
@@ -0,0 +1,203 @@
+Metadata-Version: 2.1
+Name: yandexcloud
+Version: 0.293.0
+Summary: The Yandex.Cloud official SDK
+Home-page: https://github.com/yandex-cloud/python-sdk
+Author: Yandex LLC
+Author-email: cloud@support.yandex.ru
+License: MIT
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Description-Content-Type: text/markdown
+License-File: LICENSE
+License-File: AUTHORS
+Requires-Dist: cryptography >=2.8
+Requires-Dist: grpcio >=1.64.0
+Requires-Dist: protobuf >=4.25.3
+Requires-Dist: googleapis-common-protos >=1.63.0
+Requires-Dist: pyjwt >=1.7.1
+Requires-Dist: requests >=2.22.0
+Requires-Dist: six >=1.14.0
+
+[![PyPI Version][pypi-image]][pypi-url]
+[![Build Status][build-image]][build-url]
+[![License][license-image]][license-url]
+
+<!-- Badges -->
+
+[pypi-image]: https://img.shields.io/pypi/v/yandexcloud
+[pypi-url]: https://pypi.org/project/yandexcloud/
+[build-image]: https://github.com/yandex-cloud/python-sdk/actions/workflows/run-tests.yml/badge.svg
+[build-url]: https://github.com/yandex-cloud/python-sdk/actions/workflows/run-tests.yml
+[license-image]: https://img.shields.io/github/license/yandex-cloud/python-sdk.svg
+[license-url]: https://github.com/yandex-cloud/python-sdk/blob/master/LICENSE
+
+# Yandex.Cloud SDK (Python)
+
+Need to automate your infrastructure or use services provided by Yandex.Cloud? We've got you covered.
+
+Installation:
+
+ pip install yandexcloud
+
+## Getting started
+
+There are several options for authorization your requests - OAuth Token,
+Metadata Service (if you're executing your code inside VMs or Cloud Functions
+running in Yandex.Cloud), Service Account Keys, and externally created IAM tokens.
+
+### OAuth Token
+
+```python
+sdk = yandexcloud.SDK(token='AQAD-.....')
+```
+
+### Metadata Service
+
+Don't forget to assign Service Account for your Instance or Function and grant required roles.
+
+```python
+sdk = yandexcloud.SDK()
+```
+
+### Service Account Keys
+
+```python
+# you can store and read it from JSON file
+sa_key = {
+ "id": "...",
+ "service_account_id": "...",
+ "private_key": "..."
+}
+
+sdk = yandexcloud.SDK(service_account_key=sa_key)
+```
+
+### IAM tokens
+
+```python
+sdk = yandexcloud.SDK(iam_token="t1.9eu...")
+```
+
+Check `examples` directory for more examples.
+
+### Override service endpoint
+
+#### Supported services
+
+| Service Name | Alias |
+|------------------------------------------------------------------------|--------------------------|
+| yandex.cloud.ai.foundation_models | ai-foundation-models |
+| yandex.cloud.ai.llm | ai-llm |
+| yandex.cloud.ai.stt | ai-stt |
+| yandex.cloud.ai.translate | ai-translate |
+| yandex.cloud.ai.tts | ai-speechkit |
+| yandex.cloud.ai.vision | ai-vision |
+| yandex.cloud.apploadbalancer | alb |
+| yandex.cloud.billing | billing |
+| yandex.cloud.cdn | cdn |
+| yandex.cloud.certificatemanager.v1.certificate_content_service | certificate-manager-data |
+| yandex.cloud.certificatemanager | certificate-manager |
+| yandex.cloud.compute | compute |
+| yandex.cloud.containerregistry | container-registry |
+| yandex.cloud.dataproc.manager | dataproc-manager |
+| yandex.cloud.dataproc | dataproc |
+| yandex.cloud.datatransfer | datatransfer |
+| yandex.cloud.dns | dns |
+| yandex.cloud.endpoint | endpoint |
+| yandex.cloud.iam | iam |
+| yandex.cloud.iot.devices | iot-devices |
+| yandex.cloud.k8s | managed-kubernetes |
+| yandex.cloud.kms | kms |
+| yandex.cloud.kms.v1.symmetric_crypto_service | kms-crypto |
+| yandex.cloud.loadbalancer | load-balancer |
+| yandex.cloud.loadtesting | loadtesting |
+| yandex.cloud.lockbox.v1.payload_service | lockbox-payload |
+| yandex.cloud.lockbox | lockbox |
+| yandex.cloud.logging.v1.log_ingestion_service | log-ingestion |
+| yandex.cloud.logging.v1.log_reading_service | log-reading |
+| yandex.cloud.logging | logging |
+| yandex.cloud.marketplace | marketplace |
+| yandex.cloud.mdb.clickhouse | managed-clickhouse |
+| yandex.cloud.mdb.elasticsearch | managed-elasticsearch |
+| yandex.cloud.mdb.greenplum | managed-greenplum |
+| yandex.cloud.mdb.kafka | managed-kafka |
+| yandex.cloud.mdb.mongodb | managed-mongodb |
+| yandex.cloud.mdb.mysql | managed-mysql |
+| yandex.cloud.mdb.opensearch | managed-opensearch |
+| yandex.cloud.mdb.postgresql | managed-postgresql |
+| yandex.cloud.mdb.redis | managed-redis |
+| yandex.cloud.mdb.sqlserver | managed-sqlserver |
+| yandex.cloud.operation | operation |
+| yandex.cloud.organizationmanager | organization-manager |
+| yandex.cloud.resourcemanager | resource-manager |
+| yandex.cloud.serverless.apigateway | serverless-apigateway |
+| yandex.cloud.serverless.apigateway.websocket | apigateway-connections |
+| yandex.cloud.serverless.containers | serverless-containers |
+| yandex.cloud.serverless.functions | serverless-functions |
+| yandex.cloud.serverless.triggers | serverless-triggers |
+| yandex.cloud.storage | storage-api |
+| yandex.cloud.vpc | vpc |
+| yandex.cloud.ydb | ydb |
+
+
+#### Override in client
+```python
+from yandex.cloud.vpc.v1.network_service_pb2_grpc import NetworkServiceStub
+from yandexcloud import SDK
+
+sdk = SDK(iam_token="t1.9eu...")
+new_network_client_endpoint = "example.new.vpc.very.new.yandex:50051"
+insecure = False # by default is False, but if server does not support verification can be set to True
+network_client = sdk.client(NetworkServiceStub, endpoint=new_network_client_endpoint, insecure=False)
+```
+
+#### Override in sdk config
+To override endpoints provide dict in format {alias : new-endpoint}
+```python
+from yandex.cloud.vpc.v1.network_service_pb2_grpc import NetworkServiceStub
+from yandexcloud import SDK
+new_network_client_endpoint = "example.new.vpc.very.new.yandex:50051"
+sdk = SDK(iam_token="t1.9eu...", endpoints={"vpc": new_network_client_endpoint})
+insecure = False # by default is False, but if server does not support verification can be set to True
+network_client = sdk.client(NetworkServiceStub, insecure=False)
+```
+
+Notice: if both overrides are used for same endpoint, override by client has priority
+
+#### Switch SDK region
+```python
+from yandexcloud import SDK, set_up_yc_api_endpoint
+kz_region_endpoint = "api.yandexcloud.kz"
+# this will make SDK list endpoints from KZ yc installation
+sdk = SDK(iam_token="t1.9eu...", endpoint="api.yandexcloud.kz")
+# or you can use global function
+set_up_yc_api_endpoint(kz_region_endpoint)
+```
+
+## Contributing
+### Dependencies
+Use `make deps` command to install library, its production and development dependencies.
+
+### Formatting
+Use `make format` to autoformat code with black tool.
+
+### Tests
+- `make test` to run tests for current python version
+- `make lint` to run only linters for current python version
+- `make tox-current` to run all checks (tests + code style checks + linters + format check) for current python version
+- `make tox` to run all checks for all supported (installed in your system) python versions
+- `make test-all-versions` to run all checks for all supported python versions in docker container
+
+
+### Maintaining
+If pull request consists of several meaningful commits, that should be preserved,
+then use "Rebase and merge" option. Otherwise use "Squash and merge".
+
+New release (changelog, tag and pypi upload) will be automatically created
+on each push to master via Github Actions workflow.
diff --git a/contrib/python/yandexcloud/.dist-info/top_level.txt b/contrib/python/yandexcloud/.dist-info/top_level.txt
new file mode 100644
index 0000000000..3fbe85c20c
--- /dev/null
+++ b/contrib/python/yandexcloud/.dist-info/top_level.txt
@@ -0,0 +1,2 @@
+yandex
+yandexcloud
diff --git a/contrib/python/yandexcloud/AUTHORS b/contrib/python/yandexcloud/AUTHORS
new file mode 100644
index 0000000000..06ab95beb9
--- /dev/null
+++ b/contrib/python/yandexcloud/AUTHORS
@@ -0,0 +1,4 @@
+The following authors have created the source code of "Yandex.Cloud Python Client Library" published and distributed by YANDEX LLC as the owner:
+
+Ivan Romanenko <iva-romanenko@yandex-team.ru>
+Islam Alibekov <everest@yandex-team.ru> \ No newline at end of file
diff --git a/contrib/python/yandexcloud/LICENSE b/contrib/python/yandexcloud/LICENSE
new file mode 100644
index 0000000000..0cd74fabf0
--- /dev/null
+++ b/contrib/python/yandexcloud/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2018 YANDEX LLC
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/contrib/python/yandexcloud/README.md b/contrib/python/yandexcloud/README.md
new file mode 100644
index 0000000000..bd8014faa6
--- /dev/null
+++ b/contrib/python/yandexcloud/README.md
@@ -0,0 +1,177 @@
+[![PyPI Version][pypi-image]][pypi-url]
+[![Build Status][build-image]][build-url]
+[![License][license-image]][license-url]
+
+<!-- Badges -->
+
+[pypi-image]: https://img.shields.io/pypi/v/yandexcloud
+[pypi-url]: https://pypi.org/project/yandexcloud/
+[build-image]: https://github.com/yandex-cloud/python-sdk/actions/workflows/run-tests.yml/badge.svg
+[build-url]: https://github.com/yandex-cloud/python-sdk/actions/workflows/run-tests.yml
+[license-image]: https://img.shields.io/github/license/yandex-cloud/python-sdk.svg
+[license-url]: https://github.com/yandex-cloud/python-sdk/blob/master/LICENSE
+
+# Yandex.Cloud SDK (Python)
+
+Need to automate your infrastructure or use services provided by Yandex.Cloud? We've got you covered.
+
+Installation:
+
+ pip install yandexcloud
+
+## Getting started
+
+There are several options for authorization your requests - OAuth Token,
+Metadata Service (if you're executing your code inside VMs or Cloud Functions
+running in Yandex.Cloud), Service Account Keys, and externally created IAM tokens.
+
+### OAuth Token
+
+```python
+sdk = yandexcloud.SDK(token='AQAD-.....')
+```
+
+### Metadata Service
+
+Don't forget to assign Service Account for your Instance or Function and grant required roles.
+
+```python
+sdk = yandexcloud.SDK()
+```
+
+### Service Account Keys
+
+```python
+# you can store and read it from JSON file
+sa_key = {
+ "id": "...",
+ "service_account_id": "...",
+ "private_key": "..."
+}
+
+sdk = yandexcloud.SDK(service_account_key=sa_key)
+```
+
+### IAM tokens
+
+```python
+sdk = yandexcloud.SDK(iam_token="t1.9eu...")
+```
+
+Check `examples` directory for more examples.
+
+### Override service endpoint
+
+#### Supported services
+
+| Service Name | Alias |
+|------------------------------------------------------------------------|--------------------------|
+| yandex.cloud.ai.foundation_models | ai-foundation-models |
+| yandex.cloud.ai.llm | ai-llm |
+| yandex.cloud.ai.stt | ai-stt |
+| yandex.cloud.ai.translate | ai-translate |
+| yandex.cloud.ai.tts | ai-speechkit |
+| yandex.cloud.ai.vision | ai-vision |
+| yandex.cloud.apploadbalancer | alb |
+| yandex.cloud.billing | billing |
+| yandex.cloud.cdn | cdn |
+| yandex.cloud.certificatemanager.v1.certificate_content_service | certificate-manager-data |
+| yandex.cloud.certificatemanager | certificate-manager |
+| yandex.cloud.compute | compute |
+| yandex.cloud.containerregistry | container-registry |
+| yandex.cloud.dataproc.manager | dataproc-manager |
+| yandex.cloud.dataproc | dataproc |
+| yandex.cloud.datatransfer | datatransfer |
+| yandex.cloud.dns | dns |
+| yandex.cloud.endpoint | endpoint |
+| yandex.cloud.iam | iam |
+| yandex.cloud.iot.devices | iot-devices |
+| yandex.cloud.k8s | managed-kubernetes |
+| yandex.cloud.kms | kms |
+| yandex.cloud.kms.v1.symmetric_crypto_service | kms-crypto |
+| yandex.cloud.loadbalancer | load-balancer |
+| yandex.cloud.loadtesting | loadtesting |
+| yandex.cloud.lockbox.v1.payload_service | lockbox-payload |
+| yandex.cloud.lockbox | lockbox |
+| yandex.cloud.logging.v1.log_ingestion_service | log-ingestion |
+| yandex.cloud.logging.v1.log_reading_service | log-reading |
+| yandex.cloud.logging | logging |
+| yandex.cloud.marketplace | marketplace |
+| yandex.cloud.mdb.clickhouse | managed-clickhouse |
+| yandex.cloud.mdb.elasticsearch | managed-elasticsearch |
+| yandex.cloud.mdb.greenplum | managed-greenplum |
+| yandex.cloud.mdb.kafka | managed-kafka |
+| yandex.cloud.mdb.mongodb | managed-mongodb |
+| yandex.cloud.mdb.mysql | managed-mysql |
+| yandex.cloud.mdb.opensearch | managed-opensearch |
+| yandex.cloud.mdb.postgresql | managed-postgresql |
+| yandex.cloud.mdb.redis | managed-redis |
+| yandex.cloud.mdb.sqlserver | managed-sqlserver |
+| yandex.cloud.operation | operation |
+| yandex.cloud.organizationmanager | organization-manager |
+| yandex.cloud.resourcemanager | resource-manager |
+| yandex.cloud.serverless.apigateway | serverless-apigateway |
+| yandex.cloud.serverless.apigateway.websocket | apigateway-connections |
+| yandex.cloud.serverless.containers | serverless-containers |
+| yandex.cloud.serverless.functions | serverless-functions |
+| yandex.cloud.serverless.triggers | serverless-triggers |
+| yandex.cloud.storage | storage-api |
+| yandex.cloud.vpc | vpc |
+| yandex.cloud.ydb | ydb |
+
+
+#### Override in client
+```python
+from yandex.cloud.vpc.v1.network_service_pb2_grpc import NetworkServiceStub
+from yandexcloud import SDK
+
+sdk = SDK(iam_token="t1.9eu...")
+new_network_client_endpoint = "example.new.vpc.very.new.yandex:50051"
+insecure = False # by default is False, but if server does not support verification can be set to True
+network_client = sdk.client(NetworkServiceStub, endpoint=new_network_client_endpoint, insecure=False)
+```
+
+#### Override in sdk config
+To override endpoints provide dict in format {alias : new-endpoint}
+```python
+from yandex.cloud.vpc.v1.network_service_pb2_grpc import NetworkServiceStub
+from yandexcloud import SDK
+new_network_client_endpoint = "example.new.vpc.very.new.yandex:50051"
+sdk = SDK(iam_token="t1.9eu...", endpoints={"vpc": new_network_client_endpoint})
+insecure = False # by default is False, but if server does not support verification can be set to True
+network_client = sdk.client(NetworkServiceStub, insecure=False)
+```
+
+Notice: if both overrides are used for same endpoint, override by client has priority
+
+#### Switch SDK region
+```python
+from yandexcloud import SDK, set_up_yc_api_endpoint
+kz_region_endpoint = "api.yandexcloud.kz"
+# this will make SDK list endpoints from KZ yc installation
+sdk = SDK(iam_token="t1.9eu...", endpoint="api.yandexcloud.kz")
+# or you can use global function
+set_up_yc_api_endpoint(kz_region_endpoint)
+```
+
+## Contributing
+### Dependencies
+Use `make deps` command to install library, its production and development dependencies.
+
+### Formatting
+Use `make format` to autoformat code with black tool.
+
+### Tests
+- `make test` to run tests for current python version
+- `make lint` to run only linters for current python version
+- `make tox-current` to run all checks (tests + code style checks + linters + format check) for current python version
+- `make tox` to run all checks for all supported (installed in your system) python versions
+- `make test-all-versions` to run all checks for all supported python versions in docker container
+
+
+### Maintaining
+If pull request consists of several meaningful commits, that should be preserved,
+then use "Rebase and merge" option. Otherwise use "Squash and merge".
+
+New release (changelog, tag and pypi upload) will be automatically created
+on each push to master via Github Actions workflow.
diff --git a/contrib/python/yandexcloud/ya.make b/contrib/python/yandexcloud/ya.make
new file mode 100644
index 0000000000..3815d07fae
--- /dev/null
+++ b/contrib/python/yandexcloud/ya.make
@@ -0,0 +1,46 @@
+# Generated by devtools/yamaker (pypi).
+
+PY3_LIBRARY()
+
+VERSION(0.293.0)
+
+LICENSE(MIT)
+
+PEERDIR(
+ contrib/libs/googleapis-common-protos
+ contrib/libs/yandex-cloud-api-protos
+ contrib/python/PyJWT
+ contrib/python/cryptography
+ contrib/python/grpcio
+ contrib/python/protobuf
+ contrib/python/requests
+ contrib/python/six
+)
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ yandexcloud/__init__.py
+ yandexcloud/_auth_fabric.py
+ yandexcloud/_auth_plugin.py
+ yandexcloud/_backoff.py
+ yandexcloud/_channels.py
+ yandexcloud/_helpers.py
+ yandexcloud/_operation_waiter.py
+ yandexcloud/_retry_interceptor.py
+ yandexcloud/_sdk.py
+ yandexcloud/_wrappers/__init__.py
+ yandexcloud/_wrappers/dataproc/__init__.py
+ yandexcloud/auth.py
+ yandexcloud/operations.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/python/yandexcloud/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+ yandexcloud/py.typed
+)
+
+END()
diff --git a/contrib/python/yandexcloud/yandexcloud/__init__.py b/contrib/python/yandexcloud/yandexcloud/__init__.py
new file mode 100644
index 0000000000..b036a68549
--- /dev/null
+++ b/contrib/python/yandexcloud/yandexcloud/__init__.py
@@ -0,0 +1,13 @@
+"""Main package for Yandex.Cloud SDK."""
+
+# flake8: noqa
+from yandexcloud._auth_fabric import set_up_yc_api_endpoint
+from yandexcloud._backoff import (
+ backoff_exponential_with_jitter,
+ backoff_linear_with_jitter,
+ default_backoff,
+)
+from yandexcloud._retry_interceptor import RetryInterceptor
+from yandexcloud._sdk import SDK
+
+__version__ = "0.0.2"
diff --git a/contrib/python/yandexcloud/yandexcloud/_auth_fabric.py b/contrib/python/yandexcloud/yandexcloud/_auth_fabric.py
new file mode 100644
index 0000000000..135dab4020
--- /dev/null
+++ b/contrib/python/yandexcloud/yandexcloud/_auth_fabric.py
@@ -0,0 +1,145 @@
+import os
+import time
+from datetime import datetime
+from typing import Dict, Optional, Union
+
+# noinspection PyUnresolvedReferences
+# jwt package depends on cryptography
+import cryptography # noqa: F401; pylint: disable=unused-import
+import jwt
+import requests
+import six
+
+from yandex.cloud.iam.v1.iam_token_service_pb2 import CreateIamTokenRequest
+
+_MDS_ADDR = "169.254.169.254"
+_MDS_URL = "http://{}/computeMetadata/v1/instance/service-accounts/default/token"
+_MDS_HEADERS = {"Metadata-Flavor": "Google"}
+_MDS_TIMEOUT = (1.0, 1.0) # 1sec connect, 1sec read
+
+YC_API_ENDPOINT = "api.cloud.yandex.net"
+
+
def set_up_yc_api_endpoint(endpoint: str) -> str:
    """Override the module-wide Yandex.Cloud API endpoint.

    :param endpoint: new API endpoint host, e.g. "api.yandexcloud.kz"
    :return: the endpoint now in effect
    """
    # pylint: disable-next=global-statement
    global YC_API_ENDPOINT
    YC_API_ENDPOINT = endpoint
    return endpoint
+
+
def __validate_service_account_key(sa_key: Optional[dict]) -> bool:
    """Validate the structure of a service account key dictionary.

    :param sa_key: parsed service account key (usually loaded from sa.json)
    :return: True if the key is well-formed
    :raises RuntimeError: if the key is not a dict or a required field is missing/malformed
    """
    if not isinstance(sa_key, dict):
        raise RuntimeError(f"Invalid Service Account Key: expecting dictionary, actually got {type(sa_key)}")

    obj_id = sa_key.get("id")
    sa_id = sa_key.get("service_account_id")
    private_key = sa_key.get("private_key")

    if not obj_id:
        raise RuntimeError("Invalid Service Account Key: missing key object id.")

    if not sa_id:
        raise RuntimeError("Invalid Service Account Key: missing service account id.")

    if not private_key:
        raise RuntimeError("Invalid Service Account Key: missing private key.")

    private_key_prefix = "-----BEGIN PRIVATE KEY-----"
    # `str` replaces `six.string_types`: this package is Python 3 only (it uses
    # f-strings and is built as PY3_LIBRARY), so the six shim is unnecessary.
    if not isinstance(private_key, str) or private_key_prefix not in private_key:
        error_message = (
            "Invalid Service Account Key: private key is in incorrect format."
            f"Should start with {private_key_prefix}.\n"
            "To obtain one you can use YC CLI: yc iam key create --output sa.json --service-account-id <id>"
        )
        raise RuntimeError(error_message)
    return True
+
+
class MetadataAuth:
    """Obtains an IAM token from the compute instance metadata service."""

    def __init__(self, metadata_addr: str):
        self.__metadata_addr = metadata_addr

    def url(self) -> str:
        """Full metadata-service token URL for the configured address."""
        return _MDS_URL.format(self.__metadata_addr)

    def get_token(self) -> str:
        """Fetch an IAM token from the metadata service; raises on HTTP errors."""
        response = requests.get(self.url(), headers=_MDS_HEADERS, timeout=_MDS_TIMEOUT)
        response.raise_for_status()
        return response.json()["access_token"]
+
+
class TokenAuth:
    """Exchanges a Yandex Passport OAuth token for an IAM token."""

    def __init__(self, token: str):
        self.__oauth_token = token

    def get_token_request(self) -> "CreateIamTokenRequest":
        """Build a CreateIamTokenRequest carrying the OAuth token."""
        request = CreateIamTokenRequest(yandex_passport_oauth_token=self.__oauth_token)
        return request
+
+
class ServiceAccountAuth:
    """Issues IAM token requests authorized by a service-account key (PS256 JWT)."""

    __SECONDS_IN_HOUR = 60.0 * 60.0

    def __init__(self, sa_key: Dict[str, str], endpoint: Optional[str] = None):
        self.__sa_key = sa_key
        self._endpoint = YC_API_ENDPOINT if endpoint is None else endpoint

    def get_token_request(self) -> "CreateIamTokenRequest":
        """Build a CreateIamTokenRequest carrying a freshly signed JWT."""
        return CreateIamTokenRequest(jwt=self.__prepare_request(self._endpoint))

    def __prepare_request(self, endpoint: str) -> str:
        """Sign a one-hour JWT addressed to the IAM token service at `endpoint`."""
        issued = time.time()
        issued_at = datetime.utcfromtimestamp(issued)
        expires_at = datetime.utcfromtimestamp(issued + self.__SECONDS_IN_HOUR)
        audience = f"https://iam.{endpoint}/iam/v1/tokens"

        claims = {
            "iss": self.__sa_key["service_account_id"],
            "aud": audience,
            "iat": issued_at,
            "exp": expires_at,
        }
        jwt_headers = {
            "typ": "JWT",
            "alg": "PS256",
            "kid": self.__sa_key["id"],
        }
        return jwt.encode(claims, self.__sa_key["private_key"], algorithm="PS256", headers=jwt_headers)
+
+
class IamTokenAuth:
    """Wraps an already-issued IAM token; no exchange request is needed."""

    def __init__(self, iam_token: str):
        self.__iam_token = iam_token

    def get_token(self) -> str:
        """Return the stored IAM token unchanged."""
        return self.__iam_token
+
+
def get_auth_token_requester(
    token: Optional[str] = None,
    service_account_key: Optional[dict] = None,
    iam_token: Optional[str] = None,
    metadata_addr: Optional[str] = None,
    endpoint: Optional[str] = None,
) -> Union["MetadataAuth", "TokenAuth", "IamTokenAuth", "ServiceAccountAuth"]:
    """Select an auth helper matching the single credential kind provided.

    At most one of `token`, `service_account_key` or `iam_token` may be set;
    with none set, instance metadata-service auth is used as the fallback.

    :raises RuntimeError: if more than one credential kind is supplied
    """
    if endpoint is None:
        endpoint = YC_API_ENDPOINT

    provided = [
        (name, value)
        for name, value in (
            ("token", token),
            ("service_account_key", service_account_key),
            ("iam_token", iam_token),
        )
        if value is not None
    ]

    if not provided:
        # Nothing explicit: fall back to the instance metadata service.
        addr = metadata_addr if metadata_addr is not None else os.environ.get("YC_METADATA_ADDR", _MDS_ADDR)
        return MetadataAuth(addr)

    if len(provided) > 1:
        raise RuntimeError(f"Conflicting API credentials properties are set: {[name for name, _ in provided]}.")

    if token is not None:
        return TokenAuth(token=token)
    if iam_token is not None:
        return IamTokenAuth(iam_token)
    if service_account_key is not None and __validate_service_account_key(service_account_key):
        return ServiceAccountAuth(service_account_key, endpoint)

    raise RuntimeError("Unknown auth method")
diff --git a/contrib/python/yandexcloud/yandexcloud/_auth_plugin.py b/contrib/python/yandexcloud/yandexcloud/_auth_plugin.py
new file mode 100644
index 0000000000..46e724b31f
--- /dev/null
+++ b/contrib/python/yandexcloud/yandexcloud/_auth_plugin.py
@@ -0,0 +1,96 @@
+from datetime import datetime
+from typing import TYPE_CHECKING, Callable, Optional, Tuple, Union
+
+import grpc
+from six.moves.urllib.parse import urlparse
+
+from yandex.cloud.iam.v1.iam_token_service_pb2_grpc import IamTokenServiceStub
+
+if TYPE_CHECKING:
+ from yandex.cloud.iam.v1.iam_token_service_pb2 import CreateIamTokenResponse
+ from yandexcloud._auth_fabric import (
+ IamTokenAuth,
+ MetadataAuth,
+ ServiceAccountAuth,
+ TokenAuth,
+ )
+
+
+TIMEOUT_SECONDS = 20
+
+
class Credentials(grpc.AuthMetadataPlugin):
    """gRPC auth metadata plugin that injects an IAM token into call metadata.

    The token is obtained lazily from the configured token requester, cached,
    and refreshed once it is older than TIMEOUT_SECONDS.
    """

    def __init__(
        self,
        token_requester: Union["MetadataAuth", "TokenAuth", "IamTokenAuth", "ServiceAccountAuth"],
        lazy_channel: Callable[[], "grpc.Channel"],
    ):
        # pylint: disable=super-init-not-called
        self.__token_requester = token_requester
        self._lazy_channel = lazy_channel  # channel factory; the channel is created on first use
        self._channel: Optional[grpc.Channel] = None
        self._cached_iam_token: str = ""
        self._iam_token_timestamp: Optional[datetime] = None

    def __call__(self, context: "grpc.AuthMetadataContext", callback: "grpc.AuthMetadataPluginCallback") -> None:
        # Failures must be reported through the callback, not raised, per the
        # grpc.AuthMetadataPlugin contract.
        try:
            return self._call(context, callback)
        except Exception as exception:  # pylint: disable=broad-except
            callback(tuple(), exception)
            return None

    def _call(self, context: "grpc.AuthMetadataContext", callback: "grpc.AuthMetadataPluginCallback") -> None:
        # Skip auth for the token service itself and the endpoint discovery
        # service: requesting a token there would recurse.
        u = urlparse(context.service_url)
        if u.path in (
            "/yandex.cloud.iam.v1.IamTokenService",
            "/yandex.cloud.endpoint.ApiEndpointService",
        ):
            callback(tuple(), None)
            return

        if self._channel is None:
            self._channel = self._lazy_channel()

        if not self._fresh():
            # A requester either returns a ready-made token directly...
            get_token = getattr(self.__token_requester, "get_token", None)
            if callable(get_token):
                self._cached_iam_token = get_token()
                self._iam_token_timestamp = datetime.now()
                callback(self._metadata(), None)
                return

            # ...or builds a request that is sent to the IAM token service
            # asynchronously; the callback fires when the token arrives.
            get_token_request = getattr(self.__token_requester, "get_token_request", None)
            if callable(get_token_request):
                token_future = IamTokenServiceStub(self._channel).Create.future(get_token_request())
                token_future.add_done_callback(self.create_done_callback(callback))
                return

        callback(self._metadata(), None)

    def create_done_callback(self, callback: "grpc.AuthMetadataPluginCallback") -> Callable[["grpc.Future"], None]:
        """Wrap `callback` so the async Create() result is cached before reporting."""
        def done_callback(future: "grpc.Future") -> None:
            try:
                resp = future.result()
            except Exception as exception:  # pylint: disable=broad-except
                callback(tuple(), exception)
            else:
                self._save_token(resp)
                callback(self._metadata(), None)

        return done_callback

    def _metadata(self) -> Tuple[Tuple[str, str]]:
        # Standard gRPC authorization header with a Bearer token.
        metadata = (("authorization", f"Bearer {self._cached_iam_token}"),)
        return metadata

    def _save_token(self, resp: "CreateIamTokenResponse") -> None:
        self._cached_iam_token = resp.iam_token
        self._iam_token_timestamp = datetime.now()

    def _fresh(self) -> bool:
        """Return True while the cached token is younger than TIMEOUT_SECONDS."""
        if self._cached_iam_token == "":
            return False
        if self._iam_token_timestamp is None:
            return False
        diff = datetime.now() - self._iam_token_timestamp
        return diff.total_seconds() < TIMEOUT_SECONDS
diff --git a/contrib/python/yandexcloud/yandexcloud/_backoff.py b/contrib/python/yandexcloud/yandexcloud/_backoff.py
new file mode 100644
index 0000000000..0ed4bdd9e4
--- /dev/null
+++ b/contrib/python/yandexcloud/yandexcloud/_backoff.py
@@ -0,0 +1,45 @@
+import random
+from typing import Callable
+
+
def backoff_linear_with_jitter(wait_time: float, jitter: float) -> Callable[[int], float]:
    """Constant backoff with multiplicative jitter.

    Every attempt waits `wait_time` scaled by a random factor drawn uniformly
    from [1 - jitter, 1 + jitter).

    :param wait_time: base delay in seconds
    :param jitter: relative jitter amplitude (0 disables jitter)
    :return: function mapping an attempt number to a delay in seconds
    """

    def delay(attempt: int) -> float:  # pylint: disable=unused-argument
        spread = (random.random() * 2 - 1) * jitter
        return wait_time * (1 + spread)

    return delay
+
+
def backoff_exponential_with_jitter(base: float, cap: float) -> Callable[[int], float]:
    """Exponential backoff with full jitter, capped at `cap` seconds.

    The delay for attempt k is uniform in [0, base * 2**k), clamped to `cap`;
    arithmetic overflow on huge attempt numbers also yields `cap`.

    :param base: base delay in seconds
    :param cap: maximum delay in seconds
    :return: function mapping an attempt number to a delay in seconds
    """

    def delay(attempt: int) -> float:
        try:
            candidate = (2**attempt) * base * random.random()
        except OverflowError:
            return cap
        return min(candidate, cap)

    return delay
+
+
def backoff_exponential_jittered_min_interval(base: float = 0.05, cap: float = 60) -> Callable[[int], float]:
    """Exponential backoff with "equal jitter": at least half the interval is kept.

    The delay for attempt k is uniform in [I/2, 3*I/2) where I = base * 2**k,
    clamped to `cap`; arithmetic overflow on huge attempt numbers also yields `cap`.

    :param base: base interval in seconds
    :param cap: maximum delay in seconds
    :return: function mapping an attempt number to a delay in seconds
    """

    def delay(attempt: int) -> float:
        try:
            interval = (2**attempt) * base
            candidate = interval / 2 + interval * random.random()
        except OverflowError:
            return cap
        return min(candidate, cap)

    return delay
+
+
def default_backoff() -> Callable[[int], float]:
    """Return the SDK-default backoff: exponential with full jitter (base 0.05s, cap 60s)."""
    default_base = 0.05
    default_cap = 60
    return backoff_exponential_with_jitter(default_base, default_cap)
diff --git a/contrib/python/yandexcloud/yandexcloud/_channels.py b/contrib/python/yandexcloud/yandexcloud/_channels.py
new file mode 100644
index 0000000000..73aa3f9004
--- /dev/null
+++ b/contrib/python/yandexcloud/yandexcloud/_channels.py
@@ -0,0 +1,114 @@
+import logging
+from importlib.metadata import PackageNotFoundError, version
+from typing import Dict, Optional
+
+import grpc
+
+from yandex.cloud.endpoint.api_endpoint_service_pb2 import ListApiEndpointsRequest
+from yandex.cloud.endpoint.api_endpoint_service_pb2_grpc import ApiEndpointServiceStub
+from yandexcloud import _auth_plugin
+from yandexcloud._auth_fabric import YC_API_ENDPOINT, get_auth_token_requester
+
+try:
+ VERSION = version("yandexcloud")
+except PackageNotFoundError:
+ VERSION = "0.0.0"
+
+SDK_USER_AGENT = f"yandex-cloud-python-sdk/{VERSION}"
+logger = logging.getLogger(__name__)
+
+
class Channels:
    """Builds gRPC channels to Yandex.Cloud services.

    Service addresses are resolved once through the ApiEndpointService and
    cached; user-supplied endpoint overrides are applied on top. Secure
    channels carry IAM call credentials; insecure (plaintext) channels are
    only honored for overridden endpoints.
    """

    def __init__(
        self,
        client_user_agent: Optional[str] = None,
        endpoints: Optional[Dict[str, str]] = None,
        token: Optional[str] = None,
        iam_token: Optional[str] = None,
        endpoint: Optional[str] = None,
        service_account_key: Optional[Dict[str, str]] = None,
        root_certificates: Optional[bytes] = None,
        private_key: Optional[bytes] = None,
        certificate_chain: Optional[bytes] = None,
        **_: str,
    ) -> None:
        """
        :param client_user_agent: extra User-Agent value sent alongside the SDK's own
        :param endpoints: per-service endpoint overrides, e.g. {"vpc": "host:443"}
        :param token: OAuth token credential
        :param iam_token: ready-made IAM token credential
        :param endpoint: API endpoint used to list service endpoints
        :param service_account_key: service account key credential
        :param root_certificates: PEM root certificates for the TLS credentials
        :param private_key: PEM private key for the TLS credentials
        :param certificate_chain: PEM certificate chain for the TLS credentials
        """
        self._channel_creds = grpc.ssl_channel_credentials(
            root_certificates=root_certificates,
            private_key=private_key,
            certificate_chain=certificate_chain,
        )
        self._endpoint = endpoint if endpoint is not None else YC_API_ENDPOINT
        self._token_requester = get_auth_token_requester(
            token=token,
            service_account_key=service_account_key,
            iam_token=iam_token,
            endpoint=self._endpoint,
        )

        self._client_user_agent = client_user_agent
        self._config_endpoints = endpoints if endpoints is not None else {}
        # Lazily populated service-id -> address table; see the `endpoints` property.
        self._endpoints: Optional[Dict[str, str]] = None
        self.channel_options = tuple(
            ("grpc.primary_user_agent", user_agent)
            for user_agent in [self._client_user_agent, SDK_USER_AGENT]
            if user_agent is not None
        )

    def channel(self, service: str, endpoint: Optional[str] = None, insecure: bool = False) -> grpc.Channel:
        """Build a channel for `service`.

        :param service: service id as listed by the endpoint service (e.g. "vpc")
        :param endpoint: explicit endpoint override; takes priority over config overrides
        :param insecure: create a plaintext channel (only honored for overridden endpoints)
        :raises RuntimeError: if `service` is unknown and no explicit endpoint is given
        """
        if endpoint:
            logger.info("Using provided service %s endpoint %s", service, endpoint)
            if insecure:
                logger.info("Insecure option is ON, no IAM endpoint used for verification")
                return grpc.insecure_channel(endpoint, options=self.channel_options)
            iam_endpoint = self.endpoints["iam"]
            # Fix: this log call originally had a %s placeholder but no argument.
            logger.info("Insecure option is OFF, IAM endpoint %s used for verification", iam_endpoint)
            creds: grpc.ChannelCredentials = self._get_creds(iam_endpoint)
            return grpc.secure_channel(endpoint, creds, options=self.channel_options)
        if service not in self._config_endpoints and insecure:
            logger.warning(
                "Unable to use insecure option for default {%s} service endpoint.\n"
                "Option is ignored. To enable it override endpoint.",
                service,
            )
        elif insecure:
            logger.info("Insecure option is ON, no IAM endpoint used for verification")
            return grpc.insecure_channel(self.endpoints[service], options=self.channel_options)

        # Fix: check before touching self.endpoints[service]; otherwise an unknown
        # service surfaced as a bare KeyError from the log call below and the
        # intended RuntimeError was unreachable.
        if service not in self.endpoints:
            raise RuntimeError(f"Unknown service: {service}")

        logger.info(
            "Using endpoints from configuration, IAM %s, %s %s",
            self.endpoints["iam"],
            service,
            self.endpoints[service],
        )

        creds = self._get_creds(self.endpoints["iam"])
        return grpc.secure_channel(self.endpoints[service], creds, options=self.channel_options)

    @property
    def endpoints(self) -> Dict[str, str]:
        """Service-id -> address map; fetched once, then cached with overrides applied."""
        if self._endpoints is None:
            self._endpoints = self._get_endpoints()
            for id_, address in self._config_endpoints.items():
                logger.debug("Override service %s, endpoint %s", id_, address)
                if id_ == "iam":
                    logger.warning(
                        "Be aware `iam` service endpoint is overridden. "
                        "That can produce unexpected results in SDK calls."
                    )
                self._endpoints[id_] = address
        return self._endpoints

    def _get_endpoints(self) -> Dict[str, str]:
        # Endpoint listing is unauthenticated: only TLS channel credentials are used.
        unauthenticated_channel = grpc.secure_channel(self._endpoint, self._channel_creds, options=self.channel_options)
        endpoint_service = ApiEndpointServiceStub(unauthenticated_channel)
        resp = endpoint_service.List(ListApiEndpointsRequest())
        return {endpoint.id: endpoint.address for endpoint in resp.endpoints}

    def _get_creds(self, iam_endpoint: str) -> grpc.ChannelCredentials:
        # NOTE: the lambda closes over `creds`, assigned below; it is only invoked
        # lazily by the auth plugin, after `creds` exists.
        plugin = _auth_plugin.Credentials(
            self._token_requester, lambda: grpc.secure_channel(iam_endpoint, creds, options=self.channel_options)
        )
        call_creds = grpc.metadata_call_credentials(plugin)
        creds = grpc.composite_channel_credentials(self._channel_creds, call_creds)
        return creds
diff --git a/contrib/python/yandexcloud/yandexcloud/_helpers.py b/contrib/python/yandexcloud/yandexcloud/_helpers.py
new file mode 100644
index 0000000000..0df9b6c03c
--- /dev/null
+++ b/contrib/python/yandexcloud/yandexcloud/_helpers.py
@@ -0,0 +1,69 @@
+# -*- coding: utf-8 -*-
+from typing import TYPE_CHECKING, Optional
+
+from yandex.cloud.iam.v1.service_account_service_pb2 import ListServiceAccountsRequest
+from yandex.cloud.iam.v1.service_account_service_pb2_grpc import (
+ ServiceAccountServiceStub,
+)
+from yandex.cloud.vpc.v1.network_service_pb2 import ListNetworksRequest
+from yandex.cloud.vpc.v1.network_service_pb2_grpc import NetworkServiceStub
+from yandex.cloud.vpc.v1.subnet_service_pb2 import ListSubnetsRequest
+from yandex.cloud.vpc.v1.subnet_service_pb2_grpc import SubnetServiceStub
+
+if TYPE_CHECKING:
+ from yandexcloud._sdk import SDK
+
+
class Helpers:
    """Convenience lookups for single-candidate resources in a folder."""

    def __init__(self, sdk: "SDK"):
        self.sdk = sdk

    def find_service_account_id(self, folder_id: str) -> str:
        """
        Get service account id in case the folder has the only one service account

        :param folder_id: ID of the folder
        :return ID of the service account
        :raises RuntimeError: if the folder has zero or several service accounts
        """
        service = self.sdk.client(ServiceAccountServiceStub)
        service_accounts = service.List(ListServiceAccountsRequest(folder_id=folder_id)).service_accounts
        if len(service_accounts) == 1:
            return service_accounts[0].id
        if len(service_accounts) == 0:
            raise RuntimeError(f"There are no service accounts in folder {folder_id}, please create it.")
        raise RuntimeError(f"There are more than one service account in folder {folder_id}, please specify it")

    def find_network_id(self, folder_id: str) -> str:
        """
        Get ID of the first network in folder

        :param folder_id: ID of the folder
        :return ID of the network
        :raises RuntimeError: if the folder has zero or several networks
        """
        networks = self.sdk.client(NetworkServiceStub).List(ListNetworksRequest(folder_id=folder_id)).networks
        if not networks:
            raise RuntimeError(f"No networks in folder: {folder_id}")
        if len(networks) > 1:
            # Fix: this message was missing its f-prefix and printed "{folder_id}" literally.
            raise RuntimeError(f"There are more than one network in folder {folder_id}, please specify it")
        return networks[0].id

    def find_subnet_id(self, folder_id: str, zone_id: str, network_id: Optional[str] = None) -> str:
        """
        Get ID of the subnetwork of specified network in specified availability zone

        :param folder_id: ID of the folder
        :param zone_id: ID of the availability zone
        :param network_id: ID of the network
        :return ID of the subnetwork
        :raises RuntimeError: if zero or several subnets match
        """
        subnet_service = self.sdk.client(SubnetServiceStub)
        subnets = subnet_service.List(ListSubnetsRequest(folder_id=folder_id)).subnets
        if network_id:
            applicable = [s for s in subnets if s.zone_id == zone_id and s.network_id == network_id]
        else:
            applicable = [s for s in subnets if s.zone_id == zone_id]
        if len(applicable) == 1:
            return applicable[0].id
        if len(applicable) == 0:
            raise RuntimeError(f"There are no subnets in {zone_id} zone, please create it.")
        raise RuntimeError(f"There are more than one subnet in {zone_id} zone, please specify it")
diff --git a/contrib/python/yandexcloud/yandexcloud/_operation_waiter.py b/contrib/python/yandexcloud/yandexcloud/_operation_waiter.py
new file mode 100644
index 0000000000..6c321e235c
--- /dev/null
+++ b/contrib/python/yandexcloud/yandexcloud/_operation_waiter.py
@@ -0,0 +1,128 @@
+import logging
+import time
+from datetime import datetime
+from typing import TYPE_CHECKING, Optional, Type, Union
+
+import grpc
+from google.protobuf.empty_pb2 import Empty
+
+from yandex.cloud.operation.operation_service_pb2 import GetOperationRequest
+from yandex.cloud.operation.operation_service_pb2_grpc import OperationServiceStub
+from yandexcloud._backoff import backoff_exponential_jittered_min_interval
+from yandexcloud._retry_interceptor import RetryInterceptor
+from yandexcloud.operations import OperationError, OperationResult
+
+if TYPE_CHECKING:
+ import google.protobuf.message
+
+ from yandex.cloud.operation.operation_pb2 import Operation
+ from yandexcloud._sdk import SDK
+
+
def operation_waiter(sdk: "SDK", operation_id: str, timeout: Optional[float]) -> "OperationWaiter":
    """Build an OperationWaiter whose operation service retries transient errors.

    :param sdk: SDK instance used to construct the operation service client
    :param operation_id: id of the operation to poll
    :param timeout: overall wait deadline in seconds, or None for no deadline
    """
    # withstand server downtime for ~3.4 minutes with an exponential backoff
    interceptor = RetryInterceptor(
        max_retry_count=13,
        per_call_timeout=30,
        back_off_func=backoff_exponential_jittered_min_interval(),
        retriable_codes=(
            grpc.StatusCode.UNAVAILABLE,
            grpc.StatusCode.RESOURCE_EXHAUSTED,
            grpc.StatusCode.INTERNAL,
        ),
    )
    service = sdk.client(OperationServiceStub, interceptor=interceptor)
    return OperationWaiter(operation_id, service, timeout)
+
+
def wait_for_operation(sdk: "SDK", operation_id: str, timeout: Optional[float]) -> Optional["Operation"]:
    """Poll the operation once a second until it is done or `timeout` expires.

    :return: the last fetched operation state (may be unfinished on timeout)
    """
    waiter = operation_waiter(sdk, operation_id, timeout)
    while True:
        try:
            next(waiter)
        except StopIteration:
            return waiter.operation
        time.sleep(1)
+
+
def get_operation_result(
    sdk: "SDK",
    operation: "Operation",
    response_type: Optional[Type["google.protobuf.message.Message"]] = None,
    meta_type: Optional[Type["google.protobuf.message.Message"]] = None,
    timeout: Optional[float] = None,
    logger: Optional[logging.Logger] = None,
) -> Union["OperationResult", "OperationError"]:
    """Wait for an operation to complete and unpack its metadata/response.

    :param sdk: SDK instance used to poll the operation service
    :param operation: operation returned by a service call
    :param response_type: protobuf type to unpack the response into (Empty disables unpacking)
    :param meta_type: protobuf type to unpack the metadata into (Empty disables unpacking)
    :param timeout: overall waiting deadline in seconds, or None to wait indefinitely
    :param logger: destination for progress messages; a silent logger is used if None
    :return: OperationResult with `meta`/`response` populated where requested,
        or OperationError when polling yielded no final state
    :raises OperationError: when the finished operation carries an error status
    """
    if not logger:
        logger = logging.getLogger()
        logger.addHandler(logging.NullHandler())
    operation_result = OperationResult(operation)
    created_at = datetime.fromtimestamp(operation.created_at.seconds)
    message = (
        "Running Yandex.Cloud operation. ID: {id}. "
        "Description: {description}. Created at: {created_at}. "
        "Created by: {created_by}."
    )
    message = message.format(
        id=operation.id,
        description=operation.description,
        created_at=created_at,
        created_by=operation.created_by,
    )
    if meta_type and meta_type is not Empty:
        unpacked_meta = meta_type()
        operation.metadata.Unpack(unpacked_meta)
        operation_result.meta = unpacked_meta
        message += f" Meta: {unpacked_meta}."
    logger.info(message)
    result = wait_for_operation(sdk, operation.id, timeout=timeout)
    if result is None:
        # Waiter gave up (e.g. deadline passed) without a final operation state.
        return OperationError(message="Unexpected operation result", operation_result=OperationResult(operation))
    if result.error and result.error.code:
        error_message = (
            "Error Yandex.Cloud operation. ID: {id}. Error code: {code}. Details: {details}. Message: {message}."
        )
        error_message = error_message.format(
            id=result.id,
            code=result.error.code,
            details=result.error.details,
            message=result.error.message,
        )
        logger.error(error_message)
        raise OperationError(message=error_message, operation_result=OperationResult(operation))

    log_message = f"Done Yandex.Cloud operation. ID: {operation.id}."
    if response_type and response_type is not Empty:
        unpacked_response = response_type()
        result.response.Unpack(unpacked_response)
        operation_result.response = unpacked_response
        log_message += f" Response: {unpacked_response}."
    logger.info(log_message)
    return operation_result
+
+
class OperationWaiter:
    """Iterator that polls an operation until it is done or a deadline passes."""

    def __init__(self, operation_id: str, operation_service: "OperationServiceStub", timeout: Optional[float] = None):
        self.__operation: Optional["Operation"] = None
        self.__operation_id = operation_id
        self.__operation_service = operation_service
        self.__deadline = time.time() + timeout if timeout else None

    @property
    def operation(self) -> Optional["Operation"]:
        """Most recently fetched operation state, or None before the first poll."""
        return self.__operation

    @property
    def done(self) -> bool:
        """Fetch the current operation state and report whether it has finished."""
        self.__operation = self.__operation_service.Get(GetOperationRequest(operation_id=self.__operation_id))
        return self.__operation is not None and self.__operation.done

    def __iter__(self) -> "OperationWaiter":
        return self

    def __next__(self) -> None:
        # Poll first, then check the deadline — same order as a short-circuiting `or`.
        if self.done:
            raise StopIteration()
        if self.__deadline is not None and time.time() >= self.__deadline:
            raise StopIteration()

    next = __next__  # for Python 2
diff --git a/contrib/python/yandexcloud/yandexcloud/_retry_interceptor.py b/contrib/python/yandexcloud/yandexcloud/_retry_interceptor.py
new file mode 100644
index 0000000000..d3261f0113
--- /dev/null
+++ b/contrib/python/yandexcloud/yandexcloud/_retry_interceptor.py
@@ -0,0 +1,208 @@
+import collections
+import time
+import uuid
+from typing import Callable, Iterable, Optional
+
+import grpc
+
+
class _ClientCallDetails(
    collections.namedtuple(
        "_ClientCallDetails", ("method", "timeout", "metadata", "credentials", "wait_for_ready", "compression")
    ),
    grpc.ClientCallDetails,
):
    """Immutable grpc.ClientCallDetails implementation used to rebuild call
    details with a modified timeout or metadata."""

    pass
+
+
class _RetryCall(Exception):
    """Internal control-flow signal: raised to request another retry attempt."""

    pass
+
+
class RetryInterceptor(grpc.UnaryUnaryClientInterceptor):
    """RetryInterceptor implements grpc retries.
    It supports retries quantity, list of retriable codes, backoff function,
    per retry call timeout, and writing retry attempt to call metadata.
    Important notes:
    1. If default parameters are used, no retries will be executed.
    2. It will always add idempotency token to grpc call metadata, if one is not already present.
    3. Negative max_retry_count parameter will result in INFINITE retries.
    4. DEADLINE_EXCEEDED and CANCELLED are not retriable codes.
    5. Default retriable codes are UNAVAILABLE and RESOURCE_EXHAUSTED.
    6. Backoff function is called with retry attempt counter and should return sleep time in seconds (float).
    """

    _DEFAULT_RETRIABLE_CODES = (
        grpc.StatusCode.UNAVAILABLE,
        grpc.StatusCode.RESOURCE_EXHAUSTED,
    )
    # Codes that must never be retried, even if listed as retriable.
    _NON_RETRIABLE_CODES = [grpc.StatusCode.CANCELLED, grpc.StatusCode.DEADLINE_EXCEEDED]
    _IDEMPOTENCY_TOKEN_METADATA_KEY = "idempotency-key"
    _ATTEMPT_METADATA_KEY = "x-retry-attempt"

    def __init__(
        self,
        max_retry_count: int = 0,
        retriable_codes: Iterable["grpc.StatusCode"] = _DEFAULT_RETRIABLE_CODES,
        add_retry_count_to_header: bool = False,
        back_off_func: Optional[Callable[[int], float]] = None,
        per_call_timeout: Optional[float] = None,
    ):
        """
        :param max_retry_count: number of retries; 0 disables retries, negative means infinite
        :param retriable_codes: status codes that trigger a retry
        :param add_retry_count_to_header: write the attempt number into call metadata
        :param back_off_func: maps attempt number to sleep seconds before the next try
        :param per_call_timeout: timeout applied to each individual attempt
        """
        # pylint: disable=super-init-not-called
        self.__max_retry_count = max_retry_count
        self.__retriable_codes = retriable_codes
        self.__add_retry_count_to_header = add_retry_count_to_header
        self.__back_off_func = back_off_func
        self.__per_call_timeout = per_call_timeout

    def intercept_unary_unary(
        self,
        continuation: Callable[["grpc.ClientCallDetails", "grpc.TRequest"], "grpc.CallFuture[grpc.TResponse]"],
        client_call_details: "grpc.ClientCallDetails",
        request: "grpc.TRequest",
    ) -> "grpc.CallFuture[grpc.TResponse]":
        """Run the call, retrying while __grpc_call signals via _RetryCall."""
        # The same idempotency token is reused across all attempts of this call.
        client_call_details = self.__add_idempotency_token(client_call_details)

        attempt = 0
        deadline = self.__deadline(client_call_details.timeout)

        while True:
            try:
                return self.__grpc_call(attempt, deadline, continuation, client_call_details, request)
            except _RetryCall:
                attempt += 1

    def __wait_backoff(self, attempt: int, deadline: Optional[float]) -> None:
        # Sleep according to the backoff function, but never past the overall deadline.
        if self.__back_off_func is None:
            return

        backoff_timeout = self.__back_off_func(attempt)

        if deadline is not None:
            deadline_timeout = deadline - time.time()

            if backoff_timeout > deadline_timeout:  # pylint: disable=consider-using-min-builtin
                backoff_timeout = deadline_timeout

        if backoff_timeout > 0.0:
            time.sleep(backoff_timeout)

    @staticmethod
    def __deadline(timeout: Optional[float]) -> Optional[float]:
        # Convert a relative timeout into an absolute wall-clock deadline.
        return time.time() + timeout if timeout is not None else None

    def __is_retriable(self, error: "grpc.StatusCode") -> bool:
        # Non-retriable codes win over the configured retriable list.
        if error in self._NON_RETRIABLE_CODES:
            return False

        if error in self.__retriable_codes:
            return True

        return False

    @staticmethod
    def __min_deadline(d1: Optional[float], d2: Optional[float]) -> Optional[float]:
        # min() over two optional deadlines, where None means "no deadline".
        if d2 is None and d1 is None:
            return None
        if d1 is None:
            return d2

        if d2 is None:
            return d1

        return min(d1, d2)

    def __grpc_call(
        self,
        attempt: int,
        deadline: Optional[float],
        continuation: Callable[["grpc.ClientCallDetails", "grpc.TRequest"], "grpc.CallFuture[grpc.TResponse]"],
        client_call_details: "grpc.ClientCallDetails",
        request: "grpc.TRequest",
    ) -> "grpc.CallFuture[grpc.TResponse]":
        """Perform one attempt; raise _RetryCall (after backing off) to retry."""
        if attempt > 0:
            if self.__add_retry_count_to_header:
                client_call_details = self.__append_retry_attempt_header(client_call_details, attempt)

        # Effective deadline for this attempt: min(overall deadline, per-call timeout).
        call_deadline = self.__deadline(self.__per_call_timeout)
        call_deadline = self.__min_deadline(deadline, call_deadline)

        if call_deadline is not None:
            client_call_details = self.__adjust_timeout(client_call_details, call_deadline)

        def retry() -> None:
            self.__wait_backoff(attempt, deadline)
            raise _RetryCall()

        try:
            result = continuation(client_call_details, request)
            # The continuation may return an error as a value instead of raising it.
            if isinstance(result, grpc.RpcError):  # type: ignore
                raise result
            return result
        except grpc.RpcError as error:
            # no retries left
            if 0 <= self.__max_retry_count <= attempt:
                raise

            err_code = error.code()  # pylint: disable=no-member
            if err_code == grpc.StatusCode.DEADLINE_EXCEEDED:
                # if there is no per_call_timeout, or it is original deadline -> abort, otherwise, retry call.
                if self.__per_call_timeout is None or deadline is not None and deadline < time.time():
                    raise

                retry()

            if not self.__is_retriable(err_code):
                raise

            retry()
        raise RuntimeError("Unexpected behavior")

    @staticmethod
    def __adjust_timeout(client_call_details: "grpc.ClientCallDetails", deadline: float) -> "grpc.ClientCallDetails":
        # Rebuild call details with the remaining time until `deadline` (never negative).
        timeout = max(deadline - time.time(), 0.0)
        return _ClientCallDetails(
            client_call_details.method,
            timeout,
            client_call_details.metadata,
            client_call_details.credentials,
            getattr(client_call_details, "wait_for_ready", None),
            getattr(client_call_details, "compression", None),
        )

    def __add_idempotency_token(self, client_call_details: "grpc.ClientCallDetails") -> "grpc.ClientCallDetails":
        # Keep a caller-provided token if present; otherwise generate a fresh UUID.
        return self.__append_metadata(client_call_details, self._IDEMPOTENCY_TOKEN_METADATA_KEY, str(uuid.uuid4()))

    def __append_retry_attempt_header(
        self, client_call_details: "grpc.ClientCallDetails", attempt: int
    ) -> "grpc.ClientCallDetails":
        # force=True: the attempt number must be updated on every retry.
        return self.__append_metadata(client_call_details, self._ATTEMPT_METADATA_KEY, str(attempt), force=True)

    @staticmethod
    def __append_metadata(
        client_call_details: "grpc.ClientCallDetails", header: str, value: str, force: bool = False
    ) -> "grpc.ClientCallDetails":
        metadata = []
        if client_call_details.metadata is not None:
            metadata = list(client_call_details.metadata)

        if not force:
            # Do not add value, if there is already one.
            for item in metadata:
                if item[0] == header:
                    return client_call_details

        metadata.append(
            (
                header,
                value,
            )
        )
        return _ClientCallDetails(
            client_call_details.method,
            client_call_details.timeout,
            metadata,
            client_call_details.credentials,
            getattr(client_call_details, "wait_for_ready", None),
            getattr(client_call_details, "compression", None),
        )
diff --git a/contrib/python/yandexcloud/yandexcloud/_sdk.py b/contrib/python/yandexcloud/yandexcloud/_sdk.py
new file mode 100644
index 0000000000..f13c27697f
--- /dev/null
+++ b/contrib/python/yandexcloud/yandexcloud/_sdk.py
@@ -0,0 +1,193 @@
+import inspect
+from typing import TYPE_CHECKING, Any, Dict, Optional, Type, Union
+
+import grpc
+
+from yandexcloud import _channels, _helpers, _operation_waiter
+from yandexcloud._backoff import backoff_exponential_with_jitter
+from yandexcloud._retry_interceptor import RetryInterceptor
+from yandexcloud._wrappers import Wrappers
+
+if TYPE_CHECKING:
+ import logging
+
+ import google.protobuf.message
+
+ from yandex.cloud.operation.operation_pb2 import Operation
+ from yandexcloud._operation_waiter import OperationWaiter
+ from yandexcloud.operations import OperationError, OperationResult
+
+
class SDK:
    """Yandex.Cloud API entry point.

    Owns the gRPC channel pool and a default retry interceptor, and hands out
    typed service clients via :meth:`client`.
    """

    def __init__(
        self,
        interceptor: Union[
            grpc.UnaryUnaryClientInterceptor,
            grpc.UnaryStreamClientInterceptor,
            grpc.StreamUnaryClientInterceptor,
            grpc.StreamStreamClientInterceptor,
            None,
        ] = None,
        user_agent: Optional[str] = None,
        endpoints: Optional[Dict[str, str]] = None,
        token: Optional[str] = None,
        iam_token: Optional[str] = None,
        endpoint: Optional[str] = None,
        service_account_key: Optional[Dict[str, str]] = None,
        root_certificates: Optional[bytes] = None,
        private_key: Optional[bytes] = None,
        certificate_chain: Optional[bytes] = None,
        **kwargs: str,
    ):
        """
        API entry-point object.

        :param interceptor: GRPC interceptor to be used instead of default RetryInterceptor
        :param user_agent: String to prepend User-Agent metadata header for all GRPC requests made via SDK object
        :param endpoints: Dict with services endpoints overrides. Example: {'vpc': 'new.vpc.endpoint:443'}
        :param token: OAuth token; forwarded to the channel factory for authentication
        :param iam_token: ready-made IAM token; forwarded to the channel factory
        :param endpoint: API endpoint override; forwarded to the channel factory
        :param service_account_key: service account key dict; forwarded to the channel factory
        :param root_certificates: TLS root certificates (PEM); forwarded to the channel factory
        :param private_key: TLS client private key; forwarded to the channel factory
        :param certificate_chain: TLS client certificate chain; forwarded to the channel factory
        """
        self._channels = _channels.Channels(
            user_agent,
            endpoints,
            token,
            iam_token,
            endpoint,
            service_account_key,
            root_certificates,
            private_key,
            certificate_chain,
            **kwargs,
        )
        # Unless the caller supplied an interceptor, retry transient failures
        # with exponential back-off (1s base, 30s cap) and a per-call timeout.
        if interceptor is None:
            interceptor = RetryInterceptor(
                max_retry_count=5,
                per_call_timeout=30,
                back_off_func=backoff_exponential_with_jitter(1, 30),
            )
        self._default_interceptor = interceptor
        self.helpers = _helpers.Helpers(self)
        self.wrappers = Wrappers(self)

    def client(
        self,
        stub_ctor: Type,
        interceptor: Union[
            grpc.UnaryUnaryClientInterceptor,
            grpc.UnaryStreamClientInterceptor,
            grpc.StreamUnaryClientInterceptor,
            grpc.StreamStreamClientInterceptor,
            None,
        ] = None,
        endpoint: Optional[str] = None,
        insecure: bool = False,
    ) -> Any:
        """Instantiate *stub_ctor* on the channel of the service it belongs to.

        :param stub_ctor: generated gRPC stub class from a ``yandex.cloud`` module
        :param interceptor: per-client interceptor; when None the SDK default is used
        :param endpoint: endpoint override for this client's channel
        :param insecure: forwarded to the channel factory; presumably selects a
            plaintext channel — confirm against ``_channels.Channels.channel``
        """
        service = _service_for_ctor(stub_ctor)
        channel = self._channels.channel(service, endpoint, insecure)
        if interceptor is not None:
            channel = grpc.intercept_channel(channel, interceptor)
        elif self._default_interceptor is not None:
            channel = grpc.intercept_channel(channel, self._default_interceptor)
        return stub_ctor(channel)

    def waiter(self, operation_id: str, timeout: Optional[float] = None) -> "OperationWaiter":
        """Return a waiter object tracking the operation with *operation_id*."""
        return _operation_waiter.operation_waiter(self, operation_id, timeout)

    def wait_operation_and_get_result(
        self,
        operation: "Operation",
        response_type: Optional[Type["google.protobuf.message.Message"]] = None,
        meta_type: Optional[Type["google.protobuf.message.Message"]] = None,
        timeout: Optional[float] = None,
        logger: Optional["logging.Logger"] = None,
    ) -> Union["OperationResult", "OperationError"]:
        """Block until *operation* completes and return its decoded result."""
        return _operation_waiter.get_operation_result(self, operation, response_type, meta_type, timeout, logger)

    def create_operation_and_get_result(
        self,
        # NOTE: this is the request *instance* handed to the stub method below,
        # not a message class — annotated accordingly.
        request: "google.protobuf.message.Message",
        service: Any,
        method_name: str,
        response_type: Optional[Type["google.protobuf.message.Message"]] = None,
        meta_type: Optional[Type["google.protobuf.message.Message"]] = None,
        timeout: Optional[float] = None,
        logger: Optional["logging.Logger"] = None,
    ) -> Union["OperationResult", "OperationError"]:
        """Call ``service.method_name(request)`` and wait for the resulting operation."""
        operation = getattr(self.client(service), method_name)(request)
        return self.wait_operation_and_get_result(
            operation,
            response_type=response_type,
            meta_type=meta_type,
            timeout=timeout,
            logger=logger,
        )
+
+
+def _service_for_ctor(stub_ctor: Any) -> str:
+ m = inspect.getmodule(stub_ctor)
+ if m is not None:
+ name = m.__name__
+ if not name.startswith("yandex.cloud"):
+ raise RuntimeError(f"Not a yandex.cloud service {stub_ctor}")
+
+ for k, v in _supported_modules:
+ if name.startswith(k):
+ return v
+
+ raise RuntimeError(f"Unknown service {stub_ctor}")
+
+
+_supported_modules = [
+ ("yandex.cloud.ai.foundation_models", "ai-foundation-models"),
+ ("yandex.cloud.ai.llm", "ai-llm"),
+ ("yandex.cloud.ai.stt", "ai-stt"),
+ ("yandex.cloud.ai.translate", "ai-translate"),
+ ("yandex.cloud.ai.tts", "ai-speechkit"),
+ ("yandex.cloud.ai.vision", "ai-vision"),
+ ("yandex.cloud.apploadbalancer", "alb"),
+ ("yandex.cloud.billing", "billing"),
+ ("yandex.cloud.cdn", "cdn"),
+ ("yandex.cloud.certificatemanager.v1.certificate_content_service", "certificate-manager-data"),
+ ("yandex.cloud.certificatemanager", "certificate-manager"),
+ ("yandex.cloud.compute", "compute"),
+ ("yandex.cloud.containerregistry", "container-registry"),
+ ("yandex.cloud.dataproc.manager", "dataproc-manager"),
+ ("yandex.cloud.dataproc", "dataproc"),
+ ("yandex.cloud.datatransfer", "datatransfer"),
+ ("yandex.cloud.dns", "dns"),
+ ("yandex.cloud.endpoint", "endpoint"),
+ ("yandex.cloud.iam", "iam"),
+ ("yandex.cloud.iot.devices", "iot-devices"),
+ ("yandex.cloud.k8s", "managed-kubernetes"),
+ ("yandex.cloud.kms", "kms"),
+ ("yandex.cloud.kms.v1.symmetric_crypto_service", "kms-crypto"),
+ ("yandex.cloud.loadbalancer", "load-balancer"),
+ ("yandex.cloud.loadtesting", "loadtesting"),
+ ("yandex.cloud.lockbox.v1.payload_service", "lockbox-payload"),
+ ("yandex.cloud.lockbox", "lockbox"),
+ ("yandex.cloud.logging.v1.log_ingestion_service", "log-ingestion"),
+ ("yandex.cloud.logging.v1.log_reading_service", "log-reading"),
+ ("yandex.cloud.logging", "logging"),
+ ("yandex.cloud.marketplace", "marketplace"),
+ ("yandex.cloud.mdb.clickhouse", "managed-clickhouse"),
+ ("yandex.cloud.mdb.elasticsearch", "managed-elasticsearch"),
+ ("yandex.cloud.mdb.greenplum", "managed-greenplum"),
+ ("yandex.cloud.mdb.kafka", "managed-kafka"),
+ ("yandex.cloud.mdb.mongodb", "managed-mongodb"),
+ ("yandex.cloud.mdb.mysql", "managed-mysql"),
+ ("yandex.cloud.mdb.opensearch", "managed-opensearch"),
+ ("yandex.cloud.mdb.postgresql", "managed-postgresql"),
+ ("yandex.cloud.mdb.redis", "managed-redis"),
+ ("yandex.cloud.mdb.sqlserver", "managed-sqlserver"),
+ ("yandex.cloud.operation", "operation"),
+ ("yandex.cloud.organizationmanager", "organization-manager"),
+ ("yandex.cloud.resourcemanager", "resource-manager"),
+ ("yandex.cloud.serverless.apigateway", "serverless-apigateway"),
+ ("yandex.cloud.serverless.apigateway.websocket", "apigateway-connections"),
+ ("yandex.cloud.serverless.containers", "serverless-containers"),
+ ("yandex.cloud.serverless.functions", "serverless-functions"),
+ ("yandex.cloud.serverless.triggers", "serverless-triggers"),
+ ("yandex.cloud.storage", "storage-api"),
+ ("yandex.cloud.vpc", "vpc"),
+ ("yandex.cloud.ydb", "ydb"),
+]
diff --git a/contrib/python/yandexcloud/yandexcloud/_wrappers/__init__.py b/contrib/python/yandexcloud/yandexcloud/_wrappers/__init__.py
new file mode 100644
index 0000000000..2467325a9d
--- /dev/null
+++ b/contrib/python/yandexcloud/yandexcloud/_wrappers/__init__.py
@@ -0,0 +1,15 @@
+from typing import TYPE_CHECKING
+
+from yandexcloud._wrappers.dataproc import Dataproc, InitializationAction
+
+if TYPE_CHECKING:
+ from yandexcloud._sdk import SDK
+
+
class Wrappers:
    """Container of high-level wrapper classes exposed on an SDK instance."""

    def __init__(self, sdk: "SDK"):
        # pylint: disable-next=invalid-name
        self.Dataproc = Dataproc
        # NOTE(review): this assigns the SDK onto the Dataproc *class*, so all
        # Dataproc instances (from any SDK in the process) share the
        # last-assigned SDK unless one is passed to Dataproc explicitly.
        self.Dataproc.sdk = sdk
        # pylint: disable-next=invalid-name
        self.InitializationAction = InitializationAction
diff --git a/contrib/python/yandexcloud/yandexcloud/_wrappers/dataproc/__init__.py b/contrib/python/yandexcloud/yandexcloud/_wrappers/dataproc/__init__.py
new file mode 100644
index 0000000000..9f75ef1674
--- /dev/null
+++ b/contrib/python/yandexcloud/yandexcloud/_wrappers/dataproc/__init__.py
@@ -0,0 +1,748 @@
+# -*- coding: utf-8 -*-
+# pylint: disable=no-member
+# mypy: ignore-errors
+import logging
+import random
+from typing import List, NamedTuple
+
+from google.protobuf.field_mask_pb2 import FieldMask
+from six import string_types
+
+import yandex.cloud.dataproc.v1.cluster_pb2 as cluster_pb
+import yandex.cloud.dataproc.v1.cluster_service_pb2 as cluster_service_pb
+import yandex.cloud.dataproc.v1.cluster_service_pb2_grpc as cluster_service_grpc_pb
+import yandex.cloud.dataproc.v1.common_pb2 as common_pb
+import yandex.cloud.dataproc.v1.job_pb2 as job_pb
+import yandex.cloud.dataproc.v1.job_service_pb2 as job_service_pb
+import yandex.cloud.dataproc.v1.job_service_pb2_grpc as job_service_grpc_pb
+import yandex.cloud.dataproc.v1.subcluster_pb2 as subcluster_pb
+import yandex.cloud.dataproc.v1.subcluster_service_pb2 as subcluster_service_pb
+import yandex.cloud.dataproc.v1.subcluster_service_pb2_grpc as subcluster_service_grpc_pb
+
+
class InitializationAction(NamedTuple):
    """An executable to run when a Data Proc cluster starts."""

    uri: str  # Uri of the executable file
    args: List[str]  # Arguments to the initialization action
    timeout: int  # Execution timeout

    def to_grpc(self):
        """Convert this action into its protobuf message representation."""
        return cluster_pb.InitializationAction(**self._asdict())
+
+
class Dataproc:
    """
    A base hook for Yandex.Cloud Data Proc.

    :param default_folder_id: ID of the Yandex.Cloud folder that will be used by default for nodes and clusters creation
    :type default_folder_id: Optional[str]
    :param default_public_ssh_key: SSH public key that will be placed to created Compute nodes, providing a root shell
    :type default_public_ssh_key: Optional[str]
    :param logger: Logger object
    :type logger: Optional[logging.Logger]
    :param sdk: SDK object. Normally is being set by Wrappers constructor
    :type sdk: yandexcloud.SDK
    """

    def __init__(self, default_folder_id=None, default_public_ssh_key=None, logger=None, sdk=None):
        # Fall back to the class-level ``sdk`` attribute installed by the
        # Wrappers constructor; raises AttributeError if neither is available.
        self.sdk = sdk or self.sdk
        self.log = logger
        if not self.log:
            # Default to a silent logger so library code never prints on its own.
            self.log = logging.getLogger()
            self.log.addHandler(logging.NullHandler())
        # Filled in by create_cluster() and reused as defaults by later calls.
        self.cluster_id = None
        self.subnet_id = None
        self.default_folder_id = default_folder_id
        self.default_public_ssh_key = default_public_ssh_key
+
    def create_cluster(
        self,
        s3_bucket=None,
        folder_id=None,
        cluster_name=None,
        cluster_description="",
        cluster_image_version=None,
        ssh_public_keys=None,
        subnet_id=None,
        services=None,
        zone="ru-central1-b",
        service_account_id=None,
        masternode_resource_preset=None,
        masternode_disk_size=None,
        masternode_disk_type=None,
        datanode_resource_preset=None,
        datanode_disk_size=None,
        datanode_disk_type=None,
        datanode_count=None,
        computenode_resource_preset=None,
        computenode_disk_size=None,
        computenode_disk_type=None,
        computenode_count=None,
        computenode_max_hosts_count=None,
        computenode_measurement_duration=None,
        computenode_warmup_duration=None,
        computenode_stabilization_duration=None,
        computenode_preemptible=None,
        computenode_cpu_utilization_target=None,
        computenode_decommission_timeout=None,
        log_group_id=None,
        properties=None,
        enable_ui_proxy=False,
        host_group_ids=None,
        security_group_ids=None,
        initialization_actions=None,
        labels=None,
    ):
        """
        Create Yandex.Cloud Data Proc cluster.

        :param s3_bucket: Yandex.Cloud S3 bucket to store cluster logs.
            Jobs will not work if the bucket is not specified.
        :type s3_bucket: str
        :param folder_id: ID of the folder in which cluster should be created.
        :type folder_id: str
        :param cluster_name: Cluster name. Must be unique inside the folder.
        :type cluster_name: str
        :param cluster_description: Cluster description.
        :type cluster_description: str
        :param cluster_image_version: Cluster image version. Use default.
        :type cluster_image_version: str
        :param ssh_public_keys: List of SSH public keys that will be deployed to created compute instances.
        :type ssh_public_keys: List[str]
        :param subnet_id: ID of the subnetwork. All Data Proc cluster nodes will use one subnetwork.
        :type subnet_id: str
        :param services: List of services that will be installed to the cluster. Possible options:
            HDFS, YARN, MAPREDUCE, HIVE, TEZ, ZOOKEEPER, HBASE, SQOOP, FLUME, SPARK, SPARK, ZEPPELIN, OOZIE
        :type services: List[str]
        :param zone: Availability zone to create cluster in.
            Currently there are ru-central1-a, ru-central1-b and ru-central1-c.
        :type zone: str
        :param service_account_id: Service account id for the cluster.
            Service account can be created inside the folder.
        :type service_account_id: str
        :param masternode_resource_preset: Resources preset (CPU+RAM configuration)
            for the master node of the cluster.
        :type masternode_resource_preset: str
        :param masternode_disk_size: Masternode storage size in GiB.
        :type masternode_disk_size: int
        :param masternode_disk_type: Masternode storage type. Possible options: network-ssd, network-hdd.
        :type masternode_disk_type: str
        :param datanode_resource_preset: Resources preset (CPU+RAM configuration)
            for the data nodes of the cluster.
        :type datanode_resource_preset: str
        :param datanode_disk_size: Datanodes storage size in GiB.
        :type datanode_disk_size: int
        :param datanode_disk_type: Datanodes storage type. Possible options: network-ssd, network-hdd.
        :type datanode_disk_type: str
        :param datanode_count: Number of data nodes.
        :type datanode_count: int
        :param computenode_resource_preset: Resources preset (CPU+RAM configuration)
            for the compute nodes of the cluster.
        :type computenode_resource_preset: str
        :param computenode_disk_size: Computenodes storage size in GiB.
        :type computenode_disk_size: int
        :param computenode_disk_type: Computenodes storage type. Possible options: network-ssd, network-hdd.
        :type computenode_disk_type: str
        :param computenode_count: Number of compute nodes.
        :type computenode_count: int
        :param computenode_max_hosts_count: Maximum number of nodes of compute autoscaling subcluster.
        :type computenode_max_hosts_count: int
        :param computenode_measurement_duration: Time in seconds allotted for averaging metrics. In seconds.
        :type computenode_measurement_duration: int
        :param computenode_warmup_duration: The warmup time of the instance in seconds. During this time,
            traffic is sent to the instance, but instance metrics are not collected. In seconds.
        :type computenode_warmup_duration: int
        :param computenode_stabilization_duration: Minimum amount of time in seconds allotted for monitoring before
            Instance Groups can reduce the number of instances in the group.
            During this time, the group size doesn't decrease, even if the new metric values
            indicate that it should. In seconds.
        :type computenode_stabilization_duration: int
        :param computenode_preemptible: Preemptible instances are stopped at least once every 24 hours,
            and can be stopped at any time if their resources are needed by Compute.
        :type computenode_preemptible: bool
        :param computenode_cpu_utilization_target: Defines an autoscaling rule
            based on the average CPU utilization of the instance group.
            in percents. 10-100.
        :type computenode_cpu_utilization_target: int
        :param computenode_decommission_timeout: Timeout to gracefully decommission nodes during downscaling.
            In seconds.
        :type computenode_decommission_timeout: int
        :param log_group_id: Id of log group to write logs. By default logs will be sent to default log group.
            To disable cloud log sending set cluster property dataproc:disable_cloud_logging = true
        :type log_group_id: str
        :param properties: Properties passed to main node software, in key you need to use prefix: 'hdfs:dfs.hosts'
        :type properties: Dict[str, str]
        :param enable_ui_proxy: Enable UI Proxy feature for forwarding Hadoop components web interfaces
            Docs: https://cloud.yandex.com/en-ru/docs/data-proc/concepts/ui-proxy
        :type enable_ui_proxy: Bool
        :param host_group_ids: Dedicated host groups to place VMs of cluster on.
            Docs: https://cloud.yandex.com/en-ru/docs/compute/concepts/dedicated-host
        :type host_group_ids: List[str]
        :param security_group_ids: User security groups
            Docs: https://cloud.yandex.com/en-ru/docs/data-proc/concepts/network#security-groups
        :type security_group_ids: List[str]
        :param initialization_actions: Set of init-actions to run when cluster starts
        :type initialization_actions: List[InitializationAction]
        :param labels: Cluster labels as key:value pairs. No more than 64 per resource.
        :type labels: Dict[str, str]

        :return: Cluster ID
        :rtype: str
        """

        # pylint: disable=too-many-arguments
        # pylint: disable=too-many-locals
        # pylint: disable=too-many-branches

        folder_id = folder_id or self.default_folder_id
        if not folder_id:
            raise RuntimeError("Folder ID must be specified to create cluster.")

        if not cluster_name:
            # Generate a short pseudo-random name when the caller did not pick one.
            random_int = random.randint(0, 999)
            cluster_name = f"dataproc-{random_int}"

        if not subnet_id:
            network_id = self.sdk.helpers.find_network_id(folder_id)
            subnet_id = self.sdk.helpers.find_subnet_id(folder_id, zone, network_id)

        if not service_account_id:
            service_account_id = self.sdk.helpers.find_service_account_id(folder_id)

        if not ssh_public_keys:
            if self.default_public_ssh_key:
                ssh_public_keys = (self.default_public_ssh_key,)
            else:
                raise RuntimeError("Public ssh keys must be specified.")
        elif isinstance(ssh_public_keys, string_types):
            ssh_public_keys = [ssh_public_keys]

        # Disk sizes are accepted in GiB but the API expects bytes.
        gib = 1024**3
        if masternode_disk_size:
            masternode_disk_size *= gib
        if datanode_disk_size:
            datanode_disk_size *= gib
        if computenode_disk_size:
            computenode_disk_size *= gib
        subclusters = [
            cluster_service_pb.CreateSubclusterConfigSpec(
                name="master",
                role=subcluster_pb.Role.MASTERNODE,
                resources=common_pb.Resources(
                    resource_preset_id=masternode_resource_preset,
                    disk_size=masternode_disk_size,
                    disk_type_id=masternode_disk_type,
                ),
                subnet_id=subnet_id,
                hosts_count=1,
            )
        ]
        if datanode_count:
            subclusters.append(
                cluster_service_pb.CreateSubclusterConfigSpec(
                    name="data",
                    role=subcluster_pb.Role.DATANODE,
                    resources=common_pb.Resources(
                        resource_preset_id=datanode_resource_preset,
                        disk_size=datanode_disk_size,
                        disk_type_id=datanode_disk_type,
                    ),
                    subnet_id=subnet_id,
                    hosts_count=datanode_count,
                )
            )

        if computenode_count:
            # Autoscaling is only configured when an upper bound is supplied.
            autoscaling_config = None
            if computenode_max_hosts_count:
                autoscaling_config = subcluster_pb.AutoscalingConfig(
                    max_hosts_count=computenode_max_hosts_count,
                    measurement_duration=computenode_measurement_duration,
                    warmup_duration=computenode_warmup_duration,
                    stabilization_duration=computenode_stabilization_duration,
                    preemptible=computenode_preemptible,
                    cpu_utilization_target=computenode_cpu_utilization_target,
                    decommission_timeout=computenode_decommission_timeout,
                )
            subclusters.append(
                cluster_service_pb.CreateSubclusterConfigSpec(
                    name="compute",
                    role=subcluster_pb.Role.COMPUTENODE,
                    resources=common_pb.Resources(
                        resource_preset_id=computenode_resource_preset,
                        disk_size=computenode_disk_size,
                        disk_type_id=computenode_disk_type,
                    ),
                    subnet_id=subnet_id,
                    hosts_count=computenode_count,
                    autoscaling_config=autoscaling_config,
                )
            )

        request = cluster_service_pb.CreateClusterRequest(
            folder_id=folder_id,
            name=cluster_name,
            description=cluster_description,
            config_spec=cluster_service_pb.CreateClusterConfigSpec(
                version_id=cluster_image_version,
                hadoop=cluster_pb.HadoopConfig(
                    services=services,
                    ssh_public_keys=ssh_public_keys,
                    properties=properties,
                    # Falsy (None/empty) passes through unchanged; otherwise
                    # each action is converted to its protobuf form.
                    initialization_actions=(
                        initialization_actions and [action.to_grpc() for action in initialization_actions]
                    ),
                ),
                subclusters_spec=subclusters,
            ),
            zone_id=zone,
            service_account_id=service_account_id,
            bucket=s3_bucket,
            ui_proxy=enable_ui_proxy,
            host_group_ids=host_group_ids,
            security_group_ids=security_group_ids,
            log_group_id=log_group_id,
            labels=labels,
        )
        result = self.sdk.create_operation_and_get_result(
            request,
            service=cluster_service_grpc_pb.ClusterServiceStub,
            method_name="Create",
            response_type=cluster_pb.Cluster,
            meta_type=cluster_service_pb.CreateClusterMetadata,
        )
        # Remember the cluster/subnet so later job and subcluster calls can
        # default to them.
        self.cluster_id = result.response.id
        self.subnet_id = subnet_id  # pylint: disable=attribute-defined-outside-init
        return result
+
+ def create_subcluster(
+ self,
+ subcluster_type,
+ name,
+ resource_preset=None,
+ disk_size=None,
+ disk_type=None,
+ hosts_count=None,
+ subnet_id=None,
+ cluster_id=None,
+ max_hosts_count=None,
+ measurement_duration=None,
+ warmup_duration=None,
+ stabilization_duration=None,
+ preemptible=None,
+ cpu_utilization_target=None,
+ decommission_timeout=None,
+ ):
+ """
+ Create subcluster to Yandex.Cloud Data Proc cluster.
+
+ :param name: Name of the subcluster. Must be unique in the cluster
+ :type name: str
+ :param subcluster_type: Type of the subcluster. Either "data" or "compute".
+ :type subcluster_type: str
+ :param resource_preset: Resources preset (CPU+RAM configuration) for the nodes of the cluster.
+ :type resource_preset: str
+ :param disk_size: Storage size in GiB.
+ :type disk_size: int
+ :param disk_type: Storage type. Possible options: network-ssd, network-hdd.
+ :type disk_type: str
+ :param hosts_count: Number of nodes in subcluster.
+ :type hosts_count: int
+ :param subnet_id: Subnet ID of the cluster.
+ :type subnet_id: str
+ :param cluster_id: ID of the cluster.
+ :type cluster_id: str
+ :param max_hosts_count: Upper limit for total instance autoscaling compute subcluster count.
+ :type max_hosts_count: int
+ :param measurement_duration: Time in seconds allotted for averaging metrics. In seconds.
+ :type measurement_duration: int
+ :param warmup_duration: The warmup time of the instance in seconds. During this time,
+ traffic is sent to the instance, but instance metrics are not collected. In seconds.
+ :type warmup_duration: int
+ :param stabilization_duration: Minimum amount of time in seconds allotted for monitoring before
+ Instance Groups can reduce the number of instances in the group.
+ During this time, the group size doesn't decrease, even if the new metric values
+ indicate that it should. In seconds.
+ :type stabilization_duration: int
+ :param preemptible: Preemptible instances are stopped at least once every 24 hours,
+ and can be stopped at any time if their resources are needed by Compute.
+ :type preemptible: bool
+ :param cpu_utilization_target: Defines an autoscaling rule
+ based on the average CPU utilization of the instance group.
+ in percents. 10-100.
+ :type cpu_utilization_target: int
+ :param decommission_timeout: Timeout to gracefully decommission nodes during downscaling. In seconds.
+ :type decommission_timeout: int
+ """
+ # pylint: disable=too-many-locals
+ cluster_id = cluster_id or self.cluster_id
+ if not cluster_id:
+ raise RuntimeError("Cluster id must be specified.")
+ subnet_id = subnet_id or self.subnet_id
+ if not subnet_id:
+ raise RuntimeError("Subnet ID id must be specified.")
+ subnet_id = subnet_id or self.subnet_id
+ if not subnet_id:
+ raise RuntimeError("Subnet ID id must be specified.")
+
+ types = {
+ "compute": subcluster_pb.Role.COMPUTENODE,
+ "data": subcluster_pb.Role.DATANODE,
+ }
+ if disk_size:
+ disk_size *= 1024**3
+ resources = common_pb.Resources(
+ resource_preset_id=resource_preset,
+ disk_size=disk_size,
+ disk_type_id=disk_type,
+ )
+
+ self.log.info("Adding subcluster to cluster %s", cluster_id)
+ autoscaling_config = None
+ if max_hosts_count:
+ autoscaling_config = subcluster_pb.AutoscalingConfig(
+ max_hosts_count=max_hosts_count,
+ measurement_duration=measurement_duration,
+ warmup_duration=warmup_duration,
+ stabilization_duration=stabilization_duration,
+ preemptible=preemptible,
+ cpu_utilization_target=cpu_utilization_target,
+ decommission_timeout=decommission_timeout,
+ )
+ request = subcluster_service_pb.CreateSubclusterRequest(
+ cluster_id=cluster_id,
+ name=name,
+ role=types[subcluster_type],
+ resources=resources,
+ subnet_id=subnet_id,
+ hosts_count=hosts_count,
+ autoscaling_config=autoscaling_config,
+ )
+ return self.sdk.create_operation_and_get_result(
+ request,
+ service=subcluster_service_grpc_pb.SubclusterServiceStub,
+ method_name="Create",
+ response_type=subcluster_pb.Subcluster,
+ meta_type=subcluster_service_pb.CreateSubclusterMetadata,
+ )
+
+ def update_cluster_description(self, description, cluster_id=None):
+ """
+ Changes Yandex.Cloud Data Proc cluster description.
+
+ :param description: Description of the cluster.
+ :type description: str
+ :param cluster_id: ID of the cluster.
+ :type cluster_id: str
+ """
+ cluster_id = cluster_id or self.cluster_id
+ if not cluster_id:
+ raise RuntimeError("Cluster id must be specified.")
+
+ self.log.info("Updating cluster %s", cluster_id)
+ mask = FieldMask(paths=["description"])
+ request = cluster_service_pb.UpdateClusterRequest(
+ cluster_id=cluster_id,
+ update_mask=mask,
+ description=description,
+ )
+ return self.sdk.create_operation_and_get_result(
+ request,
+ service=cluster_service_grpc_pb.ClusterServiceStub,
+ method_name="Update",
+ response_type=cluster_pb.Cluster,
+ meta_type=cluster_service_pb.UpdateClusterMetadata,
+ )
+
+ def delete_cluster(self, cluster_id=None):
+ """
+ Delete Yandex.Cloud Data Proc cluster.
+ :param cluster_id: ID of the cluster to remove.
+ :type cluster_id: str
+ """
+ cluster_id = cluster_id or self.cluster_id
+ if not cluster_id:
+ raise RuntimeError("Cluster id must be specified.")
+
+ self.log.info("Deleting cluster %s", cluster_id)
+ request = cluster_service_pb.DeleteClusterRequest(cluster_id=cluster_id)
+ return self.sdk.create_operation_and_get_result(
+ request,
+ service=cluster_service_grpc_pb.ClusterServiceStub,
+ method_name="Delete",
+ meta_type=cluster_service_pb.DeleteClusterMetadata,
+ )
+
+ def create_hive_job(
+ self,
+ query=None,
+ query_file_uri=None,
+ script_variables=None,
+ continue_on_failure=False,
+ properties=None,
+ cluster_id=None,
+ name="Hive job",
+ ):
+ """
+ Run Hive job in Yandex.Cloud Data Proc cluster.
+
+ :param query: Hive query.
+ :type query: str
+ :param query_file_uri: URI of the script that contains Hive queries. Can be placed in HDFS or S3.
+ :type query_file_uri: str
+ :param properties: A mapping of property names to values, used to configure Hive.
+ :type properties: Dist[str, str]
+ :param script_variables: Mapping of query variable names to values.
+ :type script_variables: Dist[str, str]
+ :param continue_on_failure: Whether to continue executing queries if a query fails.
+ :type continue_on_failure: boole
+ :param cluster_id: ID of the cluster to run job in.
+ Will try to take the ID from Dataproc Hook object if ot specified.
+ :type cluster_id: str
+ :param name: Name of the job. Used for labeling.
+ :type name: str
+ """
+ cluster_id = cluster_id or self.cluster_id
+ if not cluster_id:
+ raise RuntimeError("Cluster id must be specified.")
+ if (query and query_file_uri) or not (query or query_file_uri):
+ raise RuntimeError("Either query or query_file_uri must be specified.")
+ self.log.info("Running Hive job. Cluster ID: %s", cluster_id)
+
+ hive_job = job_pb.HiveJob(
+ query_file_uri=query_file_uri,
+ script_variables=script_variables,
+ continue_on_failure=continue_on_failure,
+ properties=properties,
+ )
+ if query:
+ hive_job = job_pb.HiveJob(
+ query_list=job_pb.QueryList(queries=query.split("\n")),
+ script_variables=script_variables,
+ continue_on_failure=continue_on_failure,
+ properties=properties,
+ )
+ request = job_service_pb.CreateJobRequest(
+ cluster_id=cluster_id,
+ name=name,
+ hive_job=hive_job,
+ )
+ return self.sdk.create_operation_and_get_result(
+ request,
+ service=job_service_grpc_pb.JobServiceStub,
+ method_name="Create",
+ response_type=job_pb.Job,
+ meta_type=job_service_pb.CreateJobMetadata,
+ )
+
    def create_mapreduce_job(
        self,
        main_class=None,
        main_jar_file_uri=None,
        jar_file_uris=None,
        archive_uris=None,
        file_uris=None,
        args=None,
        properties=None,
        cluster_id=None,
        name="Mapreduce job",
    ):
        """
        Run Mapreduce job in Yandex.Cloud Data Proc cluster.

        :param main_jar_file_uri: URI of jar file with job.
            Can be placed in HDFS or S3. Can be specified instead of main_class.
        :type main_jar_file_uri: str
        :param main_class: Name of the main class of the job. Can be specified instead of main_jar_file_uri.
        :type main_class: str
        :param file_uris: URIs of files used in the job. Can be placed in HDFS or S3.
        :type file_uris: List[str]
        :param archive_uris: URIs of archive files used in the job. Can be placed in HDFS or S3.
        :type archive_uris: List[str]
        :param jar_file_uris: URIs of JAR files used in the job. Can be placed in HDFS or S3.
        :type jar_file_uris: List[str]
        :param properties: Properties for the job.
        :type properties: Dict[str, str]
        :param args: Arguments to be passed to the job.
        :type args: List[str]
        :param cluster_id: ID of the cluster to run job in.
            Will try to take the ID from Dataproc Hook object if not specified.
        :type cluster_id: str
        :param name: Name of the job. Used for labeling.
        :type name: str
        """
        cluster_id = cluster_id or self.cluster_id
        if not cluster_id:
            raise RuntimeError("Cluster id must be specified.")
        self.log.info("Running Mapreduce job. Cluster ID: %s", cluster_id)

        request = job_service_pb.CreateJobRequest(
            cluster_id=cluster_id,
            name=name,
            mapreduce_job=job_pb.MapreduceJob(
                main_class=main_class,
                main_jar_file_uri=main_jar_file_uri,
                jar_file_uris=jar_file_uris,
                archive_uris=archive_uris,
                file_uris=file_uris,
                args=args,
                properties=properties,
            ),
        )
        return self.sdk.create_operation_and_get_result(
            request,
            service=job_service_grpc_pb.JobServiceStub,
            method_name="Create",
            response_type=job_pb.Job,
            meta_type=job_service_pb.CreateJobMetadata,
        )
+
    def create_spark_job(
        self,
        main_jar_file_uri=None,
        main_class=None,
        file_uris=None,
        archive_uris=None,
        jar_file_uris=None,
        args=None,
        properties=None,
        cluster_id=None,
        name="Spark job",
        packages=None,
        repositories=None,
        exclude_packages=None,
    ):
        """
        Run Spark job in Yandex.Cloud Data Proc cluster.

        :param main_jar_file_uri: URI of jar file with job. Can be placed in HDFS or S3.
        :type main_jar_file_uri: str
        :param main_class: Name of the main class of the job.
        :type main_class: str
        :param file_uris: URIs of files used in the job. Can be placed in HDFS or S3.
        :type file_uris: List[str]
        :param archive_uris: URIs of archive files used in the job. Can be placed in HDFS or S3.
        :type archive_uris: List[str]
        :param jar_file_uris: URIs of JAR files used in the job. Can be placed in HDFS or S3.
        :type jar_file_uris: List[str]
        :param properties: Properties for the job.
        :type properties: Dict[str, str]
        :param args: Arguments to be passed to the job.
        :type args: List[str]
        :param cluster_id: ID of the cluster to run job in.
            Will try to take the ID from Dataproc Hook object if not specified.
        :type cluster_id: str
        :param name: Name of the job. Used for labeling.
        :type name: str
        :param packages: List of maven coordinates of jars to include on the driver and executor classpaths.
        :type packages: List[str]
        :param repositories: List of additional remote repositories to search for the maven
            coordinates given with --packages.
        :type repositories: List[str]
        :param exclude_packages: List of groupId:artifactId, to exclude while resolving the
            dependencies provided in --packages to avoid dependency conflicts.
        :type exclude_packages: List[str]
        """
        cluster_id = cluster_id or self.cluster_id
        if not cluster_id:
            raise RuntimeError("Cluster id must be specified.")
        self.log.info("Running Spark job. Cluster ID: %s", cluster_id)

        request = job_service_pb.CreateJobRequest(
            cluster_id=cluster_id,
            name=name,
            spark_job=job_pb.SparkJob(
                main_jar_file_uri=main_jar_file_uri,
                main_class=main_class,
                file_uris=file_uris,
                archive_uris=archive_uris,
                jar_file_uris=jar_file_uris,
                args=args,
                properties=properties,
                packages=packages,
                repositories=repositories,
                exclude_packages=exclude_packages,
            ),
        )
        return self.sdk.create_operation_and_get_result(
            request,
            service=job_service_grpc_pb.JobServiceStub,
            method_name="Create",
            response_type=job_pb.Job,
            meta_type=job_service_pb.CreateJobMetadata,
        )
+
    def create_pyspark_job(
        self,
        main_python_file_uri=None,
        python_file_uris=None,
        file_uris=None,
        archive_uris=None,
        jar_file_uris=None,
        args=None,
        properties=None,
        cluster_id=None,
        name="Pyspark job",
        packages=None,
        repositories=None,
        exclude_packages=None,
    ):
        """
        Run Pyspark job in Yandex.Cloud Data Proc cluster.

        :param main_python_file_uri: URI of python file with job. Can be placed in HDFS or S3.
        :type main_python_file_uri: str
        :param python_file_uris: URIs of python files used in the job. Can be placed in HDFS or S3.
        :type python_file_uris: List[str]
        :param file_uris: URIs of files used in the job. Can be placed in HDFS or S3.
        :type file_uris: List[str]
        :param archive_uris: URIs of archive files used in the job. Can be placed in HDFS or S3.
        :type archive_uris: List[str]
        :param jar_file_uris: URIs of JAR files used in the job. Can be placed in HDFS or S3.
        :type jar_file_uris: List[str]
        :param properties: Properties for the job.
        :type properties: Dict[str, str]
        :param args: Arguments to be passed to the job.
        :type args: List[str]
        :param cluster_id: ID of the cluster to run job in.
            Will try to take the ID from Dataproc Hook object if not specified.
        :type cluster_id: str
        :param name: Name of the job. Used for labeling.
        :type name: str
        :param packages: List of maven coordinates of jars to include on the driver and executor classpaths.
        :type packages: List[str]
        :param repositories: List of additional remote repositories to search for the maven
            coordinates given with --packages.
        :type repositories: List[str]
        :param exclude_packages: List of groupId:artifactId, to exclude while resolving the
            dependencies provided in --packages to avoid dependency conflicts.
        :type exclude_packages: List[str]
        """
        cluster_id = cluster_id or self.cluster_id
        if not cluster_id:
            raise RuntimeError("Cluster id must be specified.")
        self.log.info("Running Pyspark job. Cluster ID: %s", cluster_id)
        request = job_service_pb.CreateJobRequest(
            cluster_id=cluster_id,
            name=name,
            pyspark_job=job_pb.PysparkJob(
                main_python_file_uri=main_python_file_uri,
                python_file_uris=python_file_uris,
                file_uris=file_uris,
                archive_uris=archive_uris,
                jar_file_uris=jar_file_uris,
                args=args,
                properties=properties,
                packages=packages,
                repositories=repositories,
                exclude_packages=exclude_packages,
            ),
        )
        return self.sdk.create_operation_and_get_result(
            request,
            service=job_service_grpc_pb.JobServiceStub,
            method_name="Create",
            response_type=job_pb.Job,
            meta_type=job_service_pb.CreateJobMetadata,
        )
diff --git a/contrib/python/yandexcloud/yandexcloud/auth.py b/contrib/python/yandexcloud/yandexcloud/auth.py
new file mode 100644
index 0000000000..079a427f4b
--- /dev/null
+++ b/contrib/python/yandexcloud/yandexcloud/auth.py
@@ -0,0 +1,34 @@
+from typing import Dict, Optional
+
+from yandex.cloud.iam.v1.iam_token_service_pb2_grpc import IamTokenServiceStub
+from yandexcloud._auth_fabric import (
+ YC_API_ENDPOINT,
+ IamTokenAuth,
+ MetadataAuth,
+ get_auth_token_requester,
+)
+from yandexcloud._sdk import SDK
+
+
+def get_auth_token(
+ token: Optional[str] = None,
+ service_account_key: Optional[Dict[str, str]] = None,
+ iam_token: Optional[str] = None,
+ metadata_addr: Optional[str] = None,
+ endpoint: Optional[str] = None,
+) -> str:
+ if endpoint is None:
+ endpoint = YC_API_ENDPOINT
+ requester = get_auth_token_requester(
+ token=token,
+ service_account_key=service_account_key,
+ iam_token=iam_token,
+ metadata_addr=metadata_addr,
+ endpoint=endpoint,
+ )
+ if isinstance(requester, (MetadataAuth, IamTokenAuth)):
+ return requester.get_token()
+
+ sdk = SDK()
+ client = sdk.client(IamTokenServiceStub)
+ return client.Create(requester.get_token_request()).iam_token
diff --git a/contrib/python/yandexcloud/yandexcloud/operations.py b/contrib/python/yandexcloud/yandexcloud/operations.py
new file mode 100644
index 0000000000..2e7a615b0c
--- /dev/null
+++ b/contrib/python/yandexcloud/yandexcloud/operations.py
@@ -0,0 +1,27 @@
+from typing import TYPE_CHECKING, Optional
+
+if TYPE_CHECKING:
+ import google.protobuf.message
+
+ from yandex.cloud.operation.operation_pb2 import Operation
+
+ # from yandex.cloud.api.operation_pb2 import Operation
+
+
+class OperationResult:
+ def __init__(
+ self,
+ operation: "Operation",
+ response: Optional["google.protobuf.message.Message"] = None,
+ meta: Optional["google.protobuf.message.Message"] = None,
+ ):
+ self.operation = operation
+ self.response = response
+ self.meta = meta
+
+
+class OperationError(RuntimeError):
+ def __init__(self, message: str, operation_result: OperationResult):
+ super(OperationError, self).__init__(message) # pylint: disable=super-with-arguments
+ self.message = message
+ self.operation_result = operation_result
diff --git a/contrib/python/yandexcloud/yandexcloud/py.typed b/contrib/python/yandexcloud/yandexcloud/py.typed
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/contrib/python/yandexcloud/yandexcloud/py.typed