author     shadchin <shadchin@yandex-team.com>  2023-10-02 15:08:44 +0300
committer  shadchin <shadchin@yandex-team.com>  2023-10-02 15:35:28 +0300
commit     78229dda48cda2402d17aa589f9e2165b3513caf (patch)
tree       ab840878d8609ee3f8b0d44f0dc04ef47e94ee23 /contrib/python/cryptography/py2/cryptography
parent     991934594f5abcdb6405f3021b7a3ba9c1b38349 (diff)
Split cryptography into py2/py3
Diffstat (limited to 'contrib/python/cryptography/py2/cryptography')
-rw-r--r--  contrib/python/cryptography/py2/cryptography/__about__.py | 31
-rw-r--r--  contrib/python/cryptography/py2/cryptography/__init__.py | 32
-rw-r--r--  contrib/python/cryptography/py2/cryptography/exceptions.py | 58
-rw-r--r--  contrib/python/cryptography/py2/cryptography/fernet.py | 190
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/__init__.py | 11
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/_der.py | 156
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/_oid.py | 77
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/__init__.py | 26
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/interfaces.py | 396
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/__init__.py | 10
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/aead.py | 166
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/backend.py | 2776
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ciphers.py | 231
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/cmac.py | 82
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/decode_asn1.py | 878
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/dh.py | 271
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/dsa.py | 263
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ec.py | 337
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ed25519.py | 145
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ed448.py | 146
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/encode_asn1.py | 670
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/hashes.py | 82
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/hmac.py | 76
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ocsp.py | 401
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/poly1305.py | 65
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/rsa.py | 516
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/utils.py | 66
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/x25519.py | 123
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/x448.py | 107
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/x509.py | 587
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/bindings/__init__.py | 5
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/bindings/openssl/__init__.py | 5
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/bindings/openssl/_conditional.py | 322
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/bindings/openssl/binding.py | 172
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/__init__.py | 5
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/__init__.py | 40
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/dh.py | 224
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/dsa.py | 261
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/ec.py | 502
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/ed25519.py | 87
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/ed448.py | 82
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/padding.py | 80
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/rsa.py | 380
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/utils.py | 41
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/x25519.py | 76
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/x448.py | 76
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/__init__.py | 26
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/aead.py | 174
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/algorithms.py | 170
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/base.py | 241
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/modes.py | 225
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/cmac.py | 64
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/constant_time.py | 14
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/hashes.py | 259
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/hmac.py | 70
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/__init__.py | 26
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/concatkdf.py | 131
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/hkdf.py | 115
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/kbkdf.py | 162
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/pbkdf2.py | 62
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/scrypt.py | 68
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/x963kdf.py | 74
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/keywrap.py | 161
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/padding.py | 214
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/poly1305.py | 58
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/__init__.py | 44
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/base.py | 91
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/pkcs12.py | 50
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/pkcs7.py | 132
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/ssh.py | 683
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/twofactor/__init__.py | 9
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/twofactor/hotp.py | 69
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/twofactor/totp.py | 51
-rw-r--r--  contrib/python/cryptography/py2/cryptography/hazmat/primitives/twofactor/utils.py | 33
-rw-r--r--  contrib/python/cryptography/py2/cryptography/utils.py | 171
-rw-r--r--  contrib/python/cryptography/py2/cryptography/x509/__init__.py | 248
-rw-r--r--  contrib/python/cryptography/py2/cryptography/x509/base.py | 892
-rw-r--r--  contrib/python/cryptography/py2/cryptography/x509/certificate_transparency.py | 46
-rw-r--r--  contrib/python/cryptography/py2/cryptography/x509/extensions.py | 1702
-rw-r--r--  contrib/python/cryptography/py2/cryptography/x509/general_name.py | 294
-rw-r--r--  contrib/python/cryptography/py2/cryptography/x509/name.py | 261
-rw-r--r--  contrib/python/cryptography/py2/cryptography/x509/ocsp.py | 467
-rw-r--r--  contrib/python/cryptography/py2/cryptography/x509/oid.py | 265
83 files changed, 19155 insertions, 0 deletions
diff --git a/contrib/python/cryptography/py2/cryptography/__about__.py b/contrib/python/cryptography/py2/cryptography/__about__.py
new file mode 100644
index 0000000000..f816509257
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/__about__.py
@@ -0,0 +1,31 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+__all__ = [
+ "__title__",
+ "__summary__",
+ "__uri__",
+ "__version__",
+ "__author__",
+ "__email__",
+ "__license__",
+ "__copyright__",
+]
+
+__title__ = "cryptography"
+__summary__ = (
+ "cryptography is a package which provides cryptographic recipes"
+ " and primitives to Python developers."
+)
+__uri__ = "https://github.com/pyca/cryptography"
+
+__version__ = "3.3.2"
+
+__author__ = "The cryptography developers"
+__email__ = "cryptography-dev@python.org"
+
+__license__ = "BSD or Apache License, Version 2.0"
+__copyright__ = "Copyright 2013-2021 {}".format(__author__)
diff --git a/contrib/python/cryptography/py2/cryptography/__init__.py b/contrib/python/cryptography/py2/cryptography/__init__.py
new file mode 100644
index 0000000000..0fcaef8370
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/__init__.py
@@ -0,0 +1,32 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import sys
+import warnings
+
+from cryptography.__about__ import (
+ __author__,
+ __copyright__,
+ __email__,
+ __license__,
+ __summary__,
+ __title__,
+ __uri__,
+ __version__,
+)
+from cryptography.utils import CryptographyDeprecationWarning
+
+
+__all__ = [
+ "__title__",
+ "__summary__",
+ "__uri__",
+ "__version__",
+ "__author__",
+ "__email__",
+ "__license__",
+ "__copyright__",
+]
diff --git a/contrib/python/cryptography/py2/cryptography/exceptions.py b/contrib/python/cryptography/py2/cryptography/exceptions.py
new file mode 100644
index 0000000000..1d52d7dcfc
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/exceptions.py
@@ -0,0 +1,58 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from enum import Enum
+
+
+class _Reasons(Enum):
+ BACKEND_MISSING_INTERFACE = 0
+ UNSUPPORTED_HASH = 1
+ UNSUPPORTED_CIPHER = 2
+ UNSUPPORTED_PADDING = 3
+ UNSUPPORTED_MGF = 4
+ UNSUPPORTED_PUBLIC_KEY_ALGORITHM = 5
+ UNSUPPORTED_ELLIPTIC_CURVE = 6
+ UNSUPPORTED_SERIALIZATION = 7
+ UNSUPPORTED_X509 = 8
+ UNSUPPORTED_EXCHANGE_ALGORITHM = 9
+ UNSUPPORTED_DIFFIE_HELLMAN = 10
+ UNSUPPORTED_MAC = 11
+
+
+class UnsupportedAlgorithm(Exception):
+ def __init__(self, message, reason=None):
+ super(UnsupportedAlgorithm, self).__init__(message)
+ self._reason = reason
+
+
+class AlreadyFinalized(Exception):
+ pass
+
+
+class AlreadyUpdated(Exception):
+ pass
+
+
+class NotYetFinalized(Exception):
+ pass
+
+
+class InvalidTag(Exception):
+ pass
+
+
+class InvalidSignature(Exception):
+ pass
+
+
+class InternalError(Exception):
+ def __init__(self, msg, err_code):
+ super(InternalError, self).__init__(msg)
+ self.err_code = err_code
+
+
+class InvalidKey(Exception):
+ pass
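A minimal sketch of how these exceptions surface through the public API (illustration only, not part of the vendored sources; it assumes the HMAC primitive that this commit also vendors):

    from cryptography.exceptions import AlreadyFinalized, InvalidSignature
    from cryptography.hazmat.primitives import hashes, hmac

    h = hmac.HMAC(b"\x00" * 32, hashes.SHA256())
    h.update(b"message")
    tag = h.finalize()
    try:
        h.update(b"more")        # a finalized context cannot be reused
    except AlreadyFinalized:
        pass

    h2 = hmac.HMAC(b"\x00" * 32, hashes.SHA256())
    h2.update(b"message")
    try:
        h2.verify(b"\x00" * 32)  # wrong tag
    except InvalidSignature:
        pass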
diff --git a/contrib/python/cryptography/py2/cryptography/fernet.py b/contrib/python/cryptography/py2/cryptography/fernet.py
new file mode 100644
index 0000000000..00c2528671
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/fernet.py
@@ -0,0 +1,190 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import base64
+import binascii
+import os
+import struct
+import time
+
+import six
+
+from cryptography import utils
+from cryptography.exceptions import InvalidSignature
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.primitives import hashes, padding
+from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
+from cryptography.hazmat.primitives.hmac import HMAC
+
+
+class InvalidToken(Exception):
+ pass
+
+
+_MAX_CLOCK_SKEW = 60
+
+
+class Fernet(object):
+ def __init__(self, key, backend=None):
+ backend = _get_backend(backend)
+
+ key = base64.urlsafe_b64decode(key)
+ if len(key) != 32:
+ raise ValueError(
+ "Fernet key must be 32 url-safe base64-encoded bytes."
+ )
+
+ self._signing_key = key[:16]
+ self._encryption_key = key[16:]
+ self._backend = backend
+
+ @classmethod
+ def generate_key(cls):
+ return base64.urlsafe_b64encode(os.urandom(32))
+
+ def encrypt(self, data):
+ return self.encrypt_at_time(data, int(time.time()))
+
+ def encrypt_at_time(self, data, current_time):
+ iv = os.urandom(16)
+ return self._encrypt_from_parts(data, current_time, iv)
+
+ def _encrypt_from_parts(self, data, current_time, iv):
+ utils._check_bytes("data", data)
+
+ padder = padding.PKCS7(algorithms.AES.block_size).padder()
+ padded_data = padder.update(data) + padder.finalize()
+ encryptor = Cipher(
+ algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend
+ ).encryptor()
+ ciphertext = encryptor.update(padded_data) + encryptor.finalize()
+
+ basic_parts = (
+ b"\x80" + struct.pack(">Q", current_time) + iv + ciphertext
+ )
+
+ h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)
+ h.update(basic_parts)
+ hmac = h.finalize()
+ return base64.urlsafe_b64encode(basic_parts + hmac)
+
+ def decrypt(self, token, ttl=None):
+ timestamp, data = Fernet._get_unverified_token_data(token)
+ return self._decrypt_data(data, timestamp, ttl, int(time.time()))
+
+ def decrypt_at_time(self, token, ttl, current_time):
+ if ttl is None:
+ raise ValueError(
+ "decrypt_at_time() can only be used with a non-None ttl"
+ )
+ timestamp, data = Fernet._get_unverified_token_data(token)
+ return self._decrypt_data(data, timestamp, ttl, current_time)
+
+ def extract_timestamp(self, token):
+ timestamp, data = Fernet._get_unverified_token_data(token)
+ # Verify the token was not tampered with.
+ self._verify_signature(data)
+ return timestamp
+
+ @staticmethod
+ def _get_unverified_token_data(token):
+ utils._check_bytes("token", token)
+ try:
+ data = base64.urlsafe_b64decode(token)
+ except (TypeError, binascii.Error):
+ raise InvalidToken
+
+ if not data or six.indexbytes(data, 0) != 0x80:
+ raise InvalidToken
+
+ try:
+ (timestamp,) = struct.unpack(">Q", data[1:9])
+ except struct.error:
+ raise InvalidToken
+ return timestamp, data
+
+ def _verify_signature(self, data):
+ h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)
+ h.update(data[:-32])
+ try:
+ h.verify(data[-32:])
+ except InvalidSignature:
+ raise InvalidToken
+
+ def _decrypt_data(self, data, timestamp, ttl, current_time):
+ if ttl is not None:
+ if timestamp + ttl < current_time:
+ raise InvalidToken
+
+ if current_time + _MAX_CLOCK_SKEW < timestamp:
+ raise InvalidToken
+
+ self._verify_signature(data)
+
+ iv = data[9:25]
+ ciphertext = data[25:-32]
+ decryptor = Cipher(
+ algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend
+ ).decryptor()
+ plaintext_padded = decryptor.update(ciphertext)
+ try:
+ plaintext_padded += decryptor.finalize()
+ except ValueError:
+ raise InvalidToken
+ unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()
+
+ unpadded = unpadder.update(plaintext_padded)
+ try:
+ unpadded += unpadder.finalize()
+ except ValueError:
+ raise InvalidToken
+ return unpadded
+
+
+class MultiFernet(object):
+ def __init__(self, fernets):
+ fernets = list(fernets)
+ if not fernets:
+ raise ValueError(
+ "MultiFernet requires at least one Fernet instance"
+ )
+ self._fernets = fernets
+
+ def encrypt(self, msg):
+ return self.encrypt_at_time(msg, int(time.time()))
+
+ def encrypt_at_time(self, msg, current_time):
+ return self._fernets[0].encrypt_at_time(msg, current_time)
+
+ def rotate(self, msg):
+ timestamp, data = Fernet._get_unverified_token_data(msg)
+ for f in self._fernets:
+ try:
+ p = f._decrypt_data(data, timestamp, None, None)
+ break
+ except InvalidToken:
+ pass
+ else:
+ raise InvalidToken
+
+ iv = os.urandom(16)
+ return self._fernets[0]._encrypt_from_parts(p, timestamp, iv)
+
+ def decrypt(self, msg, ttl=None):
+ for f in self._fernets:
+ try:
+ return f.decrypt(msg, ttl)
+ except InvalidToken:
+ pass
+ raise InvalidToken
+
+ def decrypt_at_time(self, msg, ttl, current_time):
+ for f in self._fernets:
+ try:
+ return f.decrypt_at_time(msg, ttl, current_time)
+ except InvalidToken:
+ pass
+ raise InvalidToken
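A minimal usage sketch of the Fernet and MultiFernet classes above, restricted to their public API (illustration only, not part of the vendored sources):

    from cryptography.fernet import Fernet, MultiFernet

    key = Fernet.generate_key()            # 32 random bytes, url-safe base64-encoded
    f = Fernet(key)

    # Token layout produced above: 0x80 || 64-bit timestamp || 16-byte IV ||
    # AES-128-CBC ciphertext || 32-byte HMAC-SHA256 over everything before it.
    token = f.encrypt(b"secret message")
    assert f.decrypt(token, ttl=60) == b"secret message"

    # MultiFernet decrypts with any of its keys; rotate() re-encrypts with the
    # first (newest) one while preserving the original timestamp.
    new_key = Fernet.generate_key()
    mf = MultiFernet([Fernet(new_key), Fernet(key)])
    rotated = mf.rotate(token)
    assert mf.decrypt(rotated) == b"secret message"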
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/__init__.py b/contrib/python/cryptography/py2/cryptography/hazmat/__init__.py
new file mode 100644
index 0000000000..9f06a9949a
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/__init__.py
@@ -0,0 +1,11 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+"""
+Hazardous Materials
+
+This is a "Hazardous Materials" module. You should ONLY use it if you're
+100% absolutely sure that you know what you're doing because this module
+is full of land mines, dragons, and dinosaurs with laser guns.
+"""
+from __future__ import absolute_import, division, print_function
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/_der.py b/contrib/python/cryptography/py2/cryptography/hazmat/_der.py
new file mode 100644
index 0000000000..462b911b45
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/_der.py
@@ -0,0 +1,156 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import six
+
+from cryptography.utils import int_from_bytes, int_to_bytes
+
+
+# This module contains a lightweight DER encoder and decoder. See X.690 for the
+# specification. This module intentionally does not implement the more complex
+# BER encoding, only DER.
+#
+# Note this implementation treats an element's constructed bit as part of the
+# tag. This is fine for DER, where the bit is always computable from the type.
+
+
+CONSTRUCTED = 0x20
+CONTEXT_SPECIFIC = 0x80
+
+INTEGER = 0x02
+BIT_STRING = 0x03
+OCTET_STRING = 0x04
+NULL = 0x05
+OBJECT_IDENTIFIER = 0x06
+SEQUENCE = 0x10 | CONSTRUCTED
+SET = 0x11 | CONSTRUCTED
+PRINTABLE_STRING = 0x13
+UTC_TIME = 0x17
+GENERALIZED_TIME = 0x18
+
+
+class DERReader(object):
+ def __init__(self, data):
+ self.data = memoryview(data)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, tb):
+ if exc_value is None:
+ self.check_empty()
+
+ def is_empty(self):
+ return len(self.data) == 0
+
+ def check_empty(self):
+ if not self.is_empty():
+ raise ValueError("Invalid DER input: trailing data")
+
+ def read_byte(self):
+ if len(self.data) < 1:
+ raise ValueError("Invalid DER input: insufficient data")
+ ret = six.indexbytes(self.data, 0)
+ self.data = self.data[1:]
+ return ret
+
+ def read_bytes(self, n):
+ if len(self.data) < n:
+ raise ValueError("Invalid DER input: insufficient data")
+ ret = self.data[:n]
+ self.data = self.data[n:]
+ return ret
+
+ def read_any_element(self):
+ tag = self.read_byte()
+ # Tag numbers 31 or higher are stored in multiple bytes. No supported
+ # ASN.1 types use such tags, so reject these.
+ if tag & 0x1F == 0x1F:
+ raise ValueError("Invalid DER input: unexpected high tag number")
+ length_byte = self.read_byte()
+ if length_byte & 0x80 == 0:
+ # If the high bit is clear, the first length byte is the length.
+ length = length_byte
+ else:
+ # If the high bit is set, the first length byte encodes the length
+ # of the length.
+ length_byte &= 0x7F
+ if length_byte == 0:
+ raise ValueError(
+ "Invalid DER input: indefinite length form is not allowed "
+ "in DER"
+ )
+ length = 0
+ for i in range(length_byte):
+ length <<= 8
+ length |= self.read_byte()
+ if length == 0:
+ raise ValueError(
+ "Invalid DER input: length was not minimally-encoded"
+ )
+ if length < 0x80:
+ # If the length could have been encoded in short form, it must
+ # not use long form.
+ raise ValueError(
+ "Invalid DER input: length was not minimally-encoded"
+ )
+ body = self.read_bytes(length)
+ return tag, DERReader(body)
+
+ def read_element(self, expected_tag):
+ tag, body = self.read_any_element()
+ if tag != expected_tag:
+ raise ValueError("Invalid DER input: unexpected tag")
+ return body
+
+ def read_single_element(self, expected_tag):
+ with self:
+ return self.read_element(expected_tag)
+
+ def read_optional_element(self, expected_tag):
+ if len(self.data) > 0 and six.indexbytes(self.data, 0) == expected_tag:
+ return self.read_element(expected_tag)
+ return None
+
+ def as_integer(self):
+ if len(self.data) == 0:
+ raise ValueError("Invalid DER input: empty integer contents")
+ first = six.indexbytes(self.data, 0)
+ if first & 0x80 == 0x80:
+ raise ValueError("Negative DER integers are not supported")
+ # The first 9 bits must not all be zero or all be ones. Otherwise, the
+ # encoding should have been one byte shorter.
+ if len(self.data) > 1:
+ second = six.indexbytes(self.data, 1)
+ if first == 0 and second & 0x80 == 0:
+ raise ValueError(
+ "Invalid DER input: integer not minimally-encoded"
+ )
+ return int_from_bytes(self.data, "big")
+
+
+def encode_der_integer(x):
+ if not isinstance(x, six.integer_types):
+ raise ValueError("Value must be an integer")
+ if x < 0:
+ raise ValueError("Negative integers are not supported")
+ n = x.bit_length() // 8 + 1
+ return int_to_bytes(x, n)
+
+
+def encode_der(tag, *children):
+ length = 0
+ for child in children:
+ length += len(child)
+ chunks = [six.int2byte(tag)]
+ if length < 0x80:
+ chunks.append(six.int2byte(length))
+ else:
+ length_bytes = int_to_bytes(length)
+ chunks.append(six.int2byte(0x80 | len(length_bytes)))
+ chunks.append(length_bytes)
+ chunks.extend(children)
+ return b"".join(chunks)
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/_oid.py b/contrib/python/cryptography/py2/cryptography/hazmat/_oid.py
new file mode 100644
index 0000000000..de2771a737
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/_oid.py
@@ -0,0 +1,77 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+
+
+class ObjectIdentifier(object):
+ def __init__(self, dotted_string):
+ self._dotted_string = dotted_string
+
+ nodes = self._dotted_string.split(".")
+ intnodes = []
+
+ # There must be at least 2 nodes, the first node must be 0..2, and
+ # if less than 2, the second node cannot have a value outside the
+ # range 0..39. All nodes must be integers.
+ for node in nodes:
+ try:
+ node_value = int(node, 10)
+ except ValueError:
+ raise ValueError(
+ "Malformed OID: %s (non-integer nodes)"
+ % (self._dotted_string)
+ )
+ if node_value < 0:
+ raise ValueError(
+ "Malformed OID: %s (negative-integer nodes)"
+ % (self._dotted_string)
+ )
+ intnodes.append(node_value)
+
+ if len(nodes) < 2:
+ raise ValueError(
+ "Malformed OID: %s (insufficient number of nodes)"
+ % (self._dotted_string)
+ )
+
+ if intnodes[0] > 2:
+ raise ValueError(
+ "Malformed OID: %s (first node outside valid range)"
+ % (self._dotted_string)
+ )
+
+ if intnodes[0] < 2 and intnodes[1] >= 40:
+ raise ValueError(
+ "Malformed OID: %s (second node outside valid range)"
+ % (self._dotted_string)
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, ObjectIdentifier):
+ return NotImplemented
+
+ return self.dotted_string == other.dotted_string
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __repr__(self):
+ return "<ObjectIdentifier(oid={}, name={})>".format(
+ self.dotted_string, self._name
+ )
+
+ def __hash__(self):
+ return hash(self.dotted_string)
+
+ @property
+ def _name(self):
+ # Lazy import to avoid an import cycle
+ from cryptography.x509.oid import _OID_NAMES
+
+ return _OID_NAMES.get(self, "Unknown OID")
+
+ dotted_string = utils.read_only_property("_dotted_string")
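A minimal sketch of the validation and equality rules implemented above (illustration only, not part of the vendored sources):

    from cryptography.hazmat._oid import ObjectIdentifier

    oid = ObjectIdentifier("2.5.4.3")          # X.509 commonName
    assert oid == ObjectIdentifier("2.5.4.3")  # equality and hashing use dotted_string
    assert oid.dotted_string == "2.5.4.3"

    # Rejected: when the first node is 0 or 1, the second node must be < 40.
    try:
        ObjectIdentifier("1.40")
    except ValueError:
        pass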
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/__init__.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/__init__.py
new file mode 100644
index 0000000000..1563936dde
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/__init__.py
@@ -0,0 +1,26 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+
+_default_backend = None
+
+
+def default_backend():
+ global _default_backend
+
+ if _default_backend is None:
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ _default_backend = backend
+
+ return _default_backend
+
+
+def _get_backend(backend):
+ if backend is None:
+ return default_backend()
+ else:
+ return backend
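A minimal sketch of the lazy backend resolution above: the OpenSSL backend is imported and cached on first use, and passing backend=None to a primitive falls through to it (illustration only, not part of the vendored sources):

    from cryptography.hazmat.backends import _get_backend, default_backend

    backend = default_backend()              # imports and caches the OpenSSL backend
    assert _get_backend(None) is backend     # None resolves to the cached default
    assert _get_backend(backend) is backend  # an explicit backend passes through unchanged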
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/interfaces.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/interfaces.py
new file mode 100644
index 0000000000..418980a34e
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/interfaces.py
@@ -0,0 +1,396 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+
+@six.add_metaclass(abc.ABCMeta)
+class CipherBackend(object):
+ @abc.abstractmethod
+ def cipher_supported(self, cipher, mode):
+ """
+ Return True if the given cipher and mode are supported.
+ """
+
+ @abc.abstractmethod
+ def create_symmetric_encryption_ctx(self, cipher, mode):
+ """
+ Get a CipherContext that can be used for encryption.
+ """
+
+ @abc.abstractmethod
+ def create_symmetric_decryption_ctx(self, cipher, mode):
+ """
+ Get a CipherContext that can be used for decryption.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class HashBackend(object):
+ @abc.abstractmethod
+ def hash_supported(self, algorithm):
+ """
+ Return True if the hash algorithm is supported by this backend.
+ """
+
+ @abc.abstractmethod
+ def create_hash_ctx(self, algorithm):
+ """
+ Create a HashContext for calculating a message digest.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class HMACBackend(object):
+ @abc.abstractmethod
+ def hmac_supported(self, algorithm):
+ """
+ Return True if the hash algorithm is supported for HMAC by this
+ backend.
+ """
+
+ @abc.abstractmethod
+ def create_hmac_ctx(self, key, algorithm):
+ """
+ Create a context for calculating a message authentication code.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class CMACBackend(object):
+ @abc.abstractmethod
+ def cmac_algorithm_supported(self, algorithm):
+ """
+ Returns True if the block cipher is supported for CMAC by this backend
+ """
+
+ @abc.abstractmethod
+ def create_cmac_ctx(self, algorithm):
+ """
+ Create a context for calculating a message authentication code.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class PBKDF2HMACBackend(object):
+ @abc.abstractmethod
+ def pbkdf2_hmac_supported(self, algorithm):
+ """
+ Return True if the hash algorithm is supported for PBKDF2 by this
+ backend.
+ """
+
+ @abc.abstractmethod
+ def derive_pbkdf2_hmac(
+ self, algorithm, length, salt, iterations, key_material
+ ):
+ """
+ Return length bytes derived from provided PBKDF2 parameters.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class RSABackend(object):
+ @abc.abstractmethod
+ def generate_rsa_private_key(self, public_exponent, key_size):
+ """
+ Generate an RSAPrivateKey instance with public_exponent and a modulus
+ of key_size bits.
+ """
+
+ @abc.abstractmethod
+ def rsa_padding_supported(self, padding):
+ """
+ Returns True if the backend supports the given padding options.
+ """
+
+ @abc.abstractmethod
+ def generate_rsa_parameters_supported(self, public_exponent, key_size):
+ """
+ Returns True if the backend supports the given parameters for key
+ generation.
+ """
+
+ @abc.abstractmethod
+ def load_rsa_private_numbers(self, numbers):
+ """
+ Returns an RSAPrivateKey provider.
+ """
+
+ @abc.abstractmethod
+ def load_rsa_public_numbers(self, numbers):
+ """
+ Returns an RSAPublicKey provider.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class DSABackend(object):
+ @abc.abstractmethod
+ def generate_dsa_parameters(self, key_size):
+ """
+ Generate a DSAParameters instance with a modulus of key_size bits.
+ """
+
+ @abc.abstractmethod
+ def generate_dsa_private_key(self, parameters):
+ """
+ Generate a DSAPrivateKey instance with parameters as a DSAParameters
+ object.
+ """
+
+ @abc.abstractmethod
+ def generate_dsa_private_key_and_parameters(self, key_size):
+ """
+ Generate a DSAPrivateKey instance using key size only.
+ """
+
+ @abc.abstractmethod
+ def dsa_hash_supported(self, algorithm):
+ """
+ Return True if the hash algorithm is supported by the backend for DSA.
+ """
+
+ @abc.abstractmethod
+ def dsa_parameters_supported(self, p, q, g):
+ """
+ Return True if the parameters are supported by the backend for DSA.
+ """
+
+ @abc.abstractmethod
+ def load_dsa_private_numbers(self, numbers):
+ """
+ Returns a DSAPrivateKey provider.
+ """
+
+ @abc.abstractmethod
+ def load_dsa_public_numbers(self, numbers):
+ """
+ Returns a DSAPublicKey provider.
+ """
+
+ @abc.abstractmethod
+ def load_dsa_parameter_numbers(self, numbers):
+ """
+ Returns a DSAParameters provider.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class EllipticCurveBackend(object):
+ @abc.abstractmethod
+ def elliptic_curve_signature_algorithm_supported(
+ self, signature_algorithm, curve
+ ):
+ """
+ Returns True if the backend supports the named elliptic curve with the
+ specified signature algorithm.
+ """
+
+ @abc.abstractmethod
+ def elliptic_curve_supported(self, curve):
+ """
+ Returns True if the backend supports the named elliptic curve.
+ """
+
+ @abc.abstractmethod
+ def generate_elliptic_curve_private_key(self, curve):
+ """
+ Return an object conforming to the EllipticCurvePrivateKey interface.
+ """
+
+ @abc.abstractmethod
+ def load_elliptic_curve_public_numbers(self, numbers):
+ """
+ Return an EllipticCurvePublicKey provider using the given numbers.
+ """
+
+ @abc.abstractmethod
+ def load_elliptic_curve_private_numbers(self, numbers):
+ """
+ Return an EllipticCurvePrivateKey provider using the given numbers.
+ """
+
+ @abc.abstractmethod
+ def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve):
+ """
+ Returns whether the exchange algorithm is supported by this backend.
+ """
+
+ @abc.abstractmethod
+ def derive_elliptic_curve_private_key(self, private_value, curve):
+ """
+ Compute the private key given the private value and curve.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class PEMSerializationBackend(object):
+ @abc.abstractmethod
+ def load_pem_private_key(self, data, password):
+ """
+ Loads a private key from PEM encoded data, using the provided password
+ if the data is encrypted.
+ """
+
+ @abc.abstractmethod
+ def load_pem_public_key(self, data):
+ """
+ Loads a public key from PEM encoded data.
+ """
+
+ @abc.abstractmethod
+ def load_pem_parameters(self, data):
+ """
+ Load encryption parameters from PEM encoded data.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class DERSerializationBackend(object):
+ @abc.abstractmethod
+ def load_der_private_key(self, data, password):
+ """
+ Loads a private key from DER encoded data. Uses the provided password
+ if the data is encrypted.
+ """
+
+ @abc.abstractmethod
+ def load_der_public_key(self, data):
+ """
+ Loads a public key from DER encoded data.
+ """
+
+ @abc.abstractmethod
+ def load_der_parameters(self, data):
+ """
+ Load encryption parameters from DER encoded data.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class X509Backend(object):
+ @abc.abstractmethod
+ def load_pem_x509_certificate(self, data):
+ """
+ Load an X.509 certificate from PEM encoded data.
+ """
+
+ @abc.abstractmethod
+ def load_der_x509_certificate(self, data):
+ """
+ Load an X.509 certificate from DER encoded data.
+ """
+
+ @abc.abstractmethod
+ def load_der_x509_csr(self, data):
+ """
+ Load an X.509 CSR from DER encoded data.
+ """
+
+ @abc.abstractmethod
+ def load_pem_x509_csr(self, data):
+ """
+ Load an X.509 CSR from PEM encoded data.
+ """
+
+ @abc.abstractmethod
+ def create_x509_csr(self, builder, private_key, algorithm):
+ """
+ Create and sign an X.509 CSR from a CSR builder object.
+ """
+
+ @abc.abstractmethod
+ def create_x509_certificate(self, builder, private_key, algorithm):
+ """
+ Create and sign an X.509 certificate from a CertificateBuilder object.
+ """
+
+ @abc.abstractmethod
+ def create_x509_crl(self, builder, private_key, algorithm):
+ """
+ Create and sign an X.509 CertificateRevocationList from a
+ CertificateRevocationListBuilder object.
+ """
+
+ @abc.abstractmethod
+ def create_x509_revoked_certificate(self, builder):
+ """
+ Create a RevokedCertificate object from a RevokedCertificateBuilder
+ object.
+ """
+
+ @abc.abstractmethod
+ def x509_name_bytes(self, name):
+ """
+ Compute the DER encoded bytes of an X509 Name object.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class DHBackend(object):
+ @abc.abstractmethod
+ def generate_dh_parameters(self, generator, key_size):
+ """
+ Generate a DHParameters instance with a modulus of key_size bits.
+ Using the given generator. Often 2 or 5.
+ """
+
+ @abc.abstractmethod
+ def generate_dh_private_key(self, parameters):
+ """
+ Generate a DHPrivateKey instance with parameters as a DHParameters
+ object.
+ """
+
+ @abc.abstractmethod
+ def generate_dh_private_key_and_parameters(self, generator, key_size):
+ """
+ Generate a DHPrivateKey instance using key size only.
+ Using the given generator. Often 2 or 5.
+ """
+
+ @abc.abstractmethod
+ def load_dh_private_numbers(self, numbers):
+ """
+ Load a DHPrivateKey from DHPrivateNumbers
+ """
+
+ @abc.abstractmethod
+ def load_dh_public_numbers(self, numbers):
+ """
+ Load a DHPublicKey from DHPublicNumbers.
+ """
+
+ @abc.abstractmethod
+ def load_dh_parameter_numbers(self, numbers):
+ """
+ Load DHParameters from DHParameterNumbers.
+ """
+
+ @abc.abstractmethod
+ def dh_parameters_supported(self, p, g, q=None):
+ """
+ Returns whether the backend supports DH with these parameter values.
+ """
+
+ @abc.abstractmethod
+ def dh_x942_serialization_supported(self):
+ """
+ Returns True if the backend supports the serialization of DH objects
+ with subgroup order (q).
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class ScryptBackend(object):
+ @abc.abstractmethod
+ def derive_scrypt(self, key_material, salt, length, n, r, p):
+ """
+ Return bytes derived from provided Scrypt parameters.
+ """
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/__init__.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/__init__.py
new file mode 100644
index 0000000000..8eadeb6e18
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/__init__.py
@@ -0,0 +1,10 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography.hazmat.backends.openssl.backend import backend
+
+
+__all__ = ["backend"]
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/aead.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/aead.py
new file mode 100644
index 0000000000..4494916852
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/aead.py
@@ -0,0 +1,166 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography.exceptions import InvalidTag
+
+
+_ENCRYPT = 1
+_DECRYPT = 0
+
+
+def _aead_cipher_name(cipher):
+ from cryptography.hazmat.primitives.ciphers.aead import (
+ AESCCM,
+ AESGCM,
+ ChaCha20Poly1305,
+ )
+
+ if isinstance(cipher, ChaCha20Poly1305):
+ return b"chacha20-poly1305"
+ elif isinstance(cipher, AESCCM):
+ return "aes-{}-ccm".format(len(cipher._key) * 8).encode("ascii")
+ else:
+ assert isinstance(cipher, AESGCM)
+ return "aes-{}-gcm".format(len(cipher._key) * 8).encode("ascii")
+
+
+def _aead_setup(backend, cipher_name, key, nonce, tag, tag_len, operation):
+ evp_cipher = backend._lib.EVP_get_cipherbyname(cipher_name)
+ backend.openssl_assert(evp_cipher != backend._ffi.NULL)
+ ctx = backend._lib.EVP_CIPHER_CTX_new()
+ ctx = backend._ffi.gc(ctx, backend._lib.EVP_CIPHER_CTX_free)
+ res = backend._lib.EVP_CipherInit_ex(
+ ctx,
+ evp_cipher,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ int(operation == _ENCRYPT),
+ )
+ backend.openssl_assert(res != 0)
+ res = backend._lib.EVP_CIPHER_CTX_set_key_length(ctx, len(key))
+ backend.openssl_assert(res != 0)
+ res = backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx,
+ backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
+ len(nonce),
+ backend._ffi.NULL,
+ )
+ backend.openssl_assert(res != 0)
+ if operation == _DECRYPT:
+ res = backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag
+ )
+ backend.openssl_assert(res != 0)
+ elif cipher_name.endswith(b"-ccm"):
+ res = backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, tag_len, backend._ffi.NULL
+ )
+ backend.openssl_assert(res != 0)
+
+ nonce_ptr = backend._ffi.from_buffer(nonce)
+ key_ptr = backend._ffi.from_buffer(key)
+ res = backend._lib.EVP_CipherInit_ex(
+ ctx,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ key_ptr,
+ nonce_ptr,
+ int(operation == _ENCRYPT),
+ )
+ backend.openssl_assert(res != 0)
+ return ctx
+
+
+def _set_length(backend, ctx, data_len):
+ intptr = backend._ffi.new("int *")
+ res = backend._lib.EVP_CipherUpdate(
+ ctx, backend._ffi.NULL, intptr, backend._ffi.NULL, data_len
+ )
+ backend.openssl_assert(res != 0)
+
+
+def _process_aad(backend, ctx, associated_data):
+ outlen = backend._ffi.new("int *")
+ res = backend._lib.EVP_CipherUpdate(
+ ctx, backend._ffi.NULL, outlen, associated_data, len(associated_data)
+ )
+ backend.openssl_assert(res != 0)
+
+
+def _process_data(backend, ctx, data):
+ outlen = backend._ffi.new("int *")
+ buf = backend._ffi.new("unsigned char[]", len(data))
+ res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, data, len(data))
+ backend.openssl_assert(res != 0)
+ return backend._ffi.buffer(buf, outlen[0])[:]
+
+
+def _encrypt(backend, cipher, nonce, data, associated_data, tag_length):
+ from cryptography.hazmat.primitives.ciphers.aead import AESCCM
+
+ cipher_name = _aead_cipher_name(cipher)
+ ctx = _aead_setup(
+ backend, cipher_name, cipher._key, nonce, None, tag_length, _ENCRYPT
+ )
+ # CCM requires us to pass the length of the data before processing anything
+ # However calling this with any other AEAD results in an error
+ if isinstance(cipher, AESCCM):
+ _set_length(backend, ctx, len(data))
+
+ _process_aad(backend, ctx, associated_data)
+ processed_data = _process_data(backend, ctx, data)
+ outlen = backend._ffi.new("int *")
+ res = backend._lib.EVP_CipherFinal_ex(ctx, backend._ffi.NULL, outlen)
+ backend.openssl_assert(res != 0)
+ backend.openssl_assert(outlen[0] == 0)
+ tag_buf = backend._ffi.new("unsigned char[]", tag_length)
+ res = backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx, backend._lib.EVP_CTRL_AEAD_GET_TAG, tag_length, tag_buf
+ )
+ backend.openssl_assert(res != 0)
+ tag = backend._ffi.buffer(tag_buf)[:]
+
+ return processed_data + tag
+
+
+def _decrypt(backend, cipher, nonce, data, associated_data, tag_length):
+ from cryptography.hazmat.primitives.ciphers.aead import AESCCM
+
+ if len(data) < tag_length:
+ raise InvalidTag
+ tag = data[-tag_length:]
+ data = data[:-tag_length]
+ cipher_name = _aead_cipher_name(cipher)
+ ctx = _aead_setup(
+ backend, cipher_name, cipher._key, nonce, tag, tag_length, _DECRYPT
+ )
+ # CCM requires us to pass the length of the data before processing anything
+ # However calling this with any other AEAD results in an error
+ if isinstance(cipher, AESCCM):
+ _set_length(backend, ctx, len(data))
+
+ _process_aad(backend, ctx, associated_data)
+ # CCM has a different error path if the tag doesn't match. Errors are
+ # raised in Update and Final is irrelevant.
+ if isinstance(cipher, AESCCM):
+ outlen = backend._ffi.new("int *")
+ buf = backend._ffi.new("unsigned char[]", len(data))
+ res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, data, len(data))
+ if res != 1:
+ backend._consume_errors()
+ raise InvalidTag
+
+ processed_data = backend._ffi.buffer(buf, outlen[0])[:]
+ else:
+ processed_data = _process_data(backend, ctx, data)
+ outlen = backend._ffi.new("int *")
+ res = backend._lib.EVP_CipherFinal_ex(ctx, backend._ffi.NULL, outlen)
+ if res == 0:
+ backend._consume_errors()
+ raise InvalidTag
+
+ return processed_data
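A minimal sketch showing where these helpers are used: the public AEAD classes in cryptography.hazmat.primitives.ciphers.aead (also vendored by this commit) delegate their encrypt()/decrypt() calls to the _encrypt()/_decrypt() functions above (illustration only, not part of the vendored sources):

    import os

    from cryptography.hazmat.primitives.ciphers.aead import AESGCM

    key = AESGCM.generate_key(bit_length=256)
    aesgcm = AESGCM(key)
    nonce = os.urandom(12)                             # 96-bit nonce, the recommended size for GCM
    ct = aesgcm.encrypt(nonce, b"payload", b"header")  # returns ciphertext || 16-byte tag
    assert aesgcm.decrypt(nonce, ct, b"header") == b"payload"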
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/backend.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/backend.py
new file mode 100644
index 0000000000..45d4a1a1ee
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/backend.py
@@ -0,0 +1,2776 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import collections
+import contextlib
+import itertools
+import warnings
+from contextlib import contextmanager
+
+import six
+from six.moves import range
+
+from cryptography import utils, x509
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat._der import (
+ INTEGER,
+ NULL,
+ SEQUENCE,
+ encode_der,
+ encode_der_integer,
+)
+from cryptography.hazmat.backends.interfaces import (
+ CMACBackend,
+ CipherBackend,
+ DERSerializationBackend,
+ DHBackend,
+ DSABackend,
+ EllipticCurveBackend,
+ HMACBackend,
+ HashBackend,
+ PBKDF2HMACBackend,
+ PEMSerializationBackend,
+ RSABackend,
+ ScryptBackend,
+ X509Backend,
+)
+from cryptography.hazmat.backends.openssl import aead
+from cryptography.hazmat.backends.openssl.ciphers import _CipherContext
+from cryptography.hazmat.backends.openssl.cmac import _CMACContext
+from cryptography.hazmat.backends.openssl.decode_asn1 import (
+ _CRL_ENTRY_REASON_ENUM_TO_CODE,
+ _CRL_EXTENSION_HANDLERS,
+ _EXTENSION_HANDLERS_BASE,
+ _EXTENSION_HANDLERS_SCT,
+ _OCSP_BASICRESP_EXTENSION_HANDLERS,
+ _OCSP_REQ_EXTENSION_HANDLERS,
+ _OCSP_SINGLERESP_EXTENSION_HANDLERS_SCT,
+ _REVOKED_EXTENSION_HANDLERS,
+ _X509ExtensionParser,
+)
+from cryptography.hazmat.backends.openssl.dh import (
+ _DHParameters,
+ _DHPrivateKey,
+ _DHPublicKey,
+ _dh_params_dup,
+)
+from cryptography.hazmat.backends.openssl.dsa import (
+ _DSAParameters,
+ _DSAPrivateKey,
+ _DSAPublicKey,
+)
+from cryptography.hazmat.backends.openssl.ec import (
+ _EllipticCurvePrivateKey,
+ _EllipticCurvePublicKey,
+)
+from cryptography.hazmat.backends.openssl.ed25519 import (
+ _Ed25519PrivateKey,
+ _Ed25519PublicKey,
+)
+from cryptography.hazmat.backends.openssl.ed448 import (
+ _ED448_KEY_SIZE,
+ _Ed448PrivateKey,
+ _Ed448PublicKey,
+)
+from cryptography.hazmat.backends.openssl.encode_asn1 import (
+ _CRL_ENTRY_EXTENSION_ENCODE_HANDLERS,
+ _CRL_EXTENSION_ENCODE_HANDLERS,
+ _EXTENSION_ENCODE_HANDLERS,
+ _OCSP_BASICRESP_EXTENSION_ENCODE_HANDLERS,
+ _OCSP_REQUEST_EXTENSION_ENCODE_HANDLERS,
+ _encode_asn1_int_gc,
+ _encode_asn1_str_gc,
+ _encode_name_gc,
+ _txt2obj_gc,
+)
+from cryptography.hazmat.backends.openssl.hashes import _HashContext
+from cryptography.hazmat.backends.openssl.hmac import _HMACContext
+from cryptography.hazmat.backends.openssl.ocsp import (
+ _OCSPRequest,
+ _OCSPResponse,
+)
+from cryptography.hazmat.backends.openssl.poly1305 import (
+ _POLY1305_KEY_SIZE,
+ _Poly1305Context,
+)
+from cryptography.hazmat.backends.openssl.rsa import (
+ _RSAPrivateKey,
+ _RSAPublicKey,
+)
+from cryptography.hazmat.backends.openssl.x25519 import (
+ _X25519PrivateKey,
+ _X25519PublicKey,
+)
+from cryptography.hazmat.backends.openssl.x448 import (
+ _X448PrivateKey,
+ _X448PublicKey,
+)
+from cryptography.hazmat.backends.openssl.x509 import (
+ _Certificate,
+ _CertificateRevocationList,
+ _CertificateSigningRequest,
+ _RevokedCertificate,
+)
+from cryptography.hazmat.bindings.openssl import binding
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import (
+ dh,
+ dsa,
+ ec,
+ ed25519,
+ ed448,
+ rsa,
+)
+from cryptography.hazmat.primitives.asymmetric.padding import (
+ MGF1,
+ OAEP,
+ PKCS1v15,
+ PSS,
+)
+from cryptography.hazmat.primitives.ciphers.algorithms import (
+ AES,
+ ARC4,
+ Blowfish,
+ CAST5,
+ Camellia,
+ ChaCha20,
+ IDEA,
+ SEED,
+ TripleDES,
+)
+from cryptography.hazmat.primitives.ciphers.modes import (
+ CBC,
+ CFB,
+ CFB8,
+ CTR,
+ ECB,
+ GCM,
+ OFB,
+ XTS,
+)
+from cryptography.hazmat.primitives.kdf import scrypt
+from cryptography.hazmat.primitives.serialization import pkcs7, ssh
+from cryptography.x509 import ocsp
+
+
+_MemoryBIO = collections.namedtuple("_MemoryBIO", ["bio", "char_ptr"])
+
+
+# Not actually supported, just used as a marker for some serialization tests.
+class _RC2(object):
+ pass
+
+
+@utils.register_interface(CipherBackend)
+@utils.register_interface(CMACBackend)
+@utils.register_interface(DERSerializationBackend)
+@utils.register_interface(DHBackend)
+@utils.register_interface(DSABackend)
+@utils.register_interface(EllipticCurveBackend)
+@utils.register_interface(HashBackend)
+@utils.register_interface(HMACBackend)
+@utils.register_interface(PBKDF2HMACBackend)
+@utils.register_interface(RSABackend)
+@utils.register_interface(PEMSerializationBackend)
+@utils.register_interface(X509Backend)
+@utils.register_interface_if(
+ binding.Binding().lib.Cryptography_HAS_SCRYPT, ScryptBackend
+)
+class Backend(object):
+ """
+ OpenSSL API binding interfaces.
+ """
+
+ name = "openssl"
+
+ # FIPS has opinions about acceptable algorithms and key sizes, but the
+ # disallowed algorithms are still present in OpenSSL. They just error if
+ # you try to use them. To avoid that we allowlist the algorithms in
+ # FIPS 140-3. This isn't ideal, but FIPS 140-3 is trash so here we are.
+ _fips_aead = {
+ b"aes-128-ccm",
+ b"aes-192-ccm",
+ b"aes-256-ccm",
+ b"aes-128-gcm",
+ b"aes-192-gcm",
+ b"aes-256-gcm",
+ }
+ _fips_ciphers = (AES, TripleDES)
+ _fips_hashes = (
+ hashes.SHA1,
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+ hashes.SHA512_224,
+ hashes.SHA512_256,
+ hashes.SHA3_224,
+ hashes.SHA3_256,
+ hashes.SHA3_384,
+ hashes.SHA3_512,
+ hashes.SHAKE128,
+ hashes.SHAKE256,
+ )
+ _fips_rsa_min_key_size = 2048
+ _fips_rsa_min_public_exponent = 65537
+ _fips_dsa_min_modulus = 1 << 2048
+ _fips_dh_min_key_size = 2048
+ _fips_dh_min_modulus = 1 << _fips_dh_min_key_size
+
+ def __init__(self):
+ self._binding = binding.Binding()
+ self._ffi = self._binding.ffi
+ self._lib = self._binding.lib
+ self._fips_enabled = self._is_fips_enabled()
+
+ self._cipher_registry = {}
+ self._register_default_ciphers()
+ self._register_x509_ext_parsers()
+ self._register_x509_encoders()
+ if self._fips_enabled and self._lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE:
+ warnings.warn(
+ "OpenSSL FIPS mode is enabled. Can't enable DRBG fork safety.",
+ UserWarning,
+ )
+ else:
+ self.activate_osrandom_engine()
+ self._dh_types = [self._lib.EVP_PKEY_DH]
+ if self._lib.Cryptography_HAS_EVP_PKEY_DHX:
+ self._dh_types.append(self._lib.EVP_PKEY_DHX)
+
+ def openssl_assert(self, ok, errors=None):
+ return binding._openssl_assert(self._lib, ok, errors=errors)
+
+ def _is_fips_enabled(self):
+ fips_mode = getattr(self._lib, "FIPS_mode", lambda: 0)
+ mode = fips_mode()
+ if mode == 0:
+ # OpenSSL without FIPS pushes an error on the error stack
+ self._lib.ERR_clear_error()
+ return bool(mode)
+
+ def activate_builtin_random(self):
+ if self._lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE:
+ # Obtain a new structural reference.
+ e = self._lib.ENGINE_get_default_RAND()
+ if e != self._ffi.NULL:
+ self._lib.ENGINE_unregister_RAND(e)
+ # Reset the RNG to use the built-in.
+ res = self._lib.RAND_set_rand_method(self._ffi.NULL)
+ self.openssl_assert(res == 1)
+ # decrement the structural reference from get_default_RAND
+ res = self._lib.ENGINE_finish(e)
+ self.openssl_assert(res == 1)
+
+ @contextlib.contextmanager
+ def _get_osurandom_engine(self):
+ # Fetches an engine by id and returns it. This creates a structural
+ # reference.
+ e = self._lib.ENGINE_by_id(self._lib.Cryptography_osrandom_engine_id)
+ self.openssl_assert(e != self._ffi.NULL)
+ # Initialize the engine for use. This adds a functional reference.
+ res = self._lib.ENGINE_init(e)
+ self.openssl_assert(res == 1)
+
+ try:
+ yield e
+ finally:
+ # Decrement the structural ref incremented by ENGINE_by_id.
+ res = self._lib.ENGINE_free(e)
+ self.openssl_assert(res == 1)
+ # Decrement the functional ref incremented by ENGINE_init.
+ res = self._lib.ENGINE_finish(e)
+ self.openssl_assert(res == 1)
+
+ def activate_osrandom_engine(self):
+ if self._lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE:
+ # Unregister and free the current engine.
+ self.activate_builtin_random()
+ with self._get_osurandom_engine() as e:
+ # Set the engine as the default RAND provider.
+ res = self._lib.ENGINE_set_default_RAND(e)
+ self.openssl_assert(res == 1)
+ # Reset the RNG to use the engine
+ res = self._lib.RAND_set_rand_method(self._ffi.NULL)
+ self.openssl_assert(res == 1)
+
+ def osrandom_engine_implementation(self):
+ buf = self._ffi.new("char[]", 64)
+ with self._get_osurandom_engine() as e:
+ res = self._lib.ENGINE_ctrl_cmd(
+ e, b"get_implementation", len(buf), buf, self._ffi.NULL, 0
+ )
+ self.openssl_assert(res > 0)
+ return self._ffi.string(buf).decode("ascii")
+
+ def openssl_version_text(self):
+ """
+ Friendly string name of the loaded OpenSSL library. This is not
+ necessarily the same version as it was compiled against.
+
+ Example: OpenSSL 1.1.1d 10 Sep 2019
+ """
+ return self._ffi.string(
+ self._lib.OpenSSL_version(self._lib.OPENSSL_VERSION)
+ ).decode("ascii")
+
+ def openssl_version_number(self):
+ return self._lib.OpenSSL_version_num()
+
+ def create_hmac_ctx(self, key, algorithm):
+ return _HMACContext(self, key, algorithm)
+
+ def _evp_md_from_algorithm(self, algorithm):
+ if algorithm.name == "blake2b" or algorithm.name == "blake2s":
+ alg = "{}{}".format(
+ algorithm.name, algorithm.digest_size * 8
+ ).encode("ascii")
+ else:
+ alg = algorithm.name.encode("ascii")
+
+ evp_md = self._lib.EVP_get_digestbyname(alg)
+ return evp_md
+
+ def _evp_md_non_null_from_algorithm(self, algorithm):
+ evp_md = self._evp_md_from_algorithm(algorithm)
+ self.openssl_assert(evp_md != self._ffi.NULL)
+ return evp_md
+
+ def hash_supported(self, algorithm):
+ if self._fips_enabled and not isinstance(algorithm, self._fips_hashes):
+ return False
+
+ evp_md = self._evp_md_from_algorithm(algorithm)
+ return evp_md != self._ffi.NULL
+
+ def hmac_supported(self, algorithm):
+ return self.hash_supported(algorithm)
+
+ def create_hash_ctx(self, algorithm):
+ return _HashContext(self, algorithm)
+
+ def cipher_supported(self, cipher, mode):
+ if self._fips_enabled and not isinstance(cipher, self._fips_ciphers):
+ return False
+ try:
+ adapter = self._cipher_registry[type(cipher), type(mode)]
+ except KeyError:
+ return False
+ evp_cipher = adapter(self, cipher, mode)
+ return self._ffi.NULL != evp_cipher
+
+ def register_cipher_adapter(self, cipher_cls, mode_cls, adapter):
+ if (cipher_cls, mode_cls) in self._cipher_registry:
+ raise ValueError(
+ "Duplicate registration for: {} {}.".format(
+ cipher_cls, mode_cls
+ )
+ )
+ self._cipher_registry[cipher_cls, mode_cls] = adapter
+
+ def _register_default_ciphers(self):
+ for mode_cls in [CBC, CTR, ECB, OFB, CFB, CFB8, GCM]:
+ self.register_cipher_adapter(
+ AES,
+ mode_cls,
+ GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}"),
+ )
+ for mode_cls in [CBC, CTR, ECB, OFB, CFB]:
+ self.register_cipher_adapter(
+ Camellia,
+ mode_cls,
+ GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}"),
+ )
+ for mode_cls in [CBC, CFB, CFB8, OFB]:
+ self.register_cipher_adapter(
+ TripleDES, mode_cls, GetCipherByName("des-ede3-{mode.name}")
+ )
+ self.register_cipher_adapter(
+ TripleDES, ECB, GetCipherByName("des-ede3")
+ )
+ for mode_cls in [CBC, CFB, OFB, ECB]:
+ self.register_cipher_adapter(
+ Blowfish, mode_cls, GetCipherByName("bf-{mode.name}")
+ )
+ for mode_cls in [CBC, CFB, OFB, ECB]:
+ self.register_cipher_adapter(
+ SEED, mode_cls, GetCipherByName("seed-{mode.name}")
+ )
+ for cipher_cls, mode_cls in itertools.product(
+ [CAST5, IDEA],
+ [CBC, OFB, CFB, ECB],
+ ):
+ self.register_cipher_adapter(
+ cipher_cls,
+ mode_cls,
+ GetCipherByName("{cipher.name}-{mode.name}"),
+ )
+ self.register_cipher_adapter(ARC4, type(None), GetCipherByName("rc4"))
+ # We don't actually support RC2, this is just used by some tests.
+ self.register_cipher_adapter(_RC2, type(None), GetCipherByName("rc2"))
+ self.register_cipher_adapter(
+ ChaCha20, type(None), GetCipherByName("chacha20")
+ )
+ self.register_cipher_adapter(AES, XTS, _get_xts_cipher)
+
+ def _register_x509_ext_parsers(self):
+ ext_handlers = _EXTENSION_HANDLERS_BASE.copy()
+ # All revoked extensions are valid single response extensions, see:
+ # https://tools.ietf.org/html/rfc6960#section-4.4.5
+ singleresp_handlers = _REVOKED_EXTENSION_HANDLERS.copy()
+
+ if self._lib.Cryptography_HAS_SCT:
+ ext_handlers.update(_EXTENSION_HANDLERS_SCT)
+ singleresp_handlers.update(_OCSP_SINGLERESP_EXTENSION_HANDLERS_SCT)
+
+ self._certificate_extension_parser = _X509ExtensionParser(
+ self,
+ ext_count=self._lib.X509_get_ext_count,
+ get_ext=self._lib.X509_get_ext,
+ handlers=ext_handlers,
+ )
+ self._csr_extension_parser = _X509ExtensionParser(
+ self,
+ ext_count=self._lib.sk_X509_EXTENSION_num,
+ get_ext=self._lib.sk_X509_EXTENSION_value,
+ handlers=ext_handlers,
+ )
+ self._revoked_cert_extension_parser = _X509ExtensionParser(
+ self,
+ ext_count=self._lib.X509_REVOKED_get_ext_count,
+ get_ext=self._lib.X509_REVOKED_get_ext,
+ handlers=_REVOKED_EXTENSION_HANDLERS,
+ )
+ self._crl_extension_parser = _X509ExtensionParser(
+ self,
+ ext_count=self._lib.X509_CRL_get_ext_count,
+ get_ext=self._lib.X509_CRL_get_ext,
+ handlers=_CRL_EXTENSION_HANDLERS,
+ )
+ self._ocsp_req_ext_parser = _X509ExtensionParser(
+ self,
+ ext_count=self._lib.OCSP_REQUEST_get_ext_count,
+ get_ext=self._lib.OCSP_REQUEST_get_ext,
+ handlers=_OCSP_REQ_EXTENSION_HANDLERS,
+ )
+ self._ocsp_basicresp_ext_parser = _X509ExtensionParser(
+ self,
+ ext_count=self._lib.OCSP_BASICRESP_get_ext_count,
+ get_ext=self._lib.OCSP_BASICRESP_get_ext,
+ handlers=_OCSP_BASICRESP_EXTENSION_HANDLERS,
+ )
+ self._ocsp_singleresp_ext_parser = _X509ExtensionParser(
+ self,
+ ext_count=self._lib.OCSP_SINGLERESP_get_ext_count,
+ get_ext=self._lib.OCSP_SINGLERESP_get_ext,
+ handlers=singleresp_handlers,
+ )
+
+ def _register_x509_encoders(self):
+ self._extension_encode_handlers = _EXTENSION_ENCODE_HANDLERS.copy()
+ self._crl_extension_encode_handlers = (
+ _CRL_EXTENSION_ENCODE_HANDLERS.copy()
+ )
+ self._crl_entry_extension_encode_handlers = (
+ _CRL_ENTRY_EXTENSION_ENCODE_HANDLERS.copy()
+ )
+ self._ocsp_request_extension_encode_handlers = (
+ _OCSP_REQUEST_EXTENSION_ENCODE_HANDLERS.copy()
+ )
+ self._ocsp_basicresp_extension_encode_handlers = (
+ _OCSP_BASICRESP_EXTENSION_ENCODE_HANDLERS.copy()
+ )
+
+ def create_symmetric_encryption_ctx(self, cipher, mode):
+ return _CipherContext(self, cipher, mode, _CipherContext._ENCRYPT)
+
+ def create_symmetric_decryption_ctx(self, cipher, mode):
+ return _CipherContext(self, cipher, mode, _CipherContext._DECRYPT)
+
+ def pbkdf2_hmac_supported(self, algorithm):
+ return self.hmac_supported(algorithm)
+
+ def derive_pbkdf2_hmac(
+ self, algorithm, length, salt, iterations, key_material
+ ):
+ buf = self._ffi.new("unsigned char[]", length)
+ evp_md = self._evp_md_non_null_from_algorithm(algorithm)
+ key_material_ptr = self._ffi.from_buffer(key_material)
+ res = self._lib.PKCS5_PBKDF2_HMAC(
+ key_material_ptr,
+ len(key_material),
+ salt,
+ len(salt),
+ iterations,
+ evp_md,
+ length,
+ buf,
+ )
+ self.openssl_assert(res == 1)
+ return self._ffi.buffer(buf)[:]
+
+ def _consume_errors(self):
+ return binding._consume_errors(self._lib)
+
+ def _consume_errors_with_text(self):
+ return binding._consume_errors_with_text(self._lib)
+
+ def _bn_to_int(self, bn):
+ assert bn != self._ffi.NULL
+
+ if not six.PY2:
+ # Python 3 has constant time from_bytes, so use that.
+ bn_num_bytes = self._lib.BN_num_bytes(bn)
+ bin_ptr = self._ffi.new("unsigned char[]", bn_num_bytes)
+ bin_len = self._lib.BN_bn2bin(bn, bin_ptr)
+ # A zero length means the BN has value 0
+ self.openssl_assert(bin_len >= 0)
+ val = int.from_bytes(self._ffi.buffer(bin_ptr)[:bin_len], "big")
+ if self._lib.BN_is_negative(bn):
+ val = -val
+ return val
+ else:
+ # Under Python 2 the best we can do is hex()
+ hex_cdata = self._lib.BN_bn2hex(bn)
+ self.openssl_assert(hex_cdata != self._ffi.NULL)
+ hex_str = self._ffi.string(hex_cdata)
+ self._lib.OPENSSL_free(hex_cdata)
+ return int(hex_str, 16)
+
+ def _int_to_bn(self, num, bn=None):
+ """
+ Converts a python integer to a BIGNUM. The returned BIGNUM will not
+ be garbage collected (to support adding them to structs that take
+ ownership of the object). Be sure to register it for GC if it will
+ be discarded after use.
+ """
+ assert bn is None or bn != self._ffi.NULL
+
+ if bn is None:
+ bn = self._ffi.NULL
+
+ if not six.PY2:
+ # Python 3 has constant time to_bytes, so use that.
+
+ binary = num.to_bytes(int(num.bit_length() / 8.0 + 1), "big")
+ bn_ptr = self._lib.BN_bin2bn(binary, len(binary), bn)
+ self.openssl_assert(bn_ptr != self._ffi.NULL)
+ return bn_ptr
+
+ else:
+ # Under Python 2 the best we can do is hex(), [2:] removes the 0x
+ # prefix.
+ hex_num = hex(num).rstrip("L")[2:].encode("ascii")
+ bn_ptr = self._ffi.new("BIGNUM **")
+ bn_ptr[0] = bn
+ res = self._lib.BN_hex2bn(bn_ptr, hex_num)
+ self.openssl_assert(res != 0)
+ self.openssl_assert(bn_ptr[0] != self._ffi.NULL)
+ return bn_ptr[0]
+
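+ # Illustrative example of the ownership rule described in the docstring
+ # above, mirroring the pattern used in generate_rsa_private_key below:
+ #
+ #     bn = self._int_to_bn(public_exponent)
+ #     bn = self._ffi.gc(bn, self._lib.BN_free)
+ #
+ # BIGNUMs passed straight into RSA_set0_key / DSA_set0_pqg / DH_set0_pqg
+ # are intentionally left unwrapped, since those structs take ownership.
+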
+ def generate_rsa_private_key(self, public_exponent, key_size):
+ rsa._verify_rsa_parameters(public_exponent, key_size)
+
+ rsa_cdata = self._lib.RSA_new()
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+
+ bn = self._int_to_bn(public_exponent)
+ bn = self._ffi.gc(bn, self._lib.BN_free)
+
+ res = self._lib.RSA_generate_key_ex(
+ rsa_cdata, key_size, bn, self._ffi.NULL
+ )
+ self.openssl_assert(res == 1)
+ evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)
+
+ return _RSAPrivateKey(self, rsa_cdata, evp_pkey)
+
+ def generate_rsa_parameters_supported(self, public_exponent, key_size):
+ return (
+ public_exponent >= 3
+ and public_exponent & 1 != 0
+ and key_size >= 512
+ )
+
+ def load_rsa_private_numbers(self, numbers):
+ rsa._check_private_key_components(
+ numbers.p,
+ numbers.q,
+ numbers.d,
+ numbers.dmp1,
+ numbers.dmq1,
+ numbers.iqmp,
+ numbers.public_numbers.e,
+ numbers.public_numbers.n,
+ )
+ rsa_cdata = self._lib.RSA_new()
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ p = self._int_to_bn(numbers.p)
+ q = self._int_to_bn(numbers.q)
+ d = self._int_to_bn(numbers.d)
+ dmp1 = self._int_to_bn(numbers.dmp1)
+ dmq1 = self._int_to_bn(numbers.dmq1)
+ iqmp = self._int_to_bn(numbers.iqmp)
+ e = self._int_to_bn(numbers.public_numbers.e)
+ n = self._int_to_bn(numbers.public_numbers.n)
+ res = self._lib.RSA_set0_factors(rsa_cdata, p, q)
+ self.openssl_assert(res == 1)
+ res = self._lib.RSA_set0_key(rsa_cdata, n, e, d)
+ self.openssl_assert(res == 1)
+ res = self._lib.RSA_set0_crt_params(rsa_cdata, dmp1, dmq1, iqmp)
+ self.openssl_assert(res == 1)
+ evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)
+
+ return _RSAPrivateKey(self, rsa_cdata, evp_pkey)
+
+ def load_rsa_public_numbers(self, numbers):
+ rsa._check_public_key_components(numbers.e, numbers.n)
+ rsa_cdata = self._lib.RSA_new()
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ e = self._int_to_bn(numbers.e)
+ n = self._int_to_bn(numbers.n)
+ res = self._lib.RSA_set0_key(rsa_cdata, n, e, self._ffi.NULL)
+ self.openssl_assert(res == 1)
+ evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)
+
+ return _RSAPublicKey(self, rsa_cdata, evp_pkey)
+
+ def _create_evp_pkey_gc(self):
+ evp_pkey = self._lib.EVP_PKEY_new()
+ self.openssl_assert(evp_pkey != self._ffi.NULL)
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+ return evp_pkey
+
+ def _rsa_cdata_to_evp_pkey(self, rsa_cdata):
+ evp_pkey = self._create_evp_pkey_gc()
+ res = self._lib.EVP_PKEY_set1_RSA(evp_pkey, rsa_cdata)
+ self.openssl_assert(res == 1)
+ return evp_pkey
+
+ def _bytes_to_bio(self, data):
+ """
+ Return a _MemoryBIO namedtuple of (BIO, char*).
+
+ The char* is the storage for the BIO and it must stay alive until the
+ BIO is finished with.
+ """
+ data_ptr = self._ffi.from_buffer(data)
+ bio = self._lib.BIO_new_mem_buf(data_ptr, len(data))
+ self.openssl_assert(bio != self._ffi.NULL)
+
+ return _MemoryBIO(self._ffi.gc(bio, self._lib.BIO_free), data_ptr)
+
+ def _create_mem_bio_gc(self):
+ """
+ Creates an empty memory BIO.
+ """
+ bio_method = self._lib.BIO_s_mem()
+ self.openssl_assert(bio_method != self._ffi.NULL)
+ bio = self._lib.BIO_new(bio_method)
+ self.openssl_assert(bio != self._ffi.NULL)
+ bio = self._ffi.gc(bio, self._lib.BIO_free)
+ return bio
+
+ def _read_mem_bio(self, bio):
+ """
+ Reads a memory BIO. This only works on memory BIOs.
+ """
+ buf = self._ffi.new("char **")
+ buf_len = self._lib.BIO_get_mem_data(bio, buf)
+ self.openssl_assert(buf_len > 0)
+ self.openssl_assert(buf[0] != self._ffi.NULL)
+ bio_data = self._ffi.buffer(buf[0], buf_len)[:]
+ return bio_data
+
+ def _evp_pkey_to_private_key(self, evp_pkey):
+ """
+ Return the appropriate type of PrivateKey given an evp_pkey cdata
+ pointer.
+ """
+
+ key_type = self._lib.EVP_PKEY_id(evp_pkey)
+
+ if key_type == self._lib.EVP_PKEY_RSA:
+ rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey)
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ return _RSAPrivateKey(self, rsa_cdata, evp_pkey)
+ elif key_type == self._lib.EVP_PKEY_DSA:
+ dsa_cdata = self._lib.EVP_PKEY_get1_DSA(evp_pkey)
+ self.openssl_assert(dsa_cdata != self._ffi.NULL)
+ dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free)
+ return _DSAPrivateKey(self, dsa_cdata, evp_pkey)
+ elif key_type == self._lib.EVP_PKEY_EC:
+ ec_cdata = self._lib.EVP_PKEY_get1_EC_KEY(evp_pkey)
+ self.openssl_assert(ec_cdata != self._ffi.NULL)
+ ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
+ return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
+ elif key_type in self._dh_types:
+ dh_cdata = self._lib.EVP_PKEY_get1_DH(evp_pkey)
+ self.openssl_assert(dh_cdata != self._ffi.NULL)
+ dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
+ return _DHPrivateKey(self, dh_cdata, evp_pkey)
+ elif key_type == getattr(self._lib, "EVP_PKEY_ED25519", None):
+ # EVP_PKEY_ED25519 is not present in OpenSSL < 1.1.1
+ return _Ed25519PrivateKey(self, evp_pkey)
+ elif key_type == getattr(self._lib, "EVP_PKEY_X448", None):
+ # EVP_PKEY_X448 is not present in OpenSSL < 1.1.1
+ return _X448PrivateKey(self, evp_pkey)
+ elif key_type == getattr(self._lib, "EVP_PKEY_X25519", None):
+ # EVP_PKEY_X25519 is not present in OpenSSL < 1.1.0
+ return _X25519PrivateKey(self, evp_pkey)
+ elif key_type == getattr(self._lib, "EVP_PKEY_ED448", None):
+ # EVP_PKEY_ED448 is not present in OpenSSL < 1.1.1
+ return _Ed448PrivateKey(self, evp_pkey)
+ else:
+ raise UnsupportedAlgorithm("Unsupported key type.")
+
+ def _evp_pkey_to_public_key(self, evp_pkey):
+ """
+ Return the appropriate type of PublicKey given an evp_pkey cdata
+ pointer.
+ """
+
+ key_type = self._lib.EVP_PKEY_id(evp_pkey)
+
+ if key_type == self._lib.EVP_PKEY_RSA:
+ rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey)
+ self.openssl_assert(rsa_cdata != self._ffi.NULL)
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ return _RSAPublicKey(self, rsa_cdata, evp_pkey)
+ elif key_type == self._lib.EVP_PKEY_DSA:
+ dsa_cdata = self._lib.EVP_PKEY_get1_DSA(evp_pkey)
+ self.openssl_assert(dsa_cdata != self._ffi.NULL)
+ dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free)
+ return _DSAPublicKey(self, dsa_cdata, evp_pkey)
+ elif key_type == self._lib.EVP_PKEY_EC:
+ ec_cdata = self._lib.EVP_PKEY_get1_EC_KEY(evp_pkey)
+ self.openssl_assert(ec_cdata != self._ffi.NULL)
+ ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
+ return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey)
+ elif key_type in self._dh_types:
+ dh_cdata = self._lib.EVP_PKEY_get1_DH(evp_pkey)
+ self.openssl_assert(dh_cdata != self._ffi.NULL)
+ dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
+ return _DHPublicKey(self, dh_cdata, evp_pkey)
+ elif key_type == getattr(self._lib, "EVP_PKEY_ED25519", None):
+ # EVP_PKEY_ED25519 is not present in OpenSSL < 1.1.1
+ return _Ed25519PublicKey(self, evp_pkey)
+ elif key_type == getattr(self._lib, "EVP_PKEY_X448", None):
+ # EVP_PKEY_X448 is not present in OpenSSL < 1.1.1
+ return _X448PublicKey(self, evp_pkey)
+ elif key_type == getattr(self._lib, "EVP_PKEY_X25519", None):
+ # EVP_PKEY_X25519 is not present in OpenSSL < 1.1.0
+ return _X25519PublicKey(self, evp_pkey)
+ elif key_type == getattr(self._lib, "EVP_PKEY_ED448", None):
+ # EVP_PKEY_X25519 is not present in OpenSSL < 1.1.1
+ return _Ed448PublicKey(self, evp_pkey)
+ else:
+ raise UnsupportedAlgorithm("Unsupported key type.")
+
+ def _oaep_hash_supported(self, algorithm):
+ if self._lib.Cryptography_HAS_RSA_OAEP_MD:
+ return isinstance(
+ algorithm,
+ (
+ hashes.SHA1,
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+ ),
+ )
+ else:
+ return isinstance(algorithm, hashes.SHA1)
+
+ def rsa_padding_supported(self, padding):
+ if isinstance(padding, PKCS1v15):
+ return True
+ elif isinstance(padding, PSS) and isinstance(padding._mgf, MGF1):
+ return self.hash_supported(padding._mgf._algorithm)
+ elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1):
+ return (
+ self._oaep_hash_supported(padding._mgf._algorithm)
+ and self._oaep_hash_supported(padding._algorithm)
+ and (
+ (padding._label is None or len(padding._label) == 0)
+ or self._lib.Cryptography_HAS_RSA_OAEP_LABEL == 1
+ )
+ )
+ else:
+ return False
+
+ def generate_dsa_parameters(self, key_size):
+ if key_size not in (1024, 2048, 3072, 4096):
+ raise ValueError(
+ "Key size must be 1024, 2048, 3072, or 4096 bits."
+ )
+
+ ctx = self._lib.DSA_new()
+ self.openssl_assert(ctx != self._ffi.NULL)
+ ctx = self._ffi.gc(ctx, self._lib.DSA_free)
+
+ res = self._lib.DSA_generate_parameters_ex(
+ ctx,
+ key_size,
+ self._ffi.NULL,
+ 0,
+ self._ffi.NULL,
+ self._ffi.NULL,
+ self._ffi.NULL,
+ )
+
+ self.openssl_assert(res == 1)
+
+ return _DSAParameters(self, ctx)
+
+ def generate_dsa_private_key(self, parameters):
+ ctx = self._lib.DSAparams_dup(parameters._dsa_cdata)
+ self.openssl_assert(ctx != self._ffi.NULL)
+ ctx = self._ffi.gc(ctx, self._lib.DSA_free)
+ self._lib.DSA_generate_key(ctx)
+ evp_pkey = self._dsa_cdata_to_evp_pkey(ctx)
+
+ return _DSAPrivateKey(self, ctx, evp_pkey)
+
+ def generate_dsa_private_key_and_parameters(self, key_size):
+ parameters = self.generate_dsa_parameters(key_size)
+ return self.generate_dsa_private_key(parameters)
+
+ def _dsa_cdata_set_values(self, dsa_cdata, p, q, g, pub_key, priv_key):
+ res = self._lib.DSA_set0_pqg(dsa_cdata, p, q, g)
+ self.openssl_assert(res == 1)
+ res = self._lib.DSA_set0_key(dsa_cdata, pub_key, priv_key)
+ self.openssl_assert(res == 1)
+
+ def load_dsa_private_numbers(self, numbers):
+ dsa._check_dsa_private_numbers(numbers)
+ parameter_numbers = numbers.public_numbers.parameter_numbers
+
+ dsa_cdata = self._lib.DSA_new()
+ self.openssl_assert(dsa_cdata != self._ffi.NULL)
+ dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free)
+
+ p = self._int_to_bn(parameter_numbers.p)
+ q = self._int_to_bn(parameter_numbers.q)
+ g = self._int_to_bn(parameter_numbers.g)
+ pub_key = self._int_to_bn(numbers.public_numbers.y)
+ priv_key = self._int_to_bn(numbers.x)
+ self._dsa_cdata_set_values(dsa_cdata, p, q, g, pub_key, priv_key)
+
+ evp_pkey = self._dsa_cdata_to_evp_pkey(dsa_cdata)
+
+ return _DSAPrivateKey(self, dsa_cdata, evp_pkey)
+
+ def load_dsa_public_numbers(self, numbers):
+ dsa._check_dsa_parameters(numbers.parameter_numbers)
+ dsa_cdata = self._lib.DSA_new()
+ self.openssl_assert(dsa_cdata != self._ffi.NULL)
+ dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free)
+
+ p = self._int_to_bn(numbers.parameter_numbers.p)
+ q = self._int_to_bn(numbers.parameter_numbers.q)
+ g = self._int_to_bn(numbers.parameter_numbers.g)
+ pub_key = self._int_to_bn(numbers.y)
+ priv_key = self._ffi.NULL
+ self._dsa_cdata_set_values(dsa_cdata, p, q, g, pub_key, priv_key)
+
+ evp_pkey = self._dsa_cdata_to_evp_pkey(dsa_cdata)
+
+ return _DSAPublicKey(self, dsa_cdata, evp_pkey)
+
+ def load_dsa_parameter_numbers(self, numbers):
+ dsa._check_dsa_parameters(numbers)
+ dsa_cdata = self._lib.DSA_new()
+ self.openssl_assert(dsa_cdata != self._ffi.NULL)
+ dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free)
+
+ p = self._int_to_bn(numbers.p)
+ q = self._int_to_bn(numbers.q)
+ g = self._int_to_bn(numbers.g)
+ res = self._lib.DSA_set0_pqg(dsa_cdata, p, q, g)
+ self.openssl_assert(res == 1)
+
+ return _DSAParameters(self, dsa_cdata)
+
+ def _dsa_cdata_to_evp_pkey(self, dsa_cdata):
+ evp_pkey = self._create_evp_pkey_gc()
+ res = self._lib.EVP_PKEY_set1_DSA(evp_pkey, dsa_cdata)
+ self.openssl_assert(res == 1)
+ return evp_pkey
+
+ def dsa_hash_supported(self, algorithm):
+ return self.hash_supported(algorithm)
+
+ def dsa_parameters_supported(self, p, q, g):
+ return True
+
+ def cmac_algorithm_supported(self, algorithm):
+ return self.cipher_supported(
+ algorithm, CBC(b"\x00" * algorithm.block_size)
+ )
+
+ def create_cmac_ctx(self, algorithm):
+ return _CMACContext(self, algorithm)
+
+ def _x509_check_signature_params(self, private_key, algorithm):
+ if isinstance(
+ private_key, (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)
+ ):
+ if algorithm is not None:
+ raise ValueError(
+ "algorithm must be None when signing via ed25519 or ed448"
+ )
+ elif not isinstance(
+ private_key,
+ (rsa.RSAPrivateKey, dsa.DSAPrivateKey, ec.EllipticCurvePrivateKey),
+ ):
+ raise TypeError(
+ "Key must be an rsa, dsa, ec, ed25519, or ed448 private key."
+ )
+ elif not isinstance(algorithm, hashes.HashAlgorithm):
+ raise TypeError("Algorithm must be a registered hash algorithm.")
+ elif isinstance(algorithm, hashes.MD5) and not isinstance(
+ private_key, rsa.RSAPrivateKey
+ ):
+ raise ValueError(
+ "MD5 hash algorithm is only supported with RSA keys"
+ )
+
+ def create_x509_csr(self, builder, private_key, algorithm):
+ if not isinstance(builder, x509.CertificateSigningRequestBuilder):
+ raise TypeError("Builder type mismatch.")
+ self._x509_check_signature_params(private_key, algorithm)
+
+ # Resolve the signature algorithm.
+ evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm)
+
+ # Create an empty request.
+ x509_req = self._lib.X509_REQ_new()
+ self.openssl_assert(x509_req != self._ffi.NULL)
+ x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free)
+
+ # Set x509 version.
+ res = self._lib.X509_REQ_set_version(x509_req, x509.Version.v1.value)
+ self.openssl_assert(res == 1)
+
+ # Set subject name.
+ res = self._lib.X509_REQ_set_subject_name(
+ x509_req, _encode_name_gc(self, builder._subject_name)
+ )
+ self.openssl_assert(res == 1)
+
+ # Set subject public key.
+ public_key = private_key.public_key()
+ res = self._lib.X509_REQ_set_pubkey(x509_req, public_key._evp_pkey)
+ self.openssl_assert(res == 1)
+
+ # Add extensions.
+ sk_extension = self._lib.sk_X509_EXTENSION_new_null()
+ self.openssl_assert(sk_extension != self._ffi.NULL)
+ sk_extension = self._ffi.gc(
+ sk_extension,
+ lambda x: self._lib.sk_X509_EXTENSION_pop_free(
+ x,
+ self._ffi.addressof(
+ self._lib._original_lib, "X509_EXTENSION_free"
+ ),
+ ),
+ )
+ # Don't GC individual extensions because the memory is owned by
+ # sk_extensions and will be freed along with it.
+ self._create_x509_extensions(
+ extensions=builder._extensions,
+ handlers=self._extension_encode_handlers,
+ x509_obj=sk_extension,
+ add_func=self._lib.sk_X509_EXTENSION_insert,
+ gc=False,
+ )
+ res = self._lib.X509_REQ_add_extensions(x509_req, sk_extension)
+ self.openssl_assert(res == 1)
+
+ # Add attributes (all bytes encoded as ASN1 UTF8_STRING)
+ for attr_oid, attr_val in builder._attributes:
+ obj = _txt2obj_gc(self, attr_oid.dotted_string)
+ res = self._lib.X509_REQ_add1_attr_by_OBJ(
+ x509_req,
+ obj,
+ x509.name._ASN1Type.UTF8String.value,
+ attr_val,
+ len(attr_val),
+ )
+ self.openssl_assert(res == 1)
+
+ # Sign the request using the requester's private key.
+ res = self._lib.X509_REQ_sign(x509_req, private_key._evp_pkey, evp_md)
+ if res == 0:
+ errors = self._consume_errors_with_text()
+ raise ValueError("Signing failed", errors)
+
+ return _CertificateSigningRequest(self, x509_req)
+
+ def create_x509_certificate(self, builder, private_key, algorithm):
+ if not isinstance(builder, x509.CertificateBuilder):
+ raise TypeError("Builder type mismatch.")
+ self._x509_check_signature_params(private_key, algorithm)
+
+ # Resolve the signature algorithm.
+ evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm)
+
+ # Create an empty certificate.
+ x509_cert = self._lib.X509_new()
+ x509_cert = self._ffi.gc(x509_cert, self._lib.X509_free)
+
+ # Set the x509 version.
+ res = self._lib.X509_set_version(x509_cert, builder._version.value)
+ self.openssl_assert(res == 1)
+
+ # Set the subject's name.
+ res = self._lib.X509_set_subject_name(
+ x509_cert, _encode_name_gc(self, builder._subject_name)
+ )
+ self.openssl_assert(res == 1)
+
+ # Set the subject's public key.
+ res = self._lib.X509_set_pubkey(
+ x509_cert, builder._public_key._evp_pkey
+ )
+ self.openssl_assert(res == 1)
+
+ # Set the certificate serial number.
+ serial_number = _encode_asn1_int_gc(self, builder._serial_number)
+ res = self._lib.X509_set_serialNumber(x509_cert, serial_number)
+ self.openssl_assert(res == 1)
+
+ # Set the "not before" time.
+ self._set_asn1_time(
+ self._lib.X509_getm_notBefore(x509_cert), builder._not_valid_before
+ )
+
+ # Set the "not after" time.
+ self._set_asn1_time(
+ self._lib.X509_getm_notAfter(x509_cert), builder._not_valid_after
+ )
+
+ # Add extensions.
+ self._create_x509_extensions(
+ extensions=builder._extensions,
+ handlers=self._extension_encode_handlers,
+ x509_obj=x509_cert,
+ add_func=self._lib.X509_add_ext,
+ gc=True,
+ )
+
+ # Set the issuer name.
+ res = self._lib.X509_set_issuer_name(
+ x509_cert, _encode_name_gc(self, builder._issuer_name)
+ )
+ self.openssl_assert(res == 1)
+
+ # Sign the certificate with the issuer's private key.
+ res = self._lib.X509_sign(x509_cert, private_key._evp_pkey, evp_md)
+ if res == 0:
+ errors = self._consume_errors_with_text()
+ raise ValueError("Signing failed", errors)
+
+ return _Certificate(self, x509_cert)
+
+ def _evp_md_x509_null_if_eddsa(self, private_key, algorithm):
+ if isinstance(
+ private_key, (ed25519.Ed25519PrivateKey, ed448.Ed448PrivateKey)
+ ):
+ # OpenSSL requires us to pass NULL for EVP_MD for ed25519/ed448
+ return self._ffi.NULL
+ else:
+ return self._evp_md_non_null_from_algorithm(algorithm)
+
+ def _set_asn1_time(self, asn1_time, time):
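+ # RFC 5280 requires UTCTime (two-digit year) for dates up to and
+ # including 2049 and GeneralizedTime (four-digit year) from 2050
+ # onward, which is why the format string switches on the year here;
+ # ASN1_TIME_set_string accepts either form.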
+ if time.year >= 2050:
+ asn1_str = time.strftime("%Y%m%d%H%M%SZ").encode("ascii")
+ else:
+ asn1_str = time.strftime("%y%m%d%H%M%SZ").encode("ascii")
+ res = self._lib.ASN1_TIME_set_string(asn1_time, asn1_str)
+ self.openssl_assert(res == 1)
+
+ def _create_asn1_time(self, time):
+ asn1_time = self._lib.ASN1_TIME_new()
+ self.openssl_assert(asn1_time != self._ffi.NULL)
+ asn1_time = self._ffi.gc(asn1_time, self._lib.ASN1_TIME_free)
+ self._set_asn1_time(asn1_time, time)
+ return asn1_time
+
+ def create_x509_crl(self, builder, private_key, algorithm):
+ if not isinstance(builder, x509.CertificateRevocationListBuilder):
+ raise TypeError("Builder type mismatch.")
+ self._x509_check_signature_params(private_key, algorithm)
+
+ evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm)
+
+ # Create an empty CRL.
+ x509_crl = self._lib.X509_CRL_new()
+ x509_crl = self._ffi.gc(x509_crl, self._lib.X509_CRL_free)
+
+ # Set the x509 CRL version. We only support v2 (integer value 1).
+ res = self._lib.X509_CRL_set_version(x509_crl, 1)
+ self.openssl_assert(res == 1)
+
+ # Set the issuer name.
+ res = self._lib.X509_CRL_set_issuer_name(
+ x509_crl, _encode_name_gc(self, builder._issuer_name)
+ )
+ self.openssl_assert(res == 1)
+
+ # Set the last update time.
+ last_update = self._create_asn1_time(builder._last_update)
+ res = self._lib.X509_CRL_set_lastUpdate(x509_crl, last_update)
+ self.openssl_assert(res == 1)
+
+ # Set the next update time.
+ next_update = self._create_asn1_time(builder._next_update)
+ res = self._lib.X509_CRL_set_nextUpdate(x509_crl, next_update)
+ self.openssl_assert(res == 1)
+
+ # Add extensions.
+ self._create_x509_extensions(
+ extensions=builder._extensions,
+ handlers=self._crl_extension_encode_handlers,
+ x509_obj=x509_crl,
+ add_func=self._lib.X509_CRL_add_ext,
+ gc=True,
+ )
+
+ # add revoked certificates
+ for revoked_cert in builder._revoked_certificates:
+ # Duplicating because the X509_CRL takes ownership and will free
+ # this memory when X509_CRL_free is called.
+ revoked = self._lib.X509_REVOKED_dup(revoked_cert._x509_revoked)
+ self.openssl_assert(revoked != self._ffi.NULL)
+ res = self._lib.X509_CRL_add0_revoked(x509_crl, revoked)
+ self.openssl_assert(res == 1)
+
+ res = self._lib.X509_CRL_sign(x509_crl, private_key._evp_pkey, evp_md)
+ if res == 0:
+ errors = self._consume_errors_with_text()
+ raise ValueError("Signing failed", errors)
+
+ return _CertificateRevocationList(self, x509_crl)
+
+ def _create_x509_extensions(
+ self, extensions, handlers, x509_obj, add_func, gc
+ ):
+ for i, extension in enumerate(extensions):
+ x509_extension = self._create_x509_extension(handlers, extension)
+ self.openssl_assert(x509_extension != self._ffi.NULL)
+
+ if gc:
+ x509_extension = self._ffi.gc(
+ x509_extension, self._lib.X509_EXTENSION_free
+ )
+ res = add_func(x509_obj, x509_extension, i)
+ self.openssl_assert(res >= 1)
+
+ def _create_raw_x509_extension(self, extension, value):
+ obj = _txt2obj_gc(self, extension.oid.dotted_string)
+ return self._lib.X509_EXTENSION_create_by_OBJ(
+ self._ffi.NULL, obj, 1 if extension.critical else 0, value
+ )
+
+ def _create_x509_extension(self, handlers, extension):
+ if isinstance(extension.value, x509.UnrecognizedExtension):
+ value = _encode_asn1_str_gc(self, extension.value.value)
+ return self._create_raw_x509_extension(extension, value)
+ elif isinstance(extension.value, x509.TLSFeature):
+ asn1 = encode_der(
+ SEQUENCE,
+ *[
+ encode_der(INTEGER, encode_der_integer(x.value))
+ for x in extension.value
+ ]
+ )
+ value = _encode_asn1_str_gc(self, asn1)
+ return self._create_raw_x509_extension(extension, value)
+ elif isinstance(extension.value, x509.PrecertPoison):
+ value = _encode_asn1_str_gc(self, encode_der(NULL))
+ return self._create_raw_x509_extension(extension, value)
+ else:
+ try:
+ encode = handlers[extension.oid]
+ except KeyError:
+ raise NotImplementedError(
+ "Extension not supported: {}".format(extension.oid)
+ )
+
+ ext_struct = encode(self, extension.value)
+ nid = self._lib.OBJ_txt2nid(
+ extension.oid.dotted_string.encode("ascii")
+ )
+ self.openssl_assert(nid != self._lib.NID_undef)
+ return self._lib.X509V3_EXT_i2d(
+ nid, 1 if extension.critical else 0, ext_struct
+ )
+
+ def create_x509_revoked_certificate(self, builder):
+ if not isinstance(builder, x509.RevokedCertificateBuilder):
+ raise TypeError("Builder type mismatch.")
+
+ x509_revoked = self._lib.X509_REVOKED_new()
+ self.openssl_assert(x509_revoked != self._ffi.NULL)
+ x509_revoked = self._ffi.gc(x509_revoked, self._lib.X509_REVOKED_free)
+ serial_number = _encode_asn1_int_gc(self, builder._serial_number)
+ res = self._lib.X509_REVOKED_set_serialNumber(
+ x509_revoked, serial_number
+ )
+ self.openssl_assert(res == 1)
+ rev_date = self._create_asn1_time(builder._revocation_date)
+ res = self._lib.X509_REVOKED_set_revocationDate(x509_revoked, rev_date)
+ self.openssl_assert(res == 1)
+ # add CRL entry extensions
+ self._create_x509_extensions(
+ extensions=builder._extensions,
+ handlers=self._crl_entry_extension_encode_handlers,
+ x509_obj=x509_revoked,
+ add_func=self._lib.X509_REVOKED_add_ext,
+ gc=True,
+ )
+ return _RevokedCertificate(self, None, x509_revoked)
+
+ def load_pem_private_key(self, data, password):
+ return self._load_key(
+ self._lib.PEM_read_bio_PrivateKey,
+ self._evp_pkey_to_private_key,
+ data,
+ password,
+ )
+
+ def load_pem_public_key(self, data):
+ mem_bio = self._bytes_to_bio(data)
+ evp_pkey = self._lib.PEM_read_bio_PUBKEY(
+ mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL
+ )
+ if evp_pkey != self._ffi.NULL:
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+ return self._evp_pkey_to_public_key(evp_pkey)
+ else:
+ # It's not a (RSA/DSA/ECDSA) subjectPublicKeyInfo, but we still
+ # need to check to see if it is a pure PKCS1 RSA public key (not
+ # embedded in a subjectPublicKeyInfo)
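+ # For reference, the two PEM forms differ in their armor headers:
+ # "-----BEGIN PUBLIC KEY-----" is a subjectPublicKeyInfo (handled
+ # above), while "-----BEGIN RSA PUBLIC KEY-----" is the raw PKCS1
+ # RSAPublicKey structure handled by the fallback below.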
+ self._consume_errors()
+ res = self._lib.BIO_reset(mem_bio.bio)
+ self.openssl_assert(res == 1)
+ rsa_cdata = self._lib.PEM_read_bio_RSAPublicKey(
+ mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL
+ )
+ if rsa_cdata != self._ffi.NULL:
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)
+ return _RSAPublicKey(self, rsa_cdata, evp_pkey)
+ else:
+ self._handle_key_loading_error()
+
+ def load_pem_parameters(self, data):
+ mem_bio = self._bytes_to_bio(data)
+ # only DH is supported currently
+ dh_cdata = self._lib.PEM_read_bio_DHparams(
+ mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL
+ )
+ if dh_cdata != self._ffi.NULL:
+ dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
+ return _DHParameters(self, dh_cdata)
+ else:
+ self._handle_key_loading_error()
+
+ def load_der_private_key(self, data, password):
+ # OpenSSL has a function called d2i_AutoPrivateKey that in theory
+ # handles this automatically; however, it doesn't handle encrypted
+ # private keys. Instead we try to load the key two different ways.
+ # First we'll try to load it as a traditional key.
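+ # "Traditional" here means the type-specific DER structures (e.g.
+ # PKCS1 RSAPrivateKey or SEC1 ECPrivateKey) accepted by
+ # d2i_PrivateKey_bio; encrypted PKCS8 blobs make that call fail, and
+ # the fallback below handles them via d2i_PKCS8PrivateKey_bio, which
+ # knows how to apply the password.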
+ bio_data = self._bytes_to_bio(data)
+ key = self._evp_pkey_from_der_traditional_key(bio_data, password)
+ if key:
+ return self._evp_pkey_to_private_key(key)
+ else:
+ # Finally we try to load it with the method that handles encrypted
+ # PKCS8 properly.
+ return self._load_key(
+ self._lib.d2i_PKCS8PrivateKey_bio,
+ self._evp_pkey_to_private_key,
+ data,
+ password,
+ )
+
+ def _evp_pkey_from_der_traditional_key(self, bio_data, password):
+ key = self._lib.d2i_PrivateKey_bio(bio_data.bio, self._ffi.NULL)
+ if key != self._ffi.NULL:
+ key = self._ffi.gc(key, self._lib.EVP_PKEY_free)
+ if password is not None:
+ raise TypeError(
+ "Password was given but private key is not encrypted."
+ )
+
+ return key
+ else:
+ self._consume_errors()
+ return None
+
+ def load_der_public_key(self, data):
+ mem_bio = self._bytes_to_bio(data)
+ evp_pkey = self._lib.d2i_PUBKEY_bio(mem_bio.bio, self._ffi.NULL)
+ if evp_pkey != self._ffi.NULL:
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+ return self._evp_pkey_to_public_key(evp_pkey)
+ else:
+ # It's not a (RSA/DSA/ECDSA) subjectPublicKeyInfo, but we still
+ # need to check to see if it is a pure PKCS1 RSA public key (not
+ # embedded in a subjectPublicKeyInfo)
+ self._consume_errors()
+ res = self._lib.BIO_reset(mem_bio.bio)
+ self.openssl_assert(res == 1)
+ rsa_cdata = self._lib.d2i_RSAPublicKey_bio(
+ mem_bio.bio, self._ffi.NULL
+ )
+ if rsa_cdata != self._ffi.NULL:
+ rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free)
+ evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata)
+ return _RSAPublicKey(self, rsa_cdata, evp_pkey)
+ else:
+ self._handle_key_loading_error()
+
+ def load_der_parameters(self, data):
+ mem_bio = self._bytes_to_bio(data)
+ dh_cdata = self._lib.d2i_DHparams_bio(mem_bio.bio, self._ffi.NULL)
+ if dh_cdata != self._ffi.NULL:
+ dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
+ return _DHParameters(self, dh_cdata)
+ elif self._lib.Cryptography_HAS_EVP_PKEY_DHX:
+ # We check to see if the data is a DHX (X9.42) parameter encoding.
+ self._consume_errors()
+ res = self._lib.BIO_reset(mem_bio.bio)
+ self.openssl_assert(res == 1)
+ dh_cdata = self._lib.Cryptography_d2i_DHxparams_bio(
+ mem_bio.bio, self._ffi.NULL
+ )
+ if dh_cdata != self._ffi.NULL:
+ dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
+ return _DHParameters(self, dh_cdata)
+
+ self._handle_key_loading_error()
+
+ def load_pem_x509_certificate(self, data):
+ mem_bio = self._bytes_to_bio(data)
+ x509 = self._lib.PEM_read_bio_X509(
+ mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL
+ )
+ if x509 == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError(
+ "Unable to load certificate. See https://cryptography.io/en/"
+ "latest/faq.html#why-can-t-i-import-my-pem-file for more"
+ " details."
+ )
+
+ x509 = self._ffi.gc(x509, self._lib.X509_free)
+ return _Certificate(self, x509)
+
+ def load_der_x509_certificate(self, data):
+ mem_bio = self._bytes_to_bio(data)
+ x509 = self._lib.d2i_X509_bio(mem_bio.bio, self._ffi.NULL)
+ if x509 == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Unable to load certificate")
+
+ x509 = self._ffi.gc(x509, self._lib.X509_free)
+ return _Certificate(self, x509)
+
+ def load_pem_x509_crl(self, data):
+ mem_bio = self._bytes_to_bio(data)
+ x509_crl = self._lib.PEM_read_bio_X509_CRL(
+ mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL
+ )
+ if x509_crl == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError(
+ "Unable to load CRL. See https://cryptography.io/en/la"
+ "test/faq.html#why-can-t-i-import-my-pem-file for more"
+ " details."
+ )
+
+ x509_crl = self._ffi.gc(x509_crl, self._lib.X509_CRL_free)
+ return _CertificateRevocationList(self, x509_crl)
+
+ def load_der_x509_crl(self, data):
+ mem_bio = self._bytes_to_bio(data)
+ x509_crl = self._lib.d2i_X509_CRL_bio(mem_bio.bio, self._ffi.NULL)
+ if x509_crl == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Unable to load CRL")
+
+ x509_crl = self._ffi.gc(x509_crl, self._lib.X509_CRL_free)
+ return _CertificateRevocationList(self, x509_crl)
+
+ def load_pem_x509_csr(self, data):
+ mem_bio = self._bytes_to_bio(data)
+ x509_req = self._lib.PEM_read_bio_X509_REQ(
+ mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL
+ )
+ if x509_req == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError(
+ "Unable to load request. See https://cryptography.io/en/"
+ "latest/faq.html#why-can-t-i-import-my-pem-file for more"
+ " details."
+ )
+
+ x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free)
+ return _CertificateSigningRequest(self, x509_req)
+
+ def load_der_x509_csr(self, data):
+ mem_bio = self._bytes_to_bio(data)
+ x509_req = self._lib.d2i_X509_REQ_bio(mem_bio.bio, self._ffi.NULL)
+ if x509_req == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Unable to load request")
+
+ x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free)
+ return _CertificateSigningRequest(self, x509_req)
+
+ def _load_key(self, openssl_read_func, convert_func, data, password):
+ mem_bio = self._bytes_to_bio(data)
+
+ userdata = self._ffi.new("CRYPTOGRAPHY_PASSWORD_DATA *")
+ if password is not None:
+ utils._check_byteslike("password", password)
+ password_ptr = self._ffi.from_buffer(password)
+ userdata.password = password_ptr
+ userdata.length = len(password)
+
+ evp_pkey = openssl_read_func(
+ mem_bio.bio,
+ self._ffi.NULL,
+ self._ffi.addressof(
+ self._lib._original_lib, "Cryptography_pem_password_cb"
+ ),
+ userdata,
+ )
+
+ if evp_pkey == self._ffi.NULL:
+ if userdata.error != 0:
+ self._consume_errors()
+ if userdata.error == -1:
+ raise TypeError(
+ "Password was not given but private key is encrypted"
+ )
+ else:
+ assert userdata.error == -2
+ raise ValueError(
+ "Passwords longer than {} bytes are not supported "
+ "by this backend.".format(userdata.maxsize - 1)
+ )
+ else:
+ self._handle_key_loading_error()
+
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+
+ if password is not None and userdata.called == 0:
+ raise TypeError(
+ "Password was given but private key is not encrypted."
+ )
+
+ assert (
+ password is not None and userdata.called == 1
+ ) or password is None
+
+ return convert_func(evp_pkey)
+
+ def _handle_key_loading_error(self):
+ errors = self._consume_errors()
+
+ if not errors:
+ raise ValueError(
+ "Could not deserialize key data. The data may be in an "
+ "incorrect format or it may be encrypted with an unsupported "
+ "algorithm."
+ )
+ elif errors[0]._lib_reason_match(
+ self._lib.ERR_LIB_EVP, self._lib.EVP_R_BAD_DECRYPT
+ ) or errors[0]._lib_reason_match(
+ self._lib.ERR_LIB_PKCS12,
+ self._lib.PKCS12_R_PKCS12_CIPHERFINAL_ERROR,
+ ):
+ raise ValueError("Bad decrypt. Incorrect password?")
+
+ elif any(
+ error._lib_reason_match(
+ self._lib.ERR_LIB_EVP,
+ self._lib.EVP_R_UNSUPPORTED_PRIVATE_KEY_ALGORITHM,
+ )
+ for error in errors
+ ):
+ raise ValueError("Unsupported public key algorithm.")
+
+ else:
+ raise ValueError(
+ "Could not deserialize key data. The data may be in an "
+ "incorrect format or it may be encrypted with an unsupported "
+ "algorithm."
+ )
+
+ def elliptic_curve_supported(self, curve):
+ try:
+ curve_nid = self._elliptic_curve_to_nid(curve)
+ except UnsupportedAlgorithm:
+ curve_nid = self._lib.NID_undef
+
+ group = self._lib.EC_GROUP_new_by_curve_name(curve_nid)
+
+ if group == self._ffi.NULL:
+ self._consume_errors()
+ return False
+ else:
+ self.openssl_assert(curve_nid != self._lib.NID_undef)
+ self._lib.EC_GROUP_free(group)
+ return True
+
+ def elliptic_curve_signature_algorithm_supported(
+ self, signature_algorithm, curve
+ ):
+ # We only support ECDSA right now.
+ if not isinstance(signature_algorithm, ec.ECDSA):
+ return False
+
+ return self.elliptic_curve_supported(curve)
+
+ def generate_elliptic_curve_private_key(self, curve):
+ """
+ Generate a new private key on the named curve.
+ """
+
+ if self.elliptic_curve_supported(curve):
+ ec_cdata = self._ec_key_new_by_curve(curve)
+
+ res = self._lib.EC_KEY_generate_key(ec_cdata)
+ self.openssl_assert(res == 1)
+
+ evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
+
+ return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
+ else:
+ raise UnsupportedAlgorithm(
+ "Backend object does not support {}.".format(curve.name),
+ _Reasons.UNSUPPORTED_ELLIPTIC_CURVE,
+ )
+
+ def load_elliptic_curve_private_numbers(self, numbers):
+ public = numbers.public_numbers
+
+ ec_cdata = self._ec_key_new_by_curve(public.curve)
+
+ private_value = self._ffi.gc(
+ self._int_to_bn(numbers.private_value), self._lib.BN_clear_free
+ )
+ res = self._lib.EC_KEY_set_private_key(ec_cdata, private_value)
+ self.openssl_assert(res == 1)
+
+ ec_cdata = self._ec_key_set_public_key_affine_coordinates(
+ ec_cdata, public.x, public.y
+ )
+
+ evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
+
+ return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
+
+ def load_elliptic_curve_public_numbers(self, numbers):
+ ec_cdata = self._ec_key_new_by_curve(numbers.curve)
+ ec_cdata = self._ec_key_set_public_key_affine_coordinates(
+ ec_cdata, numbers.x, numbers.y
+ )
+ evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
+
+ return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey)
+
+ def load_elliptic_curve_public_bytes(self, curve, point_bytes):
+ ec_cdata = self._ec_key_new_by_curve(curve)
+ group = self._lib.EC_KEY_get0_group(ec_cdata)
+ self.openssl_assert(group != self._ffi.NULL)
+ point = self._lib.EC_POINT_new(group)
+ self.openssl_assert(point != self._ffi.NULL)
+ point = self._ffi.gc(point, self._lib.EC_POINT_free)
+ with self._tmp_bn_ctx() as bn_ctx:
+ res = self._lib.EC_POINT_oct2point(
+ group, point, point_bytes, len(point_bytes), bn_ctx
+ )
+ if res != 1:
+ self._consume_errors()
+ raise ValueError("Invalid public bytes for the given curve")
+
+ res = self._lib.EC_KEY_set_public_key(ec_cdata, point)
+ self.openssl_assert(res == 1)
+ evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
+ return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey)
+
+ def derive_elliptic_curve_private_key(self, private_value, curve):
+ ec_cdata = self._ec_key_new_by_curve(curve)
+
+ get_func, group = self._ec_key_determine_group_get_func(ec_cdata)
+
+ point = self._lib.EC_POINT_new(group)
+ self.openssl_assert(point != self._ffi.NULL)
+ point = self._ffi.gc(point, self._lib.EC_POINT_free)
+
+ value = self._int_to_bn(private_value)
+ value = self._ffi.gc(value, self._lib.BN_clear_free)
+
+ with self._tmp_bn_ctx() as bn_ctx:
+ res = self._lib.EC_POINT_mul(
+ group, point, value, self._ffi.NULL, self._ffi.NULL, bn_ctx
+ )
+ self.openssl_assert(res == 1)
+
+ bn_x = self._lib.BN_CTX_get(bn_ctx)
+ bn_y = self._lib.BN_CTX_get(bn_ctx)
+
+ res = get_func(group, point, bn_x, bn_y, bn_ctx)
+ self.openssl_assert(res == 1)
+
+ res = self._lib.EC_KEY_set_public_key(ec_cdata, point)
+ self.openssl_assert(res == 1)
+ private = self._int_to_bn(private_value)
+ private = self._ffi.gc(private, self._lib.BN_clear_free)
+ res = self._lib.EC_KEY_set_private_key(ec_cdata, private)
+ self.openssl_assert(res == 1)
+
+ evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata)
+
+ return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey)
+
+ def _ec_key_new_by_curve(self, curve):
+ curve_nid = self._elliptic_curve_to_nid(curve)
+ return self._ec_key_new_by_curve_nid(curve_nid)
+
+ def _ec_key_new_by_curve_nid(self, curve_nid):
+ ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid)
+ self.openssl_assert(ec_cdata != self._ffi.NULL)
+ return self._ffi.gc(ec_cdata, self._lib.EC_KEY_free)
+
+ def load_der_ocsp_request(self, data):
+ mem_bio = self._bytes_to_bio(data)
+ request = self._lib.d2i_OCSP_REQUEST_bio(mem_bio.bio, self._ffi.NULL)
+ if request == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Unable to load OCSP request")
+
+ request = self._ffi.gc(request, self._lib.OCSP_REQUEST_free)
+ return _OCSPRequest(self, request)
+
+ def load_der_ocsp_response(self, data):
+ mem_bio = self._bytes_to_bio(data)
+ response = self._lib.d2i_OCSP_RESPONSE_bio(mem_bio.bio, self._ffi.NULL)
+ if response == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Unable to load OCSP response")
+
+ response = self._ffi.gc(response, self._lib.OCSP_RESPONSE_free)
+ return _OCSPResponse(self, response)
+
+ def create_ocsp_request(self, builder):
+ ocsp_req = self._lib.OCSP_REQUEST_new()
+ self.openssl_assert(ocsp_req != self._ffi.NULL)
+ ocsp_req = self._ffi.gc(ocsp_req, self._lib.OCSP_REQUEST_free)
+ cert, issuer, algorithm = builder._request
+ evp_md = self._evp_md_non_null_from_algorithm(algorithm)
+ certid = self._lib.OCSP_cert_to_id(evp_md, cert._x509, issuer._x509)
+ self.openssl_assert(certid != self._ffi.NULL)
+ onereq = self._lib.OCSP_request_add0_id(ocsp_req, certid)
+ self.openssl_assert(onereq != self._ffi.NULL)
+ self._create_x509_extensions(
+ extensions=builder._extensions,
+ handlers=self._ocsp_request_extension_encode_handlers,
+ x509_obj=ocsp_req,
+ add_func=self._lib.OCSP_REQUEST_add_ext,
+ gc=True,
+ )
+ return _OCSPRequest(self, ocsp_req)
+
+ def _create_ocsp_basic_response(self, builder, private_key, algorithm):
+ self._x509_check_signature_params(private_key, algorithm)
+
+ basic = self._lib.OCSP_BASICRESP_new()
+ self.openssl_assert(basic != self._ffi.NULL)
+ basic = self._ffi.gc(basic, self._lib.OCSP_BASICRESP_free)
+ evp_md = self._evp_md_non_null_from_algorithm(
+ builder._response._algorithm
+ )
+ certid = self._lib.OCSP_cert_to_id(
+ evp_md,
+ builder._response._cert._x509,
+ builder._response._issuer._x509,
+ )
+ self.openssl_assert(certid != self._ffi.NULL)
+ certid = self._ffi.gc(certid, self._lib.OCSP_CERTID_free)
+ if builder._response._revocation_reason is None:
+ reason = -1
+ else:
+ reason = _CRL_ENTRY_REASON_ENUM_TO_CODE[
+ builder._response._revocation_reason
+ ]
+ if builder._response._revocation_time is None:
+ rev_time = self._ffi.NULL
+ else:
+ rev_time = self._create_asn1_time(
+ builder._response._revocation_time
+ )
+
+ next_update = self._ffi.NULL
+ if builder._response._next_update is not None:
+ next_update = self._create_asn1_time(
+ builder._response._next_update
+ )
+
+ this_update = self._create_asn1_time(builder._response._this_update)
+
+ res = self._lib.OCSP_basic_add1_status(
+ basic,
+ certid,
+ builder._response._cert_status.value,
+ reason,
+ rev_time,
+ this_update,
+ next_update,
+ )
+ self.openssl_assert(res != self._ffi.NULL)
+ # okay, now sign the basic structure
+ evp_md = self._evp_md_x509_null_if_eddsa(private_key, algorithm)
+ responder_cert, responder_encoding = builder._responder_id
+ flags = self._lib.OCSP_NOCERTS
+ if responder_encoding is ocsp.OCSPResponderEncoding.HASH:
+ flags |= self._lib.OCSP_RESPID_KEY
+
+ if builder._certs is not None:
+ for cert in builder._certs:
+ res = self._lib.OCSP_basic_add1_cert(basic, cert._x509)
+ self.openssl_assert(res == 1)
+
+ self._create_x509_extensions(
+ extensions=builder._extensions,
+ handlers=self._ocsp_basicresp_extension_encode_handlers,
+ x509_obj=basic,
+ add_func=self._lib.OCSP_BASICRESP_add_ext,
+ gc=True,
+ )
+
+ res = self._lib.OCSP_basic_sign(
+ basic,
+ responder_cert._x509,
+ private_key._evp_pkey,
+ evp_md,
+ self._ffi.NULL,
+ flags,
+ )
+ if res != 1:
+ errors = self._consume_errors_with_text()
+ raise ValueError(
+ "Error while signing. responder_cert must be signed "
+ "by private_key",
+ errors,
+ )
+
+ return basic
+
+ def create_ocsp_response(
+ self, response_status, builder, private_key, algorithm
+ ):
+ if response_status is ocsp.OCSPResponseStatus.SUCCESSFUL:
+ basic = self._create_ocsp_basic_response(
+ builder, private_key, algorithm
+ )
+ else:
+ basic = self._ffi.NULL
+
+ ocsp_resp = self._lib.OCSP_response_create(
+ response_status.value, basic
+ )
+ self.openssl_assert(ocsp_resp != self._ffi.NULL)
+ ocsp_resp = self._ffi.gc(ocsp_resp, self._lib.OCSP_RESPONSE_free)
+ return _OCSPResponse(self, ocsp_resp)
+
+ def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve):
+ return self.elliptic_curve_supported(curve) and isinstance(
+ algorithm, ec.ECDH
+ )
+
+ def _ec_cdata_to_evp_pkey(self, ec_cdata):
+ evp_pkey = self._create_evp_pkey_gc()
+ res = self._lib.EVP_PKEY_set1_EC_KEY(evp_pkey, ec_cdata)
+ self.openssl_assert(res == 1)
+ return evp_pkey
+
+ def _elliptic_curve_to_nid(self, curve):
+ """
+ Get the NID for a curve name.
+ """
+
+ curve_aliases = {"secp192r1": "prime192v1", "secp256r1": "prime256v1"}
+
+ curve_name = curve_aliases.get(curve.name, curve.name)
+
+ curve_nid = self._lib.OBJ_sn2nid(curve_name.encode())
+ if curve_nid == self._lib.NID_undef:
+ raise UnsupportedAlgorithm(
+ "{} is not a supported elliptic curve".format(curve.name),
+ _Reasons.UNSUPPORTED_ELLIPTIC_CURVE,
+ )
+ return curve_nid
+
+ @contextmanager
+ def _tmp_bn_ctx(self):
+ bn_ctx = self._lib.BN_CTX_new()
+ self.openssl_assert(bn_ctx != self._ffi.NULL)
+ bn_ctx = self._ffi.gc(bn_ctx, self._lib.BN_CTX_free)
+ self._lib.BN_CTX_start(bn_ctx)
+ try:
+ yield bn_ctx
+ finally:
+ self._lib.BN_CTX_end(bn_ctx)
+
+ def _ec_key_determine_group_get_func(self, ctx):
+ """
+ Given an EC_KEY determine the group and what function is required to
+ get point coordinates.
+ """
+ self.openssl_assert(ctx != self._ffi.NULL)
+
+ nid_two_field = self._lib.OBJ_sn2nid(b"characteristic-two-field")
+ self.openssl_assert(nid_two_field != self._lib.NID_undef)
+
+ group = self._lib.EC_KEY_get0_group(ctx)
+ self.openssl_assert(group != self._ffi.NULL)
+
+ method = self._lib.EC_GROUP_method_of(group)
+ self.openssl_assert(method != self._ffi.NULL)
+
+ nid = self._lib.EC_METHOD_get_field_type(method)
+ self.openssl_assert(nid != self._lib.NID_undef)
+
+ if nid == nid_two_field and self._lib.Cryptography_HAS_EC2M:
+ get_func = self._lib.EC_POINT_get_affine_coordinates_GF2m
+ else:
+ get_func = self._lib.EC_POINT_get_affine_coordinates_GFp
+
+ assert get_func
+
+ return get_func, group
+
+ def _ec_key_set_public_key_affine_coordinates(self, ctx, x, y):
+ """
+ Sets the public key point in the EC_KEY context to the affine x and y
+ values.
+ """
+
+ if x < 0 or y < 0:
+ raise ValueError(
+ "Invalid EC key. Both x and y must be non-negative."
+ )
+
+ x = self._ffi.gc(self._int_to_bn(x), self._lib.BN_free)
+ y = self._ffi.gc(self._int_to_bn(y), self._lib.BN_free)
+ res = self._lib.EC_KEY_set_public_key_affine_coordinates(ctx, x, y)
+ if res != 1:
+ self._consume_errors()
+ raise ValueError("Invalid EC key.")
+
+ return ctx
+
+ def _private_key_bytes(
+ self, encoding, format, encryption_algorithm, key, evp_pkey, cdata
+ ):
+ # validate argument types
+ if not isinstance(encoding, serialization.Encoding):
+ raise TypeError("encoding must be an item from the Encoding enum")
+ if not isinstance(format, serialization.PrivateFormat):
+ raise TypeError(
+ "format must be an item from the PrivateFormat enum"
+ )
+ if not isinstance(
+ encryption_algorithm, serialization.KeySerializationEncryption
+ ):
+ raise TypeError(
+ "Encryption algorithm must be a KeySerializationEncryption "
+ "instance"
+ )
+
+ # validate password
+ if isinstance(encryption_algorithm, serialization.NoEncryption):
+ password = b""
+ elif isinstance(
+ encryption_algorithm, serialization.BestAvailableEncryption
+ ):
+ password = encryption_algorithm.password
+ if len(password) > 1023:
+ raise ValueError(
+ "Passwords longer than 1023 bytes are not supported by "
+ "this backend"
+ )
+ else:
+ raise ValueError("Unsupported encryption type")
+
+ # PKCS8 + PEM/DER
+ if format is serialization.PrivateFormat.PKCS8:
+ if encoding is serialization.Encoding.PEM:
+ write_bio = self._lib.PEM_write_bio_PKCS8PrivateKey
+ elif encoding is serialization.Encoding.DER:
+ write_bio = self._lib.i2d_PKCS8PrivateKey_bio
+ else:
+ raise ValueError("Unsupported encoding for PKCS8")
+ return self._private_key_bytes_via_bio(
+ write_bio, evp_pkey, password
+ )
+
+ # TraditionalOpenSSL + PEM/DER
+ if format is serialization.PrivateFormat.TraditionalOpenSSL:
+ if self._fips_enabled and not isinstance(
+ encryption_algorithm, serialization.NoEncryption
+ ):
+ raise ValueError(
+ "Encrypted traditional OpenSSL format is not "
+ "supported in FIPS mode."
+ )
+ key_type = self._lib.EVP_PKEY_id(evp_pkey)
+
+ if encoding is serialization.Encoding.PEM:
+ if key_type == self._lib.EVP_PKEY_RSA:
+ write_bio = self._lib.PEM_write_bio_RSAPrivateKey
+ elif key_type == self._lib.EVP_PKEY_DSA:
+ write_bio = self._lib.PEM_write_bio_DSAPrivateKey
+ elif key_type == self._lib.EVP_PKEY_EC:
+ write_bio = self._lib.PEM_write_bio_ECPrivateKey
+ else:
+ raise ValueError(
+ "Unsupported key type for TraditionalOpenSSL"
+ )
+ return self._private_key_bytes_via_bio(
+ write_bio, cdata, password
+ )
+
+ if encoding is serialization.Encoding.DER:
+ if password:
+ raise ValueError(
+ "Encryption is not supported for DER encoded "
+ "traditional OpenSSL keys"
+ )
+ if key_type == self._lib.EVP_PKEY_RSA:
+ write_bio = self._lib.i2d_RSAPrivateKey_bio
+ elif key_type == self._lib.EVP_PKEY_EC:
+ write_bio = self._lib.i2d_ECPrivateKey_bio
+ elif key_type == self._lib.EVP_PKEY_DSA:
+ write_bio = self._lib.i2d_DSAPrivateKey_bio
+ else:
+ raise ValueError(
+ "Unsupported key type for TraditionalOpenSSL"
+ )
+ return self._bio_func_output(write_bio, cdata)
+
+ raise ValueError("Unsupported encoding for TraditionalOpenSSL")
+
+ # OpenSSH + PEM
+ if format is serialization.PrivateFormat.OpenSSH:
+ if encoding is serialization.Encoding.PEM:
+ return ssh.serialize_ssh_private_key(key, password)
+
+ raise ValueError(
+ "OpenSSH private key format can only be used"
+ " with PEM encoding"
+ )
+
+ # Anything that key-specific code was supposed to handle earlier,
+ # like Raw.
+ raise ValueError("format is invalid with this key")
+
+ def _private_key_bytes_via_bio(self, write_bio, evp_pkey, password):
+ if not password:
+ evp_cipher = self._ffi.NULL
+ else:
+ # This is a curated value that we will update over time.
+ evp_cipher = self._lib.EVP_get_cipherbyname(b"aes-256-cbc")
+
+ return self._bio_func_output(
+ write_bio,
+ evp_pkey,
+ evp_cipher,
+ password,
+ len(password),
+ self._ffi.NULL,
+ self._ffi.NULL,
+ )
+
+ def _bio_func_output(self, write_bio, *args):
+ bio = self._create_mem_bio_gc()
+ res = write_bio(bio, *args)
+ self.openssl_assert(res == 1)
+ return self._read_mem_bio(bio)
+
+ def _public_key_bytes(self, encoding, format, key, evp_pkey, cdata):
+ if not isinstance(encoding, serialization.Encoding):
+ raise TypeError("encoding must be an item from the Encoding enum")
+ if not isinstance(format, serialization.PublicFormat):
+ raise TypeError(
+ "format must be an item from the PublicFormat enum"
+ )
+
+ # SubjectPublicKeyInfo + PEM/DER
+ if format is serialization.PublicFormat.SubjectPublicKeyInfo:
+ if encoding is serialization.Encoding.PEM:
+ write_bio = self._lib.PEM_write_bio_PUBKEY
+ elif encoding is serialization.Encoding.DER:
+ write_bio = self._lib.i2d_PUBKEY_bio
+ else:
+ raise ValueError(
+ "SubjectPublicKeyInfo works only with PEM or DER encoding"
+ )
+ return self._bio_func_output(write_bio, evp_pkey)
+
+ # PKCS1 + PEM/DER
+ if format is serialization.PublicFormat.PKCS1:
+ # Only RSA is supported here.
+ key_type = self._lib.EVP_PKEY_id(evp_pkey)
+ if key_type != self._lib.EVP_PKEY_RSA:
+ raise ValueError("PKCS1 format is supported only for RSA keys")
+
+ if encoding is serialization.Encoding.PEM:
+ write_bio = self._lib.PEM_write_bio_RSAPublicKey
+ elif encoding is serialization.Encoding.DER:
+ write_bio = self._lib.i2d_RSAPublicKey_bio
+ else:
+ raise ValueError("PKCS1 works only with PEM or DER encoding")
+ return self._bio_func_output(write_bio, cdata)
+
+ # OpenSSH + OpenSSH
+ if format is serialization.PublicFormat.OpenSSH:
+ if encoding is serialization.Encoding.OpenSSH:
+ return ssh.serialize_ssh_public_key(key)
+
+ raise ValueError(
+ "OpenSSH format must be used with OpenSSH encoding"
+ )
+
+ # Anything that key-specific code was supposed to handle earlier,
+ # like Raw, CompressedPoint, UncompressedPoint
+ raise ValueError("format is invalid with this key")
+
+ def _parameter_bytes(self, encoding, format, cdata):
+ if encoding is serialization.Encoding.OpenSSH:
+ raise TypeError("OpenSSH encoding is not supported")
+
+ # Only DH is supported here currently.
+ q = self._ffi.new("BIGNUM **")
+ self._lib.DH_get0_pqg(cdata, self._ffi.NULL, q, self._ffi.NULL)
+ if encoding is serialization.Encoding.PEM:
+ if q[0] != self._ffi.NULL:
+ write_bio = self._lib.PEM_write_bio_DHxparams
+ else:
+ write_bio = self._lib.PEM_write_bio_DHparams
+ elif encoding is serialization.Encoding.DER:
+ if q[0] != self._ffi.NULL:
+ write_bio = self._lib.Cryptography_i2d_DHxparams_bio
+ else:
+ write_bio = self._lib.i2d_DHparams_bio
+ else:
+ raise TypeError("encoding must be an item from the Encoding enum")
+
+ bio = self._create_mem_bio_gc()
+ res = write_bio(bio, cdata)
+ self.openssl_assert(res == 1)
+ return self._read_mem_bio(bio)
+
+ def generate_dh_parameters(self, generator, key_size):
+ if key_size < dh._MIN_MODULUS_SIZE:
+ raise ValueError(
+ "DH key_size must be at least {} bits".format(
+ dh._MIN_MODULUS_SIZE
+ )
+ )
+
+ if generator not in (2, 5):
+ raise ValueError("DH generator must be 2 or 5")
+
+ dh_param_cdata = self._lib.DH_new()
+ self.openssl_assert(dh_param_cdata != self._ffi.NULL)
+ dh_param_cdata = self._ffi.gc(dh_param_cdata, self._lib.DH_free)
+
+ res = self._lib.DH_generate_parameters_ex(
+ dh_param_cdata, key_size, generator, self._ffi.NULL
+ )
+ self.openssl_assert(res == 1)
+
+ return _DHParameters(self, dh_param_cdata)
+
+ def _dh_cdata_to_evp_pkey(self, dh_cdata):
+ evp_pkey = self._create_evp_pkey_gc()
+ res = self._lib.EVP_PKEY_set1_DH(evp_pkey, dh_cdata)
+ self.openssl_assert(res == 1)
+ return evp_pkey
+
+ def generate_dh_private_key(self, parameters):
+ dh_key_cdata = _dh_params_dup(parameters._dh_cdata, self)
+
+ res = self._lib.DH_generate_key(dh_key_cdata)
+ self.openssl_assert(res == 1)
+
+ evp_pkey = self._dh_cdata_to_evp_pkey(dh_key_cdata)
+
+ return _DHPrivateKey(self, dh_key_cdata, evp_pkey)
+
+ def generate_dh_private_key_and_parameters(self, generator, key_size):
+ return self.generate_dh_private_key(
+ self.generate_dh_parameters(generator, key_size)
+ )
+
+ def load_dh_private_numbers(self, numbers):
+ parameter_numbers = numbers.public_numbers.parameter_numbers
+
+ dh_cdata = self._lib.DH_new()
+ self.openssl_assert(dh_cdata != self._ffi.NULL)
+ dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
+
+ p = self._int_to_bn(parameter_numbers.p)
+ g = self._int_to_bn(parameter_numbers.g)
+
+ if parameter_numbers.q is not None:
+ q = self._int_to_bn(parameter_numbers.q)
+ else:
+ q = self._ffi.NULL
+
+ pub_key = self._int_to_bn(numbers.public_numbers.y)
+ priv_key = self._int_to_bn(numbers.x)
+
+ res = self._lib.DH_set0_pqg(dh_cdata, p, q, g)
+ self.openssl_assert(res == 1)
+
+ res = self._lib.DH_set0_key(dh_cdata, pub_key, priv_key)
+ self.openssl_assert(res == 1)
+
+ codes = self._ffi.new("int[]", 1)
+ res = self._lib.Cryptography_DH_check(dh_cdata, codes)
+ self.openssl_assert(res == 1)
+
+ # DH_check will return DH_NOT_SUITABLE_GENERATOR if p % 24 does not
+ # equal 11 when the generator is 2 (a quadratic nonresidue).
+ # We want to ignore that error because p % 24 == 23 is also fine.
+ # Specifically, g is then a quadratic residue. Within the context of
+ # Diffie-Hellman this means it can only generate half the possible
+ # values. That sounds bad, but quadratic nonresidues leak a bit of
+ # the key to the attacker in exchange for having the full key space
+ # available. See: https://crypto.stackexchange.com/questions/12961
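+ # Note that "codes[0] ^ DH_NOT_SUITABLE_GENERATOR == 0" is simply
+ # "codes[0] == DH_NOT_SUITABLE_GENERATOR": the load is allowed only
+ # when that is the sole flag DH_check raised and the generator is 2.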
+ if codes[0] != 0 and not (
+ parameter_numbers.g == 2
+ and codes[0] ^ self._lib.DH_NOT_SUITABLE_GENERATOR == 0
+ ):
+ raise ValueError("DH private numbers did not pass safety checks.")
+
+ evp_pkey = self._dh_cdata_to_evp_pkey(dh_cdata)
+
+ return _DHPrivateKey(self, dh_cdata, evp_pkey)
+
+ def load_dh_public_numbers(self, numbers):
+ dh_cdata = self._lib.DH_new()
+ self.openssl_assert(dh_cdata != self._ffi.NULL)
+ dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
+
+ parameter_numbers = numbers.parameter_numbers
+
+ p = self._int_to_bn(parameter_numbers.p)
+ g = self._int_to_bn(parameter_numbers.g)
+
+ if parameter_numbers.q is not None:
+ q = self._int_to_bn(parameter_numbers.q)
+ else:
+ q = self._ffi.NULL
+
+ pub_key = self._int_to_bn(numbers.y)
+
+ res = self._lib.DH_set0_pqg(dh_cdata, p, q, g)
+ self.openssl_assert(res == 1)
+
+ res = self._lib.DH_set0_key(dh_cdata, pub_key, self._ffi.NULL)
+ self.openssl_assert(res == 1)
+
+ evp_pkey = self._dh_cdata_to_evp_pkey(dh_cdata)
+
+ return _DHPublicKey(self, dh_cdata, evp_pkey)
+
+ def load_dh_parameter_numbers(self, numbers):
+ dh_cdata = self._lib.DH_new()
+ self.openssl_assert(dh_cdata != self._ffi.NULL)
+ dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
+
+ p = self._int_to_bn(numbers.p)
+ g = self._int_to_bn(numbers.g)
+
+ if numbers.q is not None:
+ q = self._int_to_bn(numbers.q)
+ else:
+ q = self._ffi.NULL
+
+ res = self._lib.DH_set0_pqg(dh_cdata, p, q, g)
+ self.openssl_assert(res == 1)
+
+ return _DHParameters(self, dh_cdata)
+
+ def dh_parameters_supported(self, p, g, q=None):
+ dh_cdata = self._lib.DH_new()
+ self.openssl_assert(dh_cdata != self._ffi.NULL)
+ dh_cdata = self._ffi.gc(dh_cdata, self._lib.DH_free)
+
+ p = self._int_to_bn(p)
+ g = self._int_to_bn(g)
+
+ if q is not None:
+ q = self._int_to_bn(q)
+ else:
+ q = self._ffi.NULL
+
+ res = self._lib.DH_set0_pqg(dh_cdata, p, q, g)
+ self.openssl_assert(res == 1)
+
+ codes = self._ffi.new("int[]", 1)
+ res = self._lib.Cryptography_DH_check(dh_cdata, codes)
+ self.openssl_assert(res == 1)
+
+ return codes[0] == 0
+
+ def dh_x942_serialization_supported(self):
+ return self._lib.Cryptography_HAS_EVP_PKEY_DHX == 1
+
+ def x509_name_bytes(self, name):
+ x509_name = _encode_name_gc(self, name)
+ pp = self._ffi.new("unsigned char **")
+ res = self._lib.i2d_X509_NAME(x509_name, pp)
+ self.openssl_assert(pp[0] != self._ffi.NULL)
+ pp = self._ffi.gc(
+ pp, lambda pointer: self._lib.OPENSSL_free(pointer[0])
+ )
+ self.openssl_assert(res > 0)
+ return self._ffi.buffer(pp[0], res)[:]
+
+ def x25519_load_public_bytes(self, data):
+ # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can
+ # switch this to EVP_PKEY_new_raw_public_key
+ if len(data) != 32:
+ raise ValueError("An X25519 public key is 32 bytes long")
+
+ evp_pkey = self._create_evp_pkey_gc()
+ res = self._lib.EVP_PKEY_set_type(evp_pkey, self._lib.NID_X25519)
+ self.openssl_assert(res == 1)
+ res = self._lib.EVP_PKEY_set1_tls_encodedpoint(
+ evp_pkey, data, len(data)
+ )
+ self.openssl_assert(res == 1)
+ return _X25519PublicKey(self, evp_pkey)
+
+ def x25519_load_private_bytes(self, data):
+ # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can
+ # switch this to EVP_PKEY_new_raw_private_key and drop the
+ # zeroed_bytearray garbage.
+ # OpenSSL only has facilities for loading PKCS8 formatted private
+ # keys using the algorithm identifiers specified in
+ # https://tools.ietf.org/html/draft-ietf-curdle-pkix-09.
+ # This is the standard PKCS8 prefix for a 32 byte X25519 key.
+ # The form is:
+ # 0:d=0 hl=2 l= 46 cons: SEQUENCE
+ # 2:d=1 hl=2 l= 1 prim: INTEGER :00
+ # 5:d=1 hl=2 l= 5 cons: SEQUENCE
+ # 7:d=2 hl=2 l= 3 prim: OBJECT :1.3.101.110
+ # 12:d=1 hl=2 l= 34 prim: OCTET STRING (the key)
+ # Of course there's a bit more complexity. In reality the outer
+ # OCTET STRING contains an inner OCTET STRING of length 32, so the
+ # last two bytes of the prefix are \x04\x20, the header of that
+ # inner OCTET STRING.
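+ # Illustrative breakdown of the 16-byte prefix used below (hex):
+ #   30 2e                   SEQUENCE, length 46
+ #   02 01 00                INTEGER 0 (version)
+ #   30 05 06 03 2b 65 6e    AlgorithmIdentifier, OID 1.3.101.110
+ #   04 22                   OCTET STRING, length 34
+ #   04 20                   inner OCTET STRING, length 32 (key follows)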
+ if len(data) != 32:
+ raise ValueError("An X25519 private key is 32 bytes long")
+
+ pkcs8_prefix = b'0.\x02\x01\x000\x05\x06\x03+en\x04"\x04 '
+ with self._zeroed_bytearray(48) as ba:
+ ba[0:16] = pkcs8_prefix
+ ba[16:] = data
+ bio = self._bytes_to_bio(ba)
+ evp_pkey = self._lib.d2i_PrivateKey_bio(bio.bio, self._ffi.NULL)
+
+ self.openssl_assert(evp_pkey != self._ffi.NULL)
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+ self.openssl_assert(
+ self._lib.EVP_PKEY_id(evp_pkey) == self._lib.EVP_PKEY_X25519
+ )
+ return _X25519PrivateKey(self, evp_pkey)
+
+ def _evp_pkey_keygen_gc(self, nid):
+ evp_pkey_ctx = self._lib.EVP_PKEY_CTX_new_id(nid, self._ffi.NULL)
+ self.openssl_assert(evp_pkey_ctx != self._ffi.NULL)
+ evp_pkey_ctx = self._ffi.gc(evp_pkey_ctx, self._lib.EVP_PKEY_CTX_free)
+ res = self._lib.EVP_PKEY_keygen_init(evp_pkey_ctx)
+ self.openssl_assert(res == 1)
+ evp_ppkey = self._ffi.new("EVP_PKEY **")
+ res = self._lib.EVP_PKEY_keygen(evp_pkey_ctx, evp_ppkey)
+ self.openssl_assert(res == 1)
+ self.openssl_assert(evp_ppkey[0] != self._ffi.NULL)
+ evp_pkey = self._ffi.gc(evp_ppkey[0], self._lib.EVP_PKEY_free)
+ return evp_pkey
+
+ def x25519_generate_key(self):
+ evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_X25519)
+ return _X25519PrivateKey(self, evp_pkey)
+
+ def x25519_supported(self):
+ if self._fips_enabled:
+ return False
+ return not self._lib.CRYPTOGRAPHY_IS_LIBRESSL
+
+ def x448_load_public_bytes(self, data):
+ if len(data) != 56:
+ raise ValueError("An X448 public key is 56 bytes long")
+
+ evp_pkey = self._lib.EVP_PKEY_new_raw_public_key(
+ self._lib.NID_X448, self._ffi.NULL, data, len(data)
+ )
+ self.openssl_assert(evp_pkey != self._ffi.NULL)
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+ return _X448PublicKey(self, evp_pkey)
+
+ def x448_load_private_bytes(self, data):
+ if len(data) != 56:
+ raise ValueError("An X448 private key is 56 bytes long")
+
+ data_ptr = self._ffi.from_buffer(data)
+ evp_pkey = self._lib.EVP_PKEY_new_raw_private_key(
+ self._lib.NID_X448, self._ffi.NULL, data_ptr, len(data)
+ )
+ self.openssl_assert(evp_pkey != self._ffi.NULL)
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+ return _X448PrivateKey(self, evp_pkey)
+
+ def x448_generate_key(self):
+ evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_X448)
+ return _X448PrivateKey(self, evp_pkey)
+
+ def x448_supported(self):
+ if self._fips_enabled:
+ return False
+ return not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111
+
+ def ed25519_supported(self):
+ if self._fips_enabled:
+ return False
+ return not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111B
+
+ def ed25519_load_public_bytes(self, data):
+ utils._check_bytes("data", data)
+
+ if len(data) != ed25519._ED25519_KEY_SIZE:
+ raise ValueError("An Ed25519 public key is 32 bytes long")
+
+ evp_pkey = self._lib.EVP_PKEY_new_raw_public_key(
+ self._lib.NID_ED25519, self._ffi.NULL, data, len(data)
+ )
+ self.openssl_assert(evp_pkey != self._ffi.NULL)
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+
+ return _Ed25519PublicKey(self, evp_pkey)
+
+ def ed25519_load_private_bytes(self, data):
+ if len(data) != ed25519._ED25519_KEY_SIZE:
+ raise ValueError("An Ed25519 private key is 32 bytes long")
+
+ utils._check_byteslike("data", data)
+ data_ptr = self._ffi.from_buffer(data)
+ evp_pkey = self._lib.EVP_PKEY_new_raw_private_key(
+ self._lib.NID_ED25519, self._ffi.NULL, data_ptr, len(data)
+ )
+ self.openssl_assert(evp_pkey != self._ffi.NULL)
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+
+ return _Ed25519PrivateKey(self, evp_pkey)
+
+ def ed25519_generate_key(self):
+ evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_ED25519)
+ return _Ed25519PrivateKey(self, evp_pkey)
+
+ def ed448_supported(self):
+ if self._fips_enabled:
+ return False
+ return not self._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_111B
+
+ def ed448_load_public_bytes(self, data):
+ utils._check_bytes("data", data)
+ if len(data) != _ED448_KEY_SIZE:
+ raise ValueError("An Ed448 public key is 57 bytes long")
+
+ evp_pkey = self._lib.EVP_PKEY_new_raw_public_key(
+ self._lib.NID_ED448, self._ffi.NULL, data, len(data)
+ )
+ self.openssl_assert(evp_pkey != self._ffi.NULL)
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+
+ return _Ed448PublicKey(self, evp_pkey)
+
+ def ed448_load_private_bytes(self, data):
+ utils._check_byteslike("data", data)
+ if len(data) != _ED448_KEY_SIZE:
+ raise ValueError("An Ed448 private key is 57 bytes long")
+
+ data_ptr = self._ffi.from_buffer(data)
+ evp_pkey = self._lib.EVP_PKEY_new_raw_private_key(
+ self._lib.NID_ED448, self._ffi.NULL, data_ptr, len(data)
+ )
+ self.openssl_assert(evp_pkey != self._ffi.NULL)
+ evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free)
+
+ return _Ed448PrivateKey(self, evp_pkey)
+
+ def ed448_generate_key(self):
+ evp_pkey = self._evp_pkey_keygen_gc(self._lib.NID_ED448)
+ return _Ed448PrivateKey(self, evp_pkey)
+
+ def derive_scrypt(self, key_material, salt, length, n, r, p):
+ buf = self._ffi.new("unsigned char[]", length)
+ key_material_ptr = self._ffi.from_buffer(key_material)
+ res = self._lib.EVP_PBE_scrypt(
+ key_material_ptr,
+ len(key_material),
+ salt,
+ len(salt),
+ n,
+ r,
+ p,
+ scrypt._MEM_LIMIT,
+ buf,
+ length,
+ )
+ if res != 1:
+ errors = self._consume_errors_with_text()
+ # memory required formula explained here:
+ # https://blog.filippo.io/the-scrypt-parameters/
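+ # e.g. n=2**14, r=8 requires roughly 128 * 2**14 * 8 bytes = 16 MiB.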
+ min_memory = 128 * n * r // (1024 ** 2)
+ raise MemoryError(
+ "Not enough memory to derive key. These parameters require"
+ " {} MB of memory.".format(min_memory),
+ errors,
+ )
+ return self._ffi.buffer(buf)[:]
+
+ def aead_cipher_supported(self, cipher):
+ cipher_name = aead._aead_cipher_name(cipher)
+ if self._fips_enabled and cipher_name not in self._fips_aead:
+ return False
+ return self._lib.EVP_get_cipherbyname(cipher_name) != self._ffi.NULL
+
+ @contextlib.contextmanager
+ def _zeroed_bytearray(self, length):
+ """
+ This method creates a bytearray, which we copy data into (hopefully
+ also from a mutable buffer that can be dynamically erased!), and then
+ zero when we're done.
+ """
+ ba = bytearray(length)
+ try:
+ yield ba
+ finally:
+ self._zero_data(ba, length)
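+ # Minimal usage sketch (illustrative): data copied into the yielded
+ # buffer is wiped when the block exits, e.g.
+ #
+ #   with self._zeroed_bytearray(4) as ba:
+ #       ba[0:4] = b"\x01\x02\x03\x04"
+ #       ...  # use ba while it still holds the secret
+ #   # afterwards ba == bytearray(4)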
+
+ def _zero_data(self, data, length):
+ # We clear things this way because at the moment we're not
+ # sure of a better way that can guarantee it overwrites the
+ # memory of a bytearray and doesn't just replace the underlying char *.
+ for i in range(length):
+ data[i] = 0
+
+ @contextlib.contextmanager
+ def _zeroed_null_terminated_buf(self, data):
+ """
+ This method takes bytes, which can be a bytestring or a mutable
+ buffer like a bytearray, and yields a null-terminated version of that
+ data. This is required because PKCS12_parse doesn't take a length with
+ its password char * and ffi.from_buffer doesn't provide null
+ termination. So, to support zeroing the data via bytearray we
+ need to build this ridiculous construct that copies the memory, but
+ zeroes it after use.
+ """
+ if data is None:
+ yield self._ffi.NULL
+ else:
+ data_len = len(data)
+ buf = self._ffi.new("char[]", data_len + 1)
+ self._ffi.memmove(buf, data, data_len)
+ try:
+ yield buf
+ finally:
+ # Cast to a uint8_t * so we can assign by integer
+ self._zero_data(self._ffi.cast("uint8_t *", buf), data_len)
+
+ def load_key_and_certificates_from_pkcs12(self, data, password):
+ if password is not None:
+ utils._check_byteslike("password", password)
+
+ bio = self._bytes_to_bio(data)
+ p12 = self._lib.d2i_PKCS12_bio(bio.bio, self._ffi.NULL)
+ if p12 == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Could not deserialize PKCS12 data")
+
+ p12 = self._ffi.gc(p12, self._lib.PKCS12_free)
+ evp_pkey_ptr = self._ffi.new("EVP_PKEY **")
+ x509_ptr = self._ffi.new("X509 **")
+ sk_x509_ptr = self._ffi.new("Cryptography_STACK_OF_X509 **")
+ with self._zeroed_null_terminated_buf(password) as password_buf:
+ res = self._lib.PKCS12_parse(
+ p12, password_buf, evp_pkey_ptr, x509_ptr, sk_x509_ptr
+ )
+
+ if res == 0:
+ self._consume_errors()
+ raise ValueError("Invalid password or PKCS12 data")
+
+ cert = None
+ key = None
+ additional_certificates = []
+
+ if evp_pkey_ptr[0] != self._ffi.NULL:
+ evp_pkey = self._ffi.gc(evp_pkey_ptr[0], self._lib.EVP_PKEY_free)
+ key = self._evp_pkey_to_private_key(evp_pkey)
+
+ if x509_ptr[0] != self._ffi.NULL:
+ x509 = self._ffi.gc(x509_ptr[0], self._lib.X509_free)
+ cert = _Certificate(self, x509)
+
+ if sk_x509_ptr[0] != self._ffi.NULL:
+ sk_x509 = self._ffi.gc(sk_x509_ptr[0], self._lib.sk_X509_free)
+ num = self._lib.sk_X509_num(sk_x509_ptr[0])
+ for i in range(num):
+ x509 = self._lib.sk_X509_value(sk_x509, i)
+ self.openssl_assert(x509 != self._ffi.NULL)
+ x509 = self._ffi.gc(x509, self._lib.X509_free)
+ additional_certificates.append(_Certificate(self, x509))
+
+ return (key, cert, additional_certificates)
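+ # Typically reached through the public API, e.g. (sketch, assuming a
+ # DER-encoded PKCS12 blob in `data`):
+ #
+ #   from cryptography.hazmat.primitives.serialization import pkcs12
+ #   key, cert, extra_certs = pkcs12.load_key_and_certificates(
+ #       data, b"password", backend
+ #   )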
+
+ def serialize_key_and_certificates_to_pkcs12(
+ self, name, key, cert, cas, encryption_algorithm
+ ):
+ password = None
+ if name is not None:
+ utils._check_bytes("name", name)
+
+ if isinstance(encryption_algorithm, serialization.NoEncryption):
+ nid_cert = -1
+ nid_key = -1
+ pkcs12_iter = 0
+ mac_iter = 0
+ elif isinstance(
+ encryption_algorithm, serialization.BestAvailableEncryption
+ ):
+ # PKCS12 encryption is hopeless trash and can never be fixed.
+ # This is the least terrible option.
+ nid_cert = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC
+ nid_key = self._lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC
+ # At least we can set this higher than OpenSSL's default
+ pkcs12_iter = 20000
+ # mac_iter chosen for compatibility reasons, see:
+ # https://www.openssl.org/docs/man1.1.1/man3/PKCS12_create.html
+ # Did we mention how lousy PKCS12 encryption is?
+ mac_iter = 1
+ password = encryption_algorithm.password
+ else:
+ raise ValueError("Unsupported key encryption type")
+
+ if cas is None or len(cas) == 0:
+ sk_x509 = self._ffi.NULL
+ else:
+ sk_x509 = self._lib.sk_X509_new_null()
+ sk_x509 = self._ffi.gc(sk_x509, self._lib.sk_X509_free)
+
+ # reverse the list when building the stack so that they're encoded
+ # in the order they were originally provided. Why the encoded order
+ # ends up reversed is a mystery.
+ for ca in reversed(cas):
+ res = self._lib.sk_X509_push(sk_x509, ca._x509)
+ backend.openssl_assert(res >= 1)
+
+ with self._zeroed_null_terminated_buf(password) as password_buf:
+ with self._zeroed_null_terminated_buf(name) as name_buf:
+ p12 = self._lib.PKCS12_create(
+ password_buf,
+ name_buf,
+ key._evp_pkey if key else self._ffi.NULL,
+ cert._x509 if cert else self._ffi.NULL,
+ sk_x509,
+ nid_key,
+ nid_cert,
+ pkcs12_iter,
+ mac_iter,
+ 0,
+ )
+
+ self.openssl_assert(p12 != self._ffi.NULL)
+ p12 = self._ffi.gc(p12, self._lib.PKCS12_free)
+
+ bio = self._create_mem_bio_gc()
+ res = self._lib.i2d_PKCS12_bio(bio, p12)
+ self.openssl_assert(res > 0)
+ return self._read_mem_bio(bio)
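+ # Public-API counterpart (sketch; NoEncryption maps to the
+ # -1/-1/0/0 branch above):
+ #
+ #   from cryptography.hazmat.primitives import serialization
+ #   from cryptography.hazmat.primitives.serialization import pkcs12
+ #   blob = pkcs12.serialize_key_and_certificates(
+ #       b"friendlyname", key, cert, None,
+ #       serialization.NoEncryption(),
+ #   )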
+
+ def poly1305_supported(self):
+ if self._fips_enabled:
+ return False
+ return self._lib.Cryptography_HAS_POLY1305 == 1
+
+ def create_poly1305_ctx(self, key):
+ utils._check_byteslike("key", key)
+ if len(key) != _POLY1305_KEY_SIZE:
+ raise ValueError("A poly1305 key is 32 bytes long")
+
+ return _Poly1305Context(self, key)
+
+ def load_pem_pkcs7_certificates(self, data):
+ utils._check_bytes("data", data)
+ bio = self._bytes_to_bio(data)
+ p7 = self._lib.PEM_read_bio_PKCS7(
+ bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL
+ )
+ if p7 == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Unable to parse PKCS7 data")
+
+ p7 = self._ffi.gc(p7, self._lib.PKCS7_free)
+ return self._load_pkcs7_certificates(p7)
+
+ def load_der_pkcs7_certificates(self, data):
+ utils._check_bytes("data", data)
+ bio = self._bytes_to_bio(data)
+ p7 = self._lib.d2i_PKCS7_bio(bio.bio, self._ffi.NULL)
+ if p7 == self._ffi.NULL:
+ self._consume_errors()
+ raise ValueError("Unable to parse PKCS7 data")
+
+ p7 = self._ffi.gc(p7, self._lib.PKCS7_free)
+ return self._load_pkcs7_certificates(p7)
+
+ def _load_pkcs7_certificates(self, p7):
+ nid = self._lib.OBJ_obj2nid(p7.type)
+ self.openssl_assert(nid != self._lib.NID_undef)
+ if nid != self._lib.NID_pkcs7_signed:
+ raise UnsupportedAlgorithm(
+ "Only basic signed structures are currently supported. NID"
+ " for this data was {}".format(nid),
+ _Reasons.UNSUPPORTED_SERIALIZATION,
+ )
+
+ sk_x509 = p7.d.sign.cert
+ num = self._lib.sk_X509_num(sk_x509)
+ certs = []
+ for i in range(num):
+ x509 = self._lib.sk_X509_value(sk_x509, i)
+ self.openssl_assert(x509 != self._ffi.NULL)
+ res = self._lib.X509_up_ref(x509)
+ # When OpenSSL is less than 1.1.0 up_ref returns the current
+ # refcount. On 1.1.0+ it returns 1 for success.
+ self.openssl_assert(res >= 1)
+ x509 = self._ffi.gc(x509, self._lib.X509_free)
+ certs.append(_Certificate(self, x509))
+
+ return certs
+
+ def pkcs7_sign(self, builder, encoding, options):
+ bio = self._bytes_to_bio(builder._data)
+ init_flags = self._lib.PKCS7_PARTIAL
+ final_flags = 0
+
+ if len(builder._additional_certs) == 0:
+ certs = self._ffi.NULL
+ else:
+ certs = self._lib.sk_X509_new_null()
+ certs = self._ffi.gc(certs, self._lib.sk_X509_free)
+ for cert in builder._additional_certs:
+ res = self._lib.sk_X509_push(certs, cert._x509)
+ self.openssl_assert(res >= 1)
+
+ if pkcs7.PKCS7Options.DetachedSignature in options:
+ # Don't embed the data in the PKCS7 structure
+ init_flags |= self._lib.PKCS7_DETACHED
+ final_flags |= self._lib.PKCS7_DETACHED
+
+ # This just inits a structure for us. However, there
+ # are flags we need to set, joy.
+ p7 = self._lib.PKCS7_sign(
+ self._ffi.NULL,
+ self._ffi.NULL,
+ certs,
+ self._ffi.NULL,
+ init_flags,
+ )
+ self.openssl_assert(p7 != self._ffi.NULL)
+ p7 = self._ffi.gc(p7, self._lib.PKCS7_free)
+ signer_flags = 0
+ # These flags are configurable on a per-signature basis
+ # but we've deliberately chosen to make the API only allow
+ # setting them across all signatures for now.
+ if pkcs7.PKCS7Options.NoCapabilities in options:
+ signer_flags |= self._lib.PKCS7_NOSMIMECAP
+ elif pkcs7.PKCS7Options.NoAttributes in options:
+ signer_flags |= self._lib.PKCS7_NOATTR
+
+ if pkcs7.PKCS7Options.NoCerts in options:
+ signer_flags |= self._lib.PKCS7_NOCERTS
+
+ for certificate, private_key, hash_algorithm in builder._signers:
+ md = self._evp_md_non_null_from_algorithm(hash_algorithm)
+ p7signerinfo = self._lib.PKCS7_sign_add_signer(
+ p7, certificate._x509, private_key._evp_pkey, md, signer_flags
+ )
+ self.openssl_assert(p7signerinfo != self._ffi.NULL)
+
+ for option in options:
+ # DetachedSignature, NoCapabilities, and NoAttributes are already
+ # handled so we just need to check these last two options.
+ if option is pkcs7.PKCS7Options.Text:
+ final_flags |= self._lib.PKCS7_TEXT
+ elif option is pkcs7.PKCS7Options.Binary:
+ final_flags |= self._lib.PKCS7_BINARY
+
+ bio_out = self._create_mem_bio_gc()
+ if encoding is serialization.Encoding.SMIME:
+ # This finalizes the structure
+ res = self._lib.SMIME_write_PKCS7(
+ bio_out, p7, bio.bio, final_flags
+ )
+ elif encoding is serialization.Encoding.PEM:
+ res = self._lib.PKCS7_final(p7, bio.bio, final_flags)
+ self.openssl_assert(res == 1)
+ res = self._lib.PEM_write_bio_PKCS7_stream(
+ bio_out, p7, bio.bio, final_flags
+ )
+ else:
+ assert encoding is serialization.Encoding.DER
+ # We need to call finalize here because i2d_PKCS7_bio does not
+ # finalize.
+ res = self._lib.PKCS7_final(p7, bio.bio, final_flags)
+ self.openssl_assert(res == 1)
+ res = self._lib.i2d_PKCS7_bio(bio_out, p7)
+ self.openssl_assert(res == 1)
+ return self._read_mem_bio(bio_out)
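+ # Reached via the public PKCS7 builder, roughly (sketch):
+ #
+ #   from cryptography.hazmat.primitives import hashes, serialization
+ #   from cryptography.hazmat.primitives.serialization import pkcs7
+ #   sig = (
+ #       pkcs7.PKCS7SignatureBuilder()
+ #       .set_data(b"hello")
+ #       .add_signer(cert, key, hashes.SHA256())
+ #       .sign(serialization.Encoding.SMIME, [])
+ #   )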
+
+
+class GetCipherByName(object):
+ def __init__(self, fmt):
+ self._fmt = fmt
+
+ def __call__(self, backend, cipher, mode):
+ cipher_name = self._fmt.format(cipher=cipher, mode=mode).lower()
+ return backend._lib.EVP_get_cipherbyname(cipher_name.encode("ascii"))
+
+
+def _get_xts_cipher(backend, cipher, mode):
+ cipher_name = "aes-{}-xts".format(cipher.key_size // 2)
+ return backend._lib.EVP_get_cipherbyname(cipher_name.encode("ascii"))
+
+
+backend = Backend()
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ciphers.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ciphers.py
new file mode 100644
index 0000000000..ad5dad3f7e
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ciphers.py
@@ -0,0 +1,231 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+from cryptography.exceptions import InvalidTag, UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.primitives import ciphers
+from cryptography.hazmat.primitives.ciphers import modes
+
+
+@utils.register_interface(ciphers.CipherContext)
+@utils.register_interface(ciphers.AEADCipherContext)
+@utils.register_interface(ciphers.AEADEncryptionContext)
+@utils.register_interface(ciphers.AEADDecryptionContext)
+class _CipherContext(object):
+ _ENCRYPT = 1
+ _DECRYPT = 0
+ _MAX_CHUNK_SIZE = 2 ** 30 - 1
+
+ def __init__(self, backend, cipher, mode, operation):
+ self._backend = backend
+ self._cipher = cipher
+ self._mode = mode
+ self._operation = operation
+ self._tag = None
+
+ if isinstance(self._cipher, ciphers.BlockCipherAlgorithm):
+ self._block_size_bytes = self._cipher.block_size // 8
+ else:
+ self._block_size_bytes = 1
+
+ ctx = self._backend._lib.EVP_CIPHER_CTX_new()
+ ctx = self._backend._ffi.gc(
+ ctx, self._backend._lib.EVP_CIPHER_CTX_free
+ )
+
+ registry = self._backend._cipher_registry
+ try:
+ adapter = registry[type(cipher), type(mode)]
+ except KeyError:
+ raise UnsupportedAlgorithm(
+ "cipher {} in {} mode is not supported "
+ "by this backend.".format(
+ cipher.name, mode.name if mode else mode
+ ),
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
+
+ evp_cipher = adapter(self._backend, cipher, mode)
+ if evp_cipher == self._backend._ffi.NULL:
+ msg = "cipher {0.name} ".format(cipher)
+ if mode is not None:
+ msg += "in {0.name} mode ".format(mode)
+ msg += (
+ "is not supported by this backend (Your version of OpenSSL "
+ "may be too old. Current version: {}.)"
+ ).format(self._backend.openssl_version_text())
+ raise UnsupportedAlgorithm(msg, _Reasons.UNSUPPORTED_CIPHER)
+
+ if isinstance(mode, modes.ModeWithInitializationVector):
+ iv_nonce = self._backend._ffi.from_buffer(
+ mode.initialization_vector
+ )
+ elif isinstance(mode, modes.ModeWithTweak):
+ iv_nonce = self._backend._ffi.from_buffer(mode.tweak)
+ elif isinstance(mode, modes.ModeWithNonce):
+ iv_nonce = self._backend._ffi.from_buffer(mode.nonce)
+ elif isinstance(cipher, modes.ModeWithNonce):
+ iv_nonce = self._backend._ffi.from_buffer(cipher.nonce)
+ else:
+ iv_nonce = self._backend._ffi.NULL
+ # begin init with cipher and operation type
+ res = self._backend._lib.EVP_CipherInit_ex(
+ ctx,
+ evp_cipher,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ operation,
+ )
+ self._backend.openssl_assert(res != 0)
+ # set the key length to handle variable key ciphers
+ res = self._backend._lib.EVP_CIPHER_CTX_set_key_length(
+ ctx, len(cipher.key)
+ )
+ self._backend.openssl_assert(res != 0)
+ if isinstance(mode, modes.GCM):
+ res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx,
+ self._backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
+ len(iv_nonce),
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(res != 0)
+ if mode.tag is not None:
+ res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
+ ctx,
+ self._backend._lib.EVP_CTRL_AEAD_SET_TAG,
+ len(mode.tag),
+ mode.tag,
+ )
+ self._backend.openssl_assert(res != 0)
+ self._tag = mode.tag
+
+ # pass key/iv
+ res = self._backend._lib.EVP_CipherInit_ex(
+ ctx,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.from_buffer(cipher.key),
+ iv_nonce,
+ operation,
+ )
+ self._backend.openssl_assert(res != 0)
+ # We purposely disable padding here as it's handled higher up in the
+ # API.
+ self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0)
+ self._ctx = ctx
+
+ def update(self, data):
+ buf = bytearray(len(data) + self._block_size_bytes - 1)
+ n = self.update_into(data, buf)
+ return bytes(buf[:n])
+
+ def update_into(self, data, buf):
+ total_data_len = len(data)
+ if len(buf) < (total_data_len + self._block_size_bytes - 1):
+ raise ValueError(
+ "buffer must be at least {} bytes for this "
+ "payload".format(len(data) + self._block_size_bytes - 1)
+ )
+
+ data_processed = 0
+ total_out = 0
+ outlen = self._backend._ffi.new("int *")
+ baseoutbuf = self._backend._ffi.from_buffer(buf)
+ baseinbuf = self._backend._ffi.from_buffer(data)
+
+ while data_processed != total_data_len:
+ outbuf = baseoutbuf + total_out
+ inbuf = baseinbuf + data_processed
+ inlen = min(self._MAX_CHUNK_SIZE, total_data_len - data_processed)
+
+ res = self._backend._lib.EVP_CipherUpdate(
+ self._ctx, outbuf, outlen, inbuf, inlen
+ )
+ self._backend.openssl_assert(res != 0)
+ data_processed += inlen
+ total_out += outlen[0]
+
+ return total_out
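+ # Illustrative: with _MAX_CHUNK_SIZE = 2**30 - 1, a 3 GiB payload is
+ # split into three chunks of 2**30 - 1 bytes plus a final 3-byte
+ # chunk, so each length passed to EVP_CipherUpdate fits in a C int.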
+
+ def finalize(self):
+ if (
+ self._operation == self._DECRYPT
+ and isinstance(self._mode, modes.ModeWithAuthenticationTag)
+ and self.tag is None
+ ):
+ raise ValueError(
+ "Authentication tag must be provided when decrypting."
+ )
+
+ buf = self._backend._ffi.new("unsigned char[]", self._block_size_bytes)
+ outlen = self._backend._ffi.new("int *")
+ res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen)
+ if res == 0:
+ errors = self._backend._consume_errors()
+
+ if not errors and isinstance(self._mode, modes.GCM):
+ raise InvalidTag
+
+ self._backend.openssl_assert(
+ errors[0]._lib_reason_match(
+ self._backend._lib.ERR_LIB_EVP,
+ self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH,
+ ),
+ errors=errors,
+ )
+ raise ValueError(
+ "The length of the provided data is not a multiple of "
+ "the block length."
+ )
+
+ if (
+ isinstance(self._mode, modes.GCM)
+ and self._operation == self._ENCRYPT
+ ):
+ tag_buf = self._backend._ffi.new(
+ "unsigned char[]", self._block_size_bytes
+ )
+ res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
+ self._ctx,
+ self._backend._lib.EVP_CTRL_AEAD_GET_TAG,
+ self._block_size_bytes,
+ tag_buf,
+ )
+ self._backend.openssl_assert(res != 0)
+ self._tag = self._backend._ffi.buffer(tag_buf)[:]
+
+ res = self._backend._lib.EVP_CIPHER_CTX_cleanup(self._ctx)
+ self._backend.openssl_assert(res == 1)
+ return self._backend._ffi.buffer(buf)[: outlen[0]]
+
+ def finalize_with_tag(self, tag):
+ if len(tag) < self._mode._min_tag_length:
+ raise ValueError(
+ "Authentication tag must be {} bytes or longer.".format(
+ self._mode._min_tag_length
+ )
+ )
+ res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
+ self._ctx, self._backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag
+ )
+ self._backend.openssl_assert(res != 0)
+ self._tag = tag
+ return self.finalize()
+
+ def authenticate_additional_data(self, data):
+ outlen = self._backend._ffi.new("int *")
+ res = self._backend._lib.EVP_CipherUpdate(
+ self._ctx,
+ self._backend._ffi.NULL,
+ outlen,
+ self._backend._ffi.from_buffer(data),
+ len(data),
+ )
+ self._backend.openssl_assert(res != 0)
+
+ tag = utils.read_only_property("_tag")
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/cmac.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/cmac.py
new file mode 100644
index 0000000000..195fc230f2
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/cmac.py
@@ -0,0 +1,82 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+
+from cryptography import utils
+from cryptography.exceptions import (
+ InvalidSignature,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.primitives import constant_time
+from cryptography.hazmat.primitives.ciphers.modes import CBC
+
+
+class _CMACContext(object):
+ def __init__(self, backend, algorithm, ctx=None):
+ if not backend.cmac_algorithm_supported(algorithm):
+ raise UnsupportedAlgorithm(
+ "This backend does not support CMAC.",
+ _Reasons.UNSUPPORTED_CIPHER,
+ )
+
+ self._backend = backend
+ self._key = algorithm.key
+ self._algorithm = algorithm
+ self._output_length = algorithm.block_size // 8
+
+ if ctx is None:
+ registry = self._backend._cipher_registry
+ adapter = registry[type(algorithm), CBC]
+
+ evp_cipher = adapter(self._backend, algorithm, CBC)
+
+ ctx = self._backend._lib.CMAC_CTX_new()
+
+ self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
+ ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free)
+
+ key_ptr = self._backend._ffi.from_buffer(self._key)
+ res = self._backend._lib.CMAC_Init(
+ ctx,
+ key_ptr,
+ len(self._key),
+ evp_cipher,
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(res == 1)
+
+ self._ctx = ctx
+
+ algorithm = utils.read_only_property("_algorithm")
+
+ def update(self, data):
+ res = self._backend._lib.CMAC_Update(self._ctx, data, len(data))
+ self._backend.openssl_assert(res == 1)
+
+ def finalize(self):
+ buf = self._backend._ffi.new("unsigned char[]", self._output_length)
+ length = self._backend._ffi.new("size_t *", self._output_length)
+ res = self._backend._lib.CMAC_Final(self._ctx, buf, length)
+ self._backend.openssl_assert(res == 1)
+
+ self._ctx = None
+
+ return self._backend._ffi.buffer(buf)[:]
+
+ def copy(self):
+ copied_ctx = self._backend._lib.CMAC_CTX_new()
+ copied_ctx = self._backend._ffi.gc(
+ copied_ctx, self._backend._lib.CMAC_CTX_free
+ )
+ res = self._backend._lib.CMAC_CTX_copy(copied_ctx, self._ctx)
+ self._backend.openssl_assert(res == 1)
+ return _CMACContext(self._backend, self._algorithm, ctx=copied_ctx)
+
+ def verify(self, signature):
+ digest = self.finalize()
+ if not constant_time.bytes_eq(digest, signature):
+ raise InvalidSignature("Signature did not match digest.")
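+ # Typical entry point is the public CMAC API (sketch):
+ #
+ #   from cryptography.hazmat.primitives.cmac import CMAC
+ #   from cryptography.hazmat.primitives.ciphers.algorithms import AES
+ #   c = CMAC(AES(b"\x00" * 16), backend)
+ #   c.update(b"message")
+ #   tag = c.finalize()  # 16 bytes for AES (block_size // 8)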
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/decode_asn1.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/decode_asn1.py
new file mode 100644
index 0000000000..cc9b8c0e34
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/decode_asn1.py
@@ -0,0 +1,878 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import datetime
+import ipaddress
+
+import six
+
+from cryptography import x509
+from cryptography.hazmat._der import DERReader, INTEGER, NULL, SEQUENCE
+from cryptography.x509.extensions import _TLS_FEATURE_TYPE_TO_ENUM
+from cryptography.x509.name import _ASN1_TYPE_TO_ENUM
+from cryptography.x509.oid import (
+ CRLEntryExtensionOID,
+ CertificatePoliciesOID,
+ ExtensionOID,
+ OCSPExtensionOID,
+)
+
+
+def _obj2txt(backend, obj):
+ # Set to 80 on the recommendation of
+ # https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values
+ #
+ # But OIDs longer than this occur in real life (e.g. Active
+ # Directory makes some very long OIDs). So we need to detect
+ # and properly handle the case where the default buffer is not
+ # big enough.
+ #
+ buf_len = 80
+ buf = backend._ffi.new("char[]", buf_len)
+
+ # 'res' is the number of bytes that *would* be written if the
+ # buffer is large enough. If 'res' > buf_len - 1, we need to
+ # alloc a big-enough buffer and go again.
+ res = backend._lib.OBJ_obj2txt(buf, buf_len, obj, 1)
+ if res > buf_len - 1: # account for terminating null byte
+ buf_len = res + 1
+ buf = backend._ffi.new("char[]", buf_len)
+ res = backend._lib.OBJ_obj2txt(buf, buf_len, obj, 1)
+ backend.openssl_assert(res > 0)
+ return backend._ffi.buffer(buf, res)[:].decode()
+
+
+def _decode_x509_name_entry(backend, x509_name_entry):
+ obj = backend._lib.X509_NAME_ENTRY_get_object(x509_name_entry)
+ backend.openssl_assert(obj != backend._ffi.NULL)
+ data = backend._lib.X509_NAME_ENTRY_get_data(x509_name_entry)
+ backend.openssl_assert(data != backend._ffi.NULL)
+ value = _asn1_string_to_utf8(backend, data)
+ oid = _obj2txt(backend, obj)
+ type = _ASN1_TYPE_TO_ENUM[data.type]
+
+ return x509.NameAttribute(x509.ObjectIdentifier(oid), value, type)
+
+
+def _decode_x509_name(backend, x509_name):
+ count = backend._lib.X509_NAME_entry_count(x509_name)
+ attributes = []
+ prev_set_id = -1
+ for x in range(count):
+ entry = backend._lib.X509_NAME_get_entry(x509_name, x)
+ attribute = _decode_x509_name_entry(backend, entry)
+ set_id = backend._lib.X509_NAME_ENTRY_set(entry)
+ if set_id != prev_set_id:
+ attributes.append({attribute})
+ else:
+ # it is in the same RDN as a previous entry
+ attributes[-1].add(attribute)
+ prev_set_id = set_id
+
+ return x509.Name(x509.RelativeDistinguishedName(rdn) for rdn in attributes)
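+ # Illustrative: entries whose X509_NAME_ENTRY_set values are 0, 1, 1, 2
+ # are grouped into three RDNs of sizes 1, 2 and 1, so multi-valued
+ # RDNs survive the round trip.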
+
+
+def _decode_general_names(backend, gns):
+ num = backend._lib.sk_GENERAL_NAME_num(gns)
+ names = []
+ for i in range(num):
+ gn = backend._lib.sk_GENERAL_NAME_value(gns, i)
+ backend.openssl_assert(gn != backend._ffi.NULL)
+ names.append(_decode_general_name(backend, gn))
+
+ return names
+
+
+def _decode_general_name(backend, gn):
+ if gn.type == backend._lib.GEN_DNS:
+ # Convert to bytes and then decode to utf8. We don't use
+ # asn1_string_to_utf8 here because it doesn't properly convert
+ # utf8 from ia5strings.
+ data = _asn1_string_to_bytes(backend, gn.d.dNSName).decode("utf8")
+ # We don't use the constructor for DNSName so we can bypass validation
+ # This allows us to create DNSName objects that have unicode chars
+ # when a certificate (against the RFC) contains them.
+ return x509.DNSName._init_without_validation(data)
+ elif gn.type == backend._lib.GEN_URI:
+ # Convert to bytes and then decode to utf8. We don't use
+ # asn1_string_to_utf8 here because it doesn't properly convert
+ # utf8 from ia5strings.
+ data = _asn1_string_to_bytes(
+ backend, gn.d.uniformResourceIdentifier
+ ).decode("utf8")
+ # We don't use the constructor for URI so we can bypass validation
+ # This allows us to create URI objects that have unicode chars
+ # when a certificate (against the RFC) contains them.
+ return x509.UniformResourceIdentifier._init_without_validation(data)
+ elif gn.type == backend._lib.GEN_RID:
+ oid = _obj2txt(backend, gn.d.registeredID)
+ return x509.RegisteredID(x509.ObjectIdentifier(oid))
+ elif gn.type == backend._lib.GEN_IPADD:
+ data = _asn1_string_to_bytes(backend, gn.d.iPAddress)
+ data_len = len(data)
+ if data_len == 8 or data_len == 32:
+ # This is an IPv4 or IPv6 Network and not a single IP. This
+ # type of data appears in Name Constraints. Unfortunately,
+ # ipaddress doesn't support packed bytes + netmask. Additionally,
+ # IPv6Network can only handle CIDR rather than the full 16 byte
+ # netmask. To handle this we convert the netmask to integer, then
+ # find the first 0 bit, which will be the prefix. If another 1
+ # bit is present after that the netmask is invalid.
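+ # Worked example: the 8 bytes c0 a8 00 00 ff ff ff 00 split into base
+ # 192.168.0.0 and netmask 255.255.255.0; the netmask has 24 leading
+ # one bits, so prefix = 24 and the result is the network
+ # 192.168.0.0/24.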
+ base = ipaddress.ip_address(data[: data_len // 2])
+ netmask = ipaddress.ip_address(data[data_len // 2 :])
+ bits = bin(int(netmask))[2:]
+ prefix = bits.find("0")
+ # If no 0 bits are found it is a /32 or /128
+ if prefix == -1:
+ prefix = len(bits)
+
+ if "1" in bits[prefix:]:
+ raise ValueError("Invalid netmask")
+
+ ip = ipaddress.ip_network(base.exploded + u"/{}".format(prefix))
+ else:
+ ip = ipaddress.ip_address(data)
+
+ return x509.IPAddress(ip)
+ elif gn.type == backend._lib.GEN_DIRNAME:
+ return x509.DirectoryName(
+ _decode_x509_name(backend, gn.d.directoryName)
+ )
+ elif gn.type == backend._lib.GEN_EMAIL:
+ # Convert to bytes and then decode to utf8. We don't use
+ # asn1_string_to_utf8 here because it doesn't properly convert
+ # utf8 from ia5strings.
+ data = _asn1_string_to_bytes(backend, gn.d.rfc822Name).decode("utf8")
+ # We don't use the constructor for RFC822Name so we can bypass
+ # validation. This allows us to create RFC822Name objects that have
+ # unicode chars when a certificate (against the RFC) contains them.
+ return x509.RFC822Name._init_without_validation(data)
+ elif gn.type == backend._lib.GEN_OTHERNAME:
+ type_id = _obj2txt(backend, gn.d.otherName.type_id)
+ value = _asn1_to_der(backend, gn.d.otherName.value)
+ return x509.OtherName(x509.ObjectIdentifier(type_id), value)
+ else:
+ # x400Address or ediPartyName
+ raise x509.UnsupportedGeneralNameType(
+ "{} is not a supported type".format(
+ x509._GENERAL_NAMES.get(gn.type, gn.type)
+ ),
+ gn.type,
+ )
+
+
+def _decode_ocsp_no_check(backend, ext):
+ return x509.OCSPNoCheck()
+
+
+def _decode_crl_number(backend, ext):
+ asn1_int = backend._ffi.cast("ASN1_INTEGER *", ext)
+ asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free)
+ return x509.CRLNumber(_asn1_integer_to_int(backend, asn1_int))
+
+
+def _decode_delta_crl_indicator(backend, ext):
+ asn1_int = backend._ffi.cast("ASN1_INTEGER *", ext)
+ asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free)
+ return x509.DeltaCRLIndicator(_asn1_integer_to_int(backend, asn1_int))
+
+
+class _X509ExtensionParser(object):
+ def __init__(self, backend, ext_count, get_ext, handlers):
+ self.ext_count = ext_count
+ self.get_ext = get_ext
+ self.handlers = handlers
+ self._backend = backend
+
+ def parse(self, x509_obj):
+ extensions = []
+ seen_oids = set()
+ for i in range(self.ext_count(x509_obj)):
+ ext = self.get_ext(x509_obj, i)
+ self._backend.openssl_assert(ext != self._backend._ffi.NULL)
+ crit = self._backend._lib.X509_EXTENSION_get_critical(ext)
+ critical = crit == 1
+ oid = x509.ObjectIdentifier(
+ _obj2txt(
+ self._backend,
+ self._backend._lib.X509_EXTENSION_get_object(ext),
+ )
+ )
+ if oid in seen_oids:
+ raise x509.DuplicateExtension(
+ "Duplicate {} extension found".format(oid), oid
+ )
+
+ # These OIDs are only supported in OpenSSL 1.1.0+ but we want
+ # to support them in all versions of OpenSSL so we decode them
+ # ourselves.
+ if oid == ExtensionOID.TLS_FEATURE:
+ # The extension contents are a SEQUENCE OF INTEGERs.
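+ # e.g. the DER bytes 30 03 02 01 05 decode to [5], i.e. the
+ # status_request (OCSP must-staple) TLS feature.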
+ data = self._backend._lib.X509_EXTENSION_get_data(ext)
+ data_bytes = _asn1_string_to_bytes(self._backend, data)
+ features = DERReader(data_bytes).read_single_element(SEQUENCE)
+ parsed = []
+ while not features.is_empty():
+ parsed.append(features.read_element(INTEGER).as_integer())
+ # Map the features to their enum value.
+ value = x509.TLSFeature(
+ [_TLS_FEATURE_TYPE_TO_ENUM[x] for x in parsed]
+ )
+ extensions.append(x509.Extension(oid, critical, value))
+ seen_oids.add(oid)
+ continue
+ elif oid == ExtensionOID.PRECERT_POISON:
+ data = self._backend._lib.X509_EXTENSION_get_data(ext)
+ # The contents of the extension must be an ASN.1 NULL.
+ reader = DERReader(_asn1_string_to_bytes(self._backend, data))
+ reader.read_single_element(NULL).check_empty()
+ extensions.append(
+ x509.Extension(oid, critical, x509.PrecertPoison())
+ )
+ seen_oids.add(oid)
+ continue
+
+ try:
+ handler = self.handlers[oid]
+ except KeyError:
+ # Dump the DER payload into an UnrecognizedExtension object
+ data = self._backend._lib.X509_EXTENSION_get_data(ext)
+ self._backend.openssl_assert(data != self._backend._ffi.NULL)
+ der = self._backend._ffi.buffer(data.data, data.length)[:]
+ unrecognized = x509.UnrecognizedExtension(oid, der)
+ extensions.append(x509.Extension(oid, critical, unrecognized))
+ else:
+ ext_data = self._backend._lib.X509V3_EXT_d2i(ext)
+ if ext_data == self._backend._ffi.NULL:
+ self._backend._consume_errors()
+ raise ValueError(
+ "The {} extension is invalid and can't be "
+ "parsed".format(oid)
+ )
+
+ value = handler(self._backend, ext_data)
+ extensions.append(x509.Extension(oid, critical, value))
+
+ seen_oids.add(oid)
+
+ return x509.Extensions(extensions)
+
+
+def _decode_certificate_policies(backend, cp):
+ cp = backend._ffi.cast("Cryptography_STACK_OF_POLICYINFO *", cp)
+ cp = backend._ffi.gc(cp, backend._lib.CERTIFICATEPOLICIES_free)
+
+ num = backend._lib.sk_POLICYINFO_num(cp)
+ certificate_policies = []
+ for i in range(num):
+ qualifiers = None
+ pi = backend._lib.sk_POLICYINFO_value(cp, i)
+ oid = x509.ObjectIdentifier(_obj2txt(backend, pi.policyid))
+ if pi.qualifiers != backend._ffi.NULL:
+ qnum = backend._lib.sk_POLICYQUALINFO_num(pi.qualifiers)
+ qualifiers = []
+ for j in range(qnum):
+ pqi = backend._lib.sk_POLICYQUALINFO_value(pi.qualifiers, j)
+ pqualid = x509.ObjectIdentifier(_obj2txt(backend, pqi.pqualid))
+ if pqualid == CertificatePoliciesOID.CPS_QUALIFIER:
+ cpsuri = backend._ffi.buffer(
+ pqi.d.cpsuri.data, pqi.d.cpsuri.length
+ )[:].decode("ascii")
+ qualifiers.append(cpsuri)
+ else:
+ assert pqualid == CertificatePoliciesOID.CPS_USER_NOTICE
+ user_notice = _decode_user_notice(
+ backend, pqi.d.usernotice
+ )
+ qualifiers.append(user_notice)
+
+ certificate_policies.append(x509.PolicyInformation(oid, qualifiers))
+
+ return x509.CertificatePolicies(certificate_policies)
+
+
+def _decode_user_notice(backend, un):
+ explicit_text = None
+ notice_reference = None
+
+ if un.exptext != backend._ffi.NULL:
+ explicit_text = _asn1_string_to_utf8(backend, un.exptext)
+
+ if un.noticeref != backend._ffi.NULL:
+ organization = _asn1_string_to_utf8(backend, un.noticeref.organization)
+
+ num = backend._lib.sk_ASN1_INTEGER_num(un.noticeref.noticenos)
+ notice_numbers = []
+ for i in range(num):
+ asn1_int = backend._lib.sk_ASN1_INTEGER_value(
+ un.noticeref.noticenos, i
+ )
+ notice_num = _asn1_integer_to_int(backend, asn1_int)
+ notice_numbers.append(notice_num)
+
+ notice_reference = x509.NoticeReference(organization, notice_numbers)
+
+ return x509.UserNotice(notice_reference, explicit_text)
+
+
+def _decode_basic_constraints(backend, bc_st):
+ basic_constraints = backend._ffi.cast("BASIC_CONSTRAINTS *", bc_st)
+ basic_constraints = backend._ffi.gc(
+ basic_constraints, backend._lib.BASIC_CONSTRAINTS_free
+ )
+ # The byte representation of an ASN.1 boolean true is \xff. OpenSSL
+ # chooses to just map this to its ordinal value, so true is 255 and
+ # false is 0.
+ ca = basic_constraints.ca == 255
+ path_length = _asn1_integer_to_int_or_none(
+ backend, basic_constraints.pathlen
+ )
+
+ return x509.BasicConstraints(ca, path_length)
+
+
+def _decode_subject_key_identifier(backend, asn1_string):
+ asn1_string = backend._ffi.cast("ASN1_OCTET_STRING *", asn1_string)
+ asn1_string = backend._ffi.gc(
+ asn1_string, backend._lib.ASN1_OCTET_STRING_free
+ )
+ return x509.SubjectKeyIdentifier(
+ backend._ffi.buffer(asn1_string.data, asn1_string.length)[:]
+ )
+
+
+def _decode_authority_key_identifier(backend, akid):
+ akid = backend._ffi.cast("AUTHORITY_KEYID *", akid)
+ akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free)
+ key_identifier = None
+ authority_cert_issuer = None
+
+ if akid.keyid != backend._ffi.NULL:
+ key_identifier = backend._ffi.buffer(
+ akid.keyid.data, akid.keyid.length
+ )[:]
+
+ if akid.issuer != backend._ffi.NULL:
+ authority_cert_issuer = _decode_general_names(backend, akid.issuer)
+
+ authority_cert_serial_number = _asn1_integer_to_int_or_none(
+ backend, akid.serial
+ )
+
+ return x509.AuthorityKeyIdentifier(
+ key_identifier, authority_cert_issuer, authority_cert_serial_number
+ )
+
+
+def _decode_information_access(backend, ia):
+ ia = backend._ffi.cast("Cryptography_STACK_OF_ACCESS_DESCRIPTION *", ia)
+ ia = backend._ffi.gc(
+ ia,
+ lambda x: backend._lib.sk_ACCESS_DESCRIPTION_pop_free(
+ x,
+ backend._ffi.addressof(
+ backend._lib._original_lib, "ACCESS_DESCRIPTION_free"
+ ),
+ ),
+ )
+ num = backend._lib.sk_ACCESS_DESCRIPTION_num(ia)
+ access_descriptions = []
+ for i in range(num):
+ ad = backend._lib.sk_ACCESS_DESCRIPTION_value(ia, i)
+ backend.openssl_assert(ad.method != backend._ffi.NULL)
+ oid = x509.ObjectIdentifier(_obj2txt(backend, ad.method))
+ backend.openssl_assert(ad.location != backend._ffi.NULL)
+ gn = _decode_general_name(backend, ad.location)
+ access_descriptions.append(x509.AccessDescription(oid, gn))
+
+ return access_descriptions
+
+
+def _decode_authority_information_access(backend, aia):
+ access_descriptions = _decode_information_access(backend, aia)
+ return x509.AuthorityInformationAccess(access_descriptions)
+
+
+def _decode_subject_information_access(backend, aia):
+ access_descriptions = _decode_information_access(backend, aia)
+ return x509.SubjectInformationAccess(access_descriptions)
+
+
+def _decode_key_usage(backend, bit_string):
+ bit_string = backend._ffi.cast("ASN1_BIT_STRING *", bit_string)
+ bit_string = backend._ffi.gc(bit_string, backend._lib.ASN1_BIT_STRING_free)
+ get_bit = backend._lib.ASN1_BIT_STRING_get_bit
+ digital_signature = get_bit(bit_string, 0) == 1
+ content_commitment = get_bit(bit_string, 1) == 1
+ key_encipherment = get_bit(bit_string, 2) == 1
+ data_encipherment = get_bit(bit_string, 3) == 1
+ key_agreement = get_bit(bit_string, 4) == 1
+ key_cert_sign = get_bit(bit_string, 5) == 1
+ crl_sign = get_bit(bit_string, 6) == 1
+ encipher_only = get_bit(bit_string, 7) == 1
+ decipher_only = get_bit(bit_string, 8) == 1
+ return x509.KeyUsage(
+ digital_signature,
+ content_commitment,
+ key_encipherment,
+ data_encipherment,
+ key_agreement,
+ key_cert_sign,
+ crl_sign,
+ encipher_only,
+ decipher_only,
+ )
+
+
+def _decode_general_names_extension(backend, gns):
+ gns = backend._ffi.cast("GENERAL_NAMES *", gns)
+ gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free)
+ general_names = _decode_general_names(backend, gns)
+ return general_names
+
+
+def _decode_subject_alt_name(backend, ext):
+ return x509.SubjectAlternativeName(
+ _decode_general_names_extension(backend, ext)
+ )
+
+
+def _decode_issuer_alt_name(backend, ext):
+ return x509.IssuerAlternativeName(
+ _decode_general_names_extension(backend, ext)
+ )
+
+
+def _decode_name_constraints(backend, nc):
+ nc = backend._ffi.cast("NAME_CONSTRAINTS *", nc)
+ nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free)
+ permitted = _decode_general_subtrees(backend, nc.permittedSubtrees)
+ excluded = _decode_general_subtrees(backend, nc.excludedSubtrees)
+ return x509.NameConstraints(
+ permitted_subtrees=permitted, excluded_subtrees=excluded
+ )
+
+
+def _decode_general_subtrees(backend, stack_subtrees):
+ if stack_subtrees == backend._ffi.NULL:
+ return None
+
+ num = backend._lib.sk_GENERAL_SUBTREE_num(stack_subtrees)
+ subtrees = []
+
+ for i in range(num):
+ obj = backend._lib.sk_GENERAL_SUBTREE_value(stack_subtrees, i)
+ backend.openssl_assert(obj != backend._ffi.NULL)
+ name = _decode_general_name(backend, obj.base)
+ subtrees.append(name)
+
+ return subtrees
+
+
+def _decode_issuing_dist_point(backend, idp):
+ idp = backend._ffi.cast("ISSUING_DIST_POINT *", idp)
+ idp = backend._ffi.gc(idp, backend._lib.ISSUING_DIST_POINT_free)
+ if idp.distpoint != backend._ffi.NULL:
+ full_name, relative_name = _decode_distpoint(backend, idp.distpoint)
+ else:
+ full_name = None
+ relative_name = None
+
+ only_user = idp.onlyuser == 255
+ only_ca = idp.onlyCA == 255
+ indirect_crl = idp.indirectCRL == 255
+ only_attr = idp.onlyattr == 255
+ if idp.onlysomereasons != backend._ffi.NULL:
+ only_some_reasons = _decode_reasons(backend, idp.onlysomereasons)
+ else:
+ only_some_reasons = None
+
+ return x509.IssuingDistributionPoint(
+ full_name,
+ relative_name,
+ only_user,
+ only_ca,
+ only_some_reasons,
+ indirect_crl,
+ only_attr,
+ )
+
+
+def _decode_policy_constraints(backend, pc):
+ pc = backend._ffi.cast("POLICY_CONSTRAINTS *", pc)
+ pc = backend._ffi.gc(pc, backend._lib.POLICY_CONSTRAINTS_free)
+
+ require_explicit_policy = _asn1_integer_to_int_or_none(
+ backend, pc.requireExplicitPolicy
+ )
+ inhibit_policy_mapping = _asn1_integer_to_int_or_none(
+ backend, pc.inhibitPolicyMapping
+ )
+
+ return x509.PolicyConstraints(
+ require_explicit_policy, inhibit_policy_mapping
+ )
+
+
+def _decode_extended_key_usage(backend, sk):
+ sk = backend._ffi.cast("Cryptography_STACK_OF_ASN1_OBJECT *", sk)
+ sk = backend._ffi.gc(sk, backend._lib.sk_ASN1_OBJECT_free)
+ num = backend._lib.sk_ASN1_OBJECT_num(sk)
+ ekus = []
+
+ for i in range(num):
+ obj = backend._lib.sk_ASN1_OBJECT_value(sk, i)
+ backend.openssl_assert(obj != backend._ffi.NULL)
+ oid = x509.ObjectIdentifier(_obj2txt(backend, obj))
+ ekus.append(oid)
+
+ return x509.ExtendedKeyUsage(ekus)
+
+
+_DISTPOINT_TYPE_FULLNAME = 0
+_DISTPOINT_TYPE_RELATIVENAME = 1
+
+
+def _decode_dist_points(backend, cdps):
+ cdps = backend._ffi.cast("Cryptography_STACK_OF_DIST_POINT *", cdps)
+ cdps = backend._ffi.gc(cdps, backend._lib.CRL_DIST_POINTS_free)
+
+ num = backend._lib.sk_DIST_POINT_num(cdps)
+ dist_points = []
+ for i in range(num):
+ full_name = None
+ relative_name = None
+ crl_issuer = None
+ reasons = None
+ cdp = backend._lib.sk_DIST_POINT_value(cdps, i)
+ if cdp.reasons != backend._ffi.NULL:
+ reasons = _decode_reasons(backend, cdp.reasons)
+
+ if cdp.CRLissuer != backend._ffi.NULL:
+ crl_issuer = _decode_general_names(backend, cdp.CRLissuer)
+
+ # Certificates may have a crl_issuer/reasons and no distribution
+ # point so make sure it's not null.
+ if cdp.distpoint != backend._ffi.NULL:
+ full_name, relative_name = _decode_distpoint(
+ backend, cdp.distpoint
+ )
+
+ dist_points.append(
+ x509.DistributionPoint(
+ full_name, relative_name, reasons, crl_issuer
+ )
+ )
+
+ return dist_points
+
+
+# ReasonFlags ::= BIT STRING {
+# unused (0),
+# keyCompromise (1),
+# cACompromise (2),
+# affiliationChanged (3),
+# superseded (4),
+# cessationOfOperation (5),
+# certificateHold (6),
+# privilegeWithdrawn (7),
+# aACompromise (8) }
+_REASON_BIT_MAPPING = {
+ 1: x509.ReasonFlags.key_compromise,
+ 2: x509.ReasonFlags.ca_compromise,
+ 3: x509.ReasonFlags.affiliation_changed,
+ 4: x509.ReasonFlags.superseded,
+ 5: x509.ReasonFlags.cessation_of_operation,
+ 6: x509.ReasonFlags.certificate_hold,
+ 7: x509.ReasonFlags.privilege_withdrawn,
+ 8: x509.ReasonFlags.aa_compromise,
+}
+
+
+def _decode_reasons(backend, reasons):
+ # We will check each bit from RFC 5280
+ enum_reasons = []
+ for bit_position, reason in six.iteritems(_REASON_BIT_MAPPING):
+ if backend._lib.ASN1_BIT_STRING_get_bit(reasons, bit_position):
+ enum_reasons.append(reason)
+
+ return frozenset(enum_reasons)
+
+
+def _decode_distpoint(backend, distpoint):
+ if distpoint.type == _DISTPOINT_TYPE_FULLNAME:
+ full_name = _decode_general_names(backend, distpoint.name.fullname)
+ return full_name, None
+
+ # The OpenSSL code doesn't test for a specific type for
+ # relativename; everything that isn't fullname is treated as
+ # relativename. Per RFC 5280:
+ #
+ # DistributionPointName ::= CHOICE {
+ # fullName [0] GeneralNames,
+ # nameRelativeToCRLIssuer [1] RelativeDistinguishedName }
+ rns = distpoint.name.relativename
+ rnum = backend._lib.sk_X509_NAME_ENTRY_num(rns)
+ attributes = set()
+ for i in range(rnum):
+ rn = backend._lib.sk_X509_NAME_ENTRY_value(rns, i)
+ backend.openssl_assert(rn != backend._ffi.NULL)
+ attributes.add(_decode_x509_name_entry(backend, rn))
+
+ relative_name = x509.RelativeDistinguishedName(attributes)
+
+ return None, relative_name
+
+
+def _decode_crl_distribution_points(backend, cdps):
+ dist_points = _decode_dist_points(backend, cdps)
+ return x509.CRLDistributionPoints(dist_points)
+
+
+def _decode_freshest_crl(backend, cdps):
+ dist_points = _decode_dist_points(backend, cdps)
+ return x509.FreshestCRL(dist_points)
+
+
+def _decode_inhibit_any_policy(backend, asn1_int):
+ asn1_int = backend._ffi.cast("ASN1_INTEGER *", asn1_int)
+ asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free)
+ skip_certs = _asn1_integer_to_int(backend, asn1_int)
+ return x509.InhibitAnyPolicy(skip_certs)
+
+
+def _decode_scts(backend, asn1_scts):
+ from cryptography.hazmat.backends.openssl.x509 import (
+ _SignedCertificateTimestamp,
+ )
+
+ asn1_scts = backend._ffi.cast("Cryptography_STACK_OF_SCT *", asn1_scts)
+ asn1_scts = backend._ffi.gc(asn1_scts, backend._lib.SCT_LIST_free)
+
+ scts = []
+ for i in range(backend._lib.sk_SCT_num(asn1_scts)):
+ sct = backend._lib.sk_SCT_value(asn1_scts, i)
+
+ scts.append(_SignedCertificateTimestamp(backend, asn1_scts, sct))
+ return scts
+
+
+def _decode_precert_signed_certificate_timestamps(backend, asn1_scts):
+ return x509.PrecertificateSignedCertificateTimestamps(
+ _decode_scts(backend, asn1_scts)
+ )
+
+
+def _decode_signed_certificate_timestamps(backend, asn1_scts):
+ return x509.SignedCertificateTimestamps(_decode_scts(backend, asn1_scts))
+
+
+# CRLReason ::= ENUMERATED {
+# unspecified (0),
+# keyCompromise (1),
+# cACompromise (2),
+# affiliationChanged (3),
+# superseded (4),
+# cessationOfOperation (5),
+# certificateHold (6),
+# -- value 7 is not used
+# removeFromCRL (8),
+# privilegeWithdrawn (9),
+# aACompromise (10) }
+_CRL_ENTRY_REASON_CODE_TO_ENUM = {
+ 0: x509.ReasonFlags.unspecified,
+ 1: x509.ReasonFlags.key_compromise,
+ 2: x509.ReasonFlags.ca_compromise,
+ 3: x509.ReasonFlags.affiliation_changed,
+ 4: x509.ReasonFlags.superseded,
+ 5: x509.ReasonFlags.cessation_of_operation,
+ 6: x509.ReasonFlags.certificate_hold,
+ 8: x509.ReasonFlags.remove_from_crl,
+ 9: x509.ReasonFlags.privilege_withdrawn,
+ 10: x509.ReasonFlags.aa_compromise,
+}
+
+
+_CRL_ENTRY_REASON_ENUM_TO_CODE = {
+ x509.ReasonFlags.unspecified: 0,
+ x509.ReasonFlags.key_compromise: 1,
+ x509.ReasonFlags.ca_compromise: 2,
+ x509.ReasonFlags.affiliation_changed: 3,
+ x509.ReasonFlags.superseded: 4,
+ x509.ReasonFlags.cessation_of_operation: 5,
+ x509.ReasonFlags.certificate_hold: 6,
+ x509.ReasonFlags.remove_from_crl: 8,
+ x509.ReasonFlags.privilege_withdrawn: 9,
+ x509.ReasonFlags.aa_compromise: 10,
+}
+
+
+def _decode_crl_reason(backend, enum):
+ enum = backend._ffi.cast("ASN1_ENUMERATED *", enum)
+ enum = backend._ffi.gc(enum, backend._lib.ASN1_ENUMERATED_free)
+ code = backend._lib.ASN1_ENUMERATED_get(enum)
+
+ try:
+ return x509.CRLReason(_CRL_ENTRY_REASON_CODE_TO_ENUM[code])
+ except KeyError:
+ raise ValueError("Unsupported reason code: {}".format(code))
+
+
+def _decode_invalidity_date(backend, inv_date):
+ generalized_time = backend._ffi.cast("ASN1_GENERALIZEDTIME *", inv_date)
+ generalized_time = backend._ffi.gc(
+ generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free
+ )
+ return x509.InvalidityDate(
+ _parse_asn1_generalized_time(backend, generalized_time)
+ )
+
+
+def _decode_cert_issuer(backend, gns):
+ gns = backend._ffi.cast("GENERAL_NAMES *", gns)
+ gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free)
+ general_names = _decode_general_names(backend, gns)
+ return x509.CertificateIssuer(general_names)
+
+
+def _asn1_to_der(backend, asn1_type):
+ buf = backend._ffi.new("unsigned char **")
+ res = backend._lib.i2d_ASN1_TYPE(asn1_type, buf)
+ backend.openssl_assert(res >= 0)
+ backend.openssl_assert(buf[0] != backend._ffi.NULL)
+ buf = backend._ffi.gc(
+ buf, lambda buffer: backend._lib.OPENSSL_free(buffer[0])
+ )
+ return backend._ffi.buffer(buf[0], res)[:]
+
+
+def _asn1_integer_to_int(backend, asn1_int):
+ bn = backend._lib.ASN1_INTEGER_to_BN(asn1_int, backend._ffi.NULL)
+ backend.openssl_assert(bn != backend._ffi.NULL)
+ bn = backend._ffi.gc(bn, backend._lib.BN_free)
+ return backend._bn_to_int(bn)
+
+
+def _asn1_integer_to_int_or_none(backend, asn1_int):
+ if asn1_int == backend._ffi.NULL:
+ return None
+ else:
+ return _asn1_integer_to_int(backend, asn1_int)
+
+
+def _asn1_string_to_bytes(backend, asn1_string):
+ return backend._ffi.buffer(asn1_string.data, asn1_string.length)[:]
+
+
+def _asn1_string_to_ascii(backend, asn1_string):
+ return _asn1_string_to_bytes(backend, asn1_string).decode("ascii")
+
+
+def _asn1_string_to_utf8(backend, asn1_string):
+ buf = backend._ffi.new("unsigned char **")
+ res = backend._lib.ASN1_STRING_to_UTF8(buf, asn1_string)
+ if res == -1:
+ raise ValueError(
+ "Unsupported ASN1 string type. Type: {}".format(asn1_string.type)
+ )
+
+ backend.openssl_assert(buf[0] != backend._ffi.NULL)
+ buf = backend._ffi.gc(
+ buf, lambda buffer: backend._lib.OPENSSL_free(buffer[0])
+ )
+ return backend._ffi.buffer(buf[0], res)[:].decode("utf8")
+
+
+def _parse_asn1_time(backend, asn1_time):
+ backend.openssl_assert(asn1_time != backend._ffi.NULL)
+ generalized_time = backend._lib.ASN1_TIME_to_generalizedtime(
+ asn1_time, backend._ffi.NULL
+ )
+ if generalized_time == backend._ffi.NULL:
+ raise ValueError(
+ "Couldn't parse ASN.1 time as generalizedtime {!r}".format(
+ _asn1_string_to_bytes(backend, asn1_time)
+ )
+ )
+
+ generalized_time = backend._ffi.gc(
+ generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free
+ )
+ return _parse_asn1_generalized_time(backend, generalized_time)
+
+
+def _parse_asn1_generalized_time(backend, generalized_time):
+ time = _asn1_string_to_ascii(
+ backend, backend._ffi.cast("ASN1_STRING *", generalized_time)
+ )
+ return datetime.datetime.strptime(time, "%Y%m%d%H%M%SZ")
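+ # For example, the string "20231002120844Z" parses to
+ # datetime.datetime(2023, 10, 2, 12, 8, 44).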
+
+
+def _decode_nonce(backend, nonce):
+ nonce = backend._ffi.cast("ASN1_OCTET_STRING *", nonce)
+ nonce = backend._ffi.gc(nonce, backend._lib.ASN1_OCTET_STRING_free)
+ return x509.OCSPNonce(_asn1_string_to_bytes(backend, nonce))
+
+
+_EXTENSION_HANDLERS_BASE = {
+ ExtensionOID.BASIC_CONSTRAINTS: _decode_basic_constraints,
+ ExtensionOID.SUBJECT_KEY_IDENTIFIER: _decode_subject_key_identifier,
+ ExtensionOID.KEY_USAGE: _decode_key_usage,
+ ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _decode_subject_alt_name,
+ ExtensionOID.EXTENDED_KEY_USAGE: _decode_extended_key_usage,
+ ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier,
+ ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
+ _decode_authority_information_access
+ ),
+ ExtensionOID.SUBJECT_INFORMATION_ACCESS: (
+ _decode_subject_information_access
+ ),
+ ExtensionOID.CERTIFICATE_POLICIES: _decode_certificate_policies,
+ ExtensionOID.CRL_DISTRIBUTION_POINTS: _decode_crl_distribution_points,
+ ExtensionOID.FRESHEST_CRL: _decode_freshest_crl,
+ ExtensionOID.OCSP_NO_CHECK: _decode_ocsp_no_check,
+ ExtensionOID.INHIBIT_ANY_POLICY: _decode_inhibit_any_policy,
+ ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name,
+ ExtensionOID.NAME_CONSTRAINTS: _decode_name_constraints,
+ ExtensionOID.POLICY_CONSTRAINTS: _decode_policy_constraints,
+}
+_EXTENSION_HANDLERS_SCT = {
+ ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: (
+ _decode_precert_signed_certificate_timestamps
+ )
+}
+
+_REVOKED_EXTENSION_HANDLERS = {
+ CRLEntryExtensionOID.CRL_REASON: _decode_crl_reason,
+ CRLEntryExtensionOID.INVALIDITY_DATE: _decode_invalidity_date,
+ CRLEntryExtensionOID.CERTIFICATE_ISSUER: _decode_cert_issuer,
+}
+
+_CRL_EXTENSION_HANDLERS = {
+ ExtensionOID.CRL_NUMBER: _decode_crl_number,
+ ExtensionOID.DELTA_CRL_INDICATOR: _decode_delta_crl_indicator,
+ ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier,
+ ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name,
+ ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
+ _decode_authority_information_access
+ ),
+ ExtensionOID.ISSUING_DISTRIBUTION_POINT: _decode_issuing_dist_point,
+ ExtensionOID.FRESHEST_CRL: _decode_freshest_crl,
+}
+
+_OCSP_REQ_EXTENSION_HANDLERS = {
+ OCSPExtensionOID.NONCE: _decode_nonce,
+}
+
+_OCSP_BASICRESP_EXTENSION_HANDLERS = {
+ OCSPExtensionOID.NONCE: _decode_nonce,
+}
+
+_OCSP_SINGLERESP_EXTENSION_HANDLERS_SCT = {
+ ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS: (
+ _decode_signed_certificate_timestamps
+ )
+}
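The tables above map each supported extension OID to its decoder; any other OID is surfaced as x509.UnrecognizedExtension. A minimal, self-contained sketch of how this path is exercised through the public x509 API of this vintage (all certificate contents below are illustrative placeholders, not taken from this change):

import datetime

from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.x509.oid import ExtensionOID, NameOID

backend = default_backend()
key = ec.generate_private_key(ec.SECP256R1(), backend)
name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.test")])

# Parsing the signed certificate dispatches each extension through the
# handler tables above (_decode_basic_constraints for this one).
cert = (
    x509.CertificateBuilder()
    .subject_name(name)
    .issuer_name(name)
    .public_key(key.public_key())
    .serial_number(x509.random_serial_number())
    .not_valid_before(datetime.datetime(2023, 1, 1))
    .not_valid_after(datetime.datetime(2033, 1, 1))
    .add_extension(
        x509.BasicConstraints(ca=True, path_length=None), critical=True
    )
    .sign(key, hashes.SHA256(), backend)
)

bc = cert.extensions.get_extension_for_oid(ExtensionOID.BASIC_CONSTRAINTS)
assert bc.value.ca is True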
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/dh.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/dh.py
new file mode 100644
index 0000000000..2862676c65
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/dh.py
@@ -0,0 +1,271 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric import dh
+
+
+def _dh_params_dup(dh_cdata, backend):
+ lib = backend._lib
+ ffi = backend._ffi
+
+ param_cdata = lib.DHparams_dup(dh_cdata)
+ backend.openssl_assert(param_cdata != ffi.NULL)
+ param_cdata = ffi.gc(param_cdata, lib.DH_free)
+ if lib.CRYPTOGRAPHY_IS_LIBRESSL:
+ # In LibreSSL, DHparams_dup doesn't copy q, so copy it over manually.
+ q = ffi.new("BIGNUM **")
+ lib.DH_get0_pqg(dh_cdata, ffi.NULL, q, ffi.NULL)
+ q_dup = lib.BN_dup(q[0])
+ res = lib.DH_set0_pqg(param_cdata, ffi.NULL, q_dup, ffi.NULL)
+ backend.openssl_assert(res == 1)
+
+ return param_cdata
+
+
+def _dh_cdata_to_parameters(dh_cdata, backend):
+ param_cdata = _dh_params_dup(dh_cdata, backend)
+ return _DHParameters(backend, param_cdata)
+
+
+@utils.register_interface(dh.DHParametersWithSerialization)
+class _DHParameters(object):
+ def __init__(self, backend, dh_cdata):
+ self._backend = backend
+ self._dh_cdata = dh_cdata
+
+ def parameter_numbers(self):
+ p = self._backend._ffi.new("BIGNUM **")
+ g = self._backend._ffi.new("BIGNUM **")
+ q = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g)
+ self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
+ if q[0] == self._backend._ffi.NULL:
+ q_val = None
+ else:
+ q_val = self._backend._bn_to_int(q[0])
+ return dh.DHParameterNumbers(
+ p=self._backend._bn_to_int(p[0]),
+ g=self._backend._bn_to_int(g[0]),
+ q=q_val,
+ )
+
+ def generate_private_key(self):
+ return self._backend.generate_dh_private_key(self)
+
+ def parameter_bytes(self, encoding, format):
+ if format is not serialization.ParameterFormat.PKCS3:
+ raise ValueError("Only PKCS3 serialization is supported")
+ if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX:
+ q = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_pqg(
+ self._dh_cdata,
+ self._backend._ffi.NULL,
+ q,
+ self._backend._ffi.NULL,
+ )
+ if q[0] != self._backend._ffi.NULL:
+ raise UnsupportedAlgorithm(
+ "DH X9.42 serialization is not supported",
+ _Reasons.UNSUPPORTED_SERIALIZATION,
+ )
+
+ return self._backend._parameter_bytes(encoding, format, self._dh_cdata)
+
+
+def _get_dh_num_bits(backend, dh_cdata):
+ p = backend._ffi.new("BIGNUM **")
+ backend._lib.DH_get0_pqg(dh_cdata, p, backend._ffi.NULL, backend._ffi.NULL)
+ backend.openssl_assert(p[0] != backend._ffi.NULL)
+ return backend._lib.BN_num_bits(p[0])
+
+
+@utils.register_interface(dh.DHPrivateKeyWithSerialization)
+class _DHPrivateKey(object):
+ def __init__(self, backend, dh_cdata, evp_pkey):
+ self._backend = backend
+ self._dh_cdata = dh_cdata
+ self._evp_pkey = evp_pkey
+ self._key_size_bytes = self._backend._lib.DH_size(dh_cdata)
+
+ @property
+ def key_size(self):
+ return _get_dh_num_bits(self._backend, self._dh_cdata)
+
+ def private_numbers(self):
+ p = self._backend._ffi.new("BIGNUM **")
+ g = self._backend._ffi.new("BIGNUM **")
+ q = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g)
+ self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
+ if q[0] == self._backend._ffi.NULL:
+ q_val = None
+ else:
+ q_val = self._backend._bn_to_int(q[0])
+ pub_key = self._backend._ffi.new("BIGNUM **")
+ priv_key = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_key(self._dh_cdata, pub_key, priv_key)
+ self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(priv_key[0] != self._backend._ffi.NULL)
+ return dh.DHPrivateNumbers(
+ public_numbers=dh.DHPublicNumbers(
+ parameter_numbers=dh.DHParameterNumbers(
+ p=self._backend._bn_to_int(p[0]),
+ g=self._backend._bn_to_int(g[0]),
+ q=q_val,
+ ),
+ y=self._backend._bn_to_int(pub_key[0]),
+ ),
+ x=self._backend._bn_to_int(priv_key[0]),
+ )
+
+ def exchange(self, peer_public_key):
+ buf = self._backend._ffi.new("unsigned char[]", self._key_size_bytes)
+ pub_key = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_key(
+ peer_public_key._dh_cdata, pub_key, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
+ res = self._backend._lib.DH_compute_key(
+ buf, pub_key[0], self._dh_cdata
+ )
+
+ if res == -1:
+ errors_with_text = self._backend._consume_errors_with_text()
+ raise ValueError(
+ "Error computing shared key. Public key is likely invalid "
+ "for this exchange.",
+ errors_with_text,
+ )
+ else:
+ self._backend.openssl_assert(res >= 1)
+
+ key = self._backend._ffi.buffer(buf)[:res]
+ pad = self._key_size_bytes - len(key)
+
+ if pad > 0:
+ key = (b"\x00" * pad) + key
+
+ return key
+
+ def public_key(self):
+ dh_cdata = _dh_params_dup(self._dh_cdata, self._backend)
+ pub_key = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_key(
+ self._dh_cdata, pub_key, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
+ pub_key_dup = self._backend._lib.BN_dup(pub_key[0])
+ self._backend.openssl_assert(pub_key_dup != self._backend._ffi.NULL)
+
+ res = self._backend._lib.DH_set0_key(
+ dh_cdata, pub_key_dup, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(res == 1)
+ evp_pkey = self._backend._dh_cdata_to_evp_pkey(dh_cdata)
+ return _DHPublicKey(self._backend, dh_cdata, evp_pkey)
+
+ def parameters(self):
+ return _dh_cdata_to_parameters(self._dh_cdata, self._backend)
+
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ if format is not serialization.PrivateFormat.PKCS8:
+ raise ValueError(
+ "DH private keys support only PKCS8 serialization"
+ )
+ if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX:
+ q = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_pqg(
+ self._dh_cdata,
+ self._backend._ffi.NULL,
+ q,
+ self._backend._ffi.NULL,
+ )
+ if q[0] != self._backend._ffi.NULL:
+ raise UnsupportedAlgorithm(
+ "DH X9.42 serialization is not supported",
+ _Reasons.UNSUPPORTED_SERIALIZATION,
+ )
+
+ return self._backend._private_key_bytes(
+ encoding,
+ format,
+ encryption_algorithm,
+ self,
+ self._evp_pkey,
+ self._dh_cdata,
+ )
+
+
+@utils.register_interface(dh.DHPublicKeyWithSerialization)
+class _DHPublicKey(object):
+ def __init__(self, backend, dh_cdata, evp_pkey):
+ self._backend = backend
+ self._dh_cdata = dh_cdata
+ self._evp_pkey = evp_pkey
+ self._key_size_bits = _get_dh_num_bits(self._backend, self._dh_cdata)
+
+ @property
+ def key_size(self):
+ return self._key_size_bits
+
+ def public_numbers(self):
+ p = self._backend._ffi.new("BIGNUM **")
+ g = self._backend._ffi.new("BIGNUM **")
+ q = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g)
+ self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
+ if q[0] == self._backend._ffi.NULL:
+ q_val = None
+ else:
+ q_val = self._backend._bn_to_int(q[0])
+ pub_key = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_key(
+ self._dh_cdata, pub_key, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
+ return dh.DHPublicNumbers(
+ parameter_numbers=dh.DHParameterNumbers(
+ p=self._backend._bn_to_int(p[0]),
+ g=self._backend._bn_to_int(g[0]),
+ q=q_val,
+ ),
+ y=self._backend._bn_to_int(pub_key[0]),
+ )
+
+ def parameters(self):
+ return _dh_cdata_to_parameters(self._dh_cdata, self._backend)
+
+ def public_bytes(self, encoding, format):
+ if format is not serialization.PublicFormat.SubjectPublicKeyInfo:
+ raise ValueError(
+ "DH public keys support only "
+ "SubjectPublicKeyInfo serialization"
+ )
+
+ if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX:
+ q = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DH_get0_pqg(
+ self._dh_cdata,
+ self._backend._ffi.NULL,
+ q,
+ self._backend._ffi.NULL,
+ )
+ if q[0] != self._backend._ffi.NULL:
+ raise UnsupportedAlgorithm(
+ "DH X9.42 serialization is not supported",
+ _Reasons.UNSUPPORTED_SERIALIZATION,
+ )
+
+ return self._backend._public_key_bytes(
+ encoding, format, self, self._evp_pkey, None
+ )
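_DHPrivateKey.exchange above wraps DH_compute_key and left-pads the shared secret to DH_size() bytes. A sketch of how that code is normally reached through the public API of this vintage (2048-bit parameter generation is noticeably slow; the sizes are illustrative):

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import dh

# Both parties must share the same parameters (p, g).
parameters = dh.generate_parameters(
    generator=2, key_size=2048, backend=default_backend()
)

server_key = parameters.generate_private_key()
client_key = parameters.generate_private_key()

# exchange() lands in _DHPrivateKey.exchange above; both sides derive the
# same zero-padded shared secret.
shared_1 = server_key.exchange(client_key.public_key())
shared_2 = client_key.exchange(server_key.public_key())
assert shared_1 == shared_2

In practice the raw shared secret would then be fed through a KDF (HKDF or similar) rather than used directly as key material.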
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/dsa.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/dsa.py
new file mode 100644
index 0000000000..0c5faba18a
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/dsa.py
@@ -0,0 +1,263 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+from cryptography.exceptions import InvalidSignature
+from cryptography.hazmat.backends.openssl.utils import (
+ _calculate_digest_and_algorithm,
+ _check_not_prehashed,
+ _warn_sign_verify_deprecated,
+)
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric import (
+ AsymmetricSignatureContext,
+ AsymmetricVerificationContext,
+ dsa,
+)
+
+
+def _dsa_sig_sign(backend, private_key, data):
+ sig_buf_len = backend._lib.DSA_size(private_key._dsa_cdata)
+ sig_buf = backend._ffi.new("unsigned char[]", sig_buf_len)
+ buflen = backend._ffi.new("unsigned int *")
+
+ # The first parameter passed to DSA_sign is unused by OpenSSL but
+ # must be an integer.
+ res = backend._lib.DSA_sign(
+ 0, data, len(data), sig_buf, buflen, private_key._dsa_cdata
+ )
+ backend.openssl_assert(res == 1)
+ backend.openssl_assert(buflen[0])
+
+ return backend._ffi.buffer(sig_buf)[: buflen[0]]
+
+
+def _dsa_sig_verify(backend, public_key, signature, data):
+ # The first parameter passed to DSA_verify is unused by OpenSSL but
+ # must be an integer.
+ res = backend._lib.DSA_verify(
+ 0, data, len(data), signature, len(signature), public_key._dsa_cdata
+ )
+
+ if res != 1:
+ backend._consume_errors()
+ raise InvalidSignature
+
+
+@utils.register_interface(AsymmetricVerificationContext)
+class _DSAVerificationContext(object):
+ def __init__(self, backend, public_key, signature, algorithm):
+ self._backend = backend
+ self._public_key = public_key
+ self._signature = signature
+ self._algorithm = algorithm
+
+ self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
+
+ def update(self, data):
+ self._hash_ctx.update(data)
+
+ def verify(self):
+ data_to_verify = self._hash_ctx.finalize()
+
+ _dsa_sig_verify(
+ self._backend, self._public_key, self._signature, data_to_verify
+ )
+
+
+@utils.register_interface(AsymmetricSignatureContext)
+class _DSASignatureContext(object):
+ def __init__(self, backend, private_key, algorithm):
+ self._backend = backend
+ self._private_key = private_key
+ self._algorithm = algorithm
+ self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
+
+ def update(self, data):
+ self._hash_ctx.update(data)
+
+ def finalize(self):
+ data_to_sign = self._hash_ctx.finalize()
+ return _dsa_sig_sign(self._backend, self._private_key, data_to_sign)
+
+
+@utils.register_interface(dsa.DSAParametersWithNumbers)
+class _DSAParameters(object):
+ def __init__(self, backend, dsa_cdata):
+ self._backend = backend
+ self._dsa_cdata = dsa_cdata
+
+ def parameter_numbers(self):
+ p = self._backend._ffi.new("BIGNUM **")
+ q = self._backend._ffi.new("BIGNUM **")
+ g = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
+ self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
+ return dsa.DSAParameterNumbers(
+ p=self._backend._bn_to_int(p[0]),
+ q=self._backend._bn_to_int(q[0]),
+ g=self._backend._bn_to_int(g[0]),
+ )
+
+ def generate_private_key(self):
+ return self._backend.generate_dsa_private_key(self)
+
+
+@utils.register_interface(dsa.DSAPrivateKeyWithSerialization)
+class _DSAPrivateKey(object):
+ def __init__(self, backend, dsa_cdata, evp_pkey):
+ self._backend = backend
+ self._dsa_cdata = dsa_cdata
+ self._evp_pkey = evp_pkey
+
+ p = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DSA_get0_pqg(
+ dsa_cdata, p, self._backend._ffi.NULL, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(p[0] != backend._ffi.NULL)
+ self._key_size = self._backend._lib.BN_num_bits(p[0])
+
+ key_size = utils.read_only_property("_key_size")
+
+ def signer(self, signature_algorithm):
+ _warn_sign_verify_deprecated()
+ _check_not_prehashed(signature_algorithm)
+ return _DSASignatureContext(self._backend, self, signature_algorithm)
+
+ def private_numbers(self):
+ p = self._backend._ffi.new("BIGNUM **")
+ q = self._backend._ffi.new("BIGNUM **")
+ g = self._backend._ffi.new("BIGNUM **")
+ pub_key = self._backend._ffi.new("BIGNUM **")
+ priv_key = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
+ self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
+ self._backend._lib.DSA_get0_key(self._dsa_cdata, pub_key, priv_key)
+ self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(priv_key[0] != self._backend._ffi.NULL)
+ return dsa.DSAPrivateNumbers(
+ public_numbers=dsa.DSAPublicNumbers(
+ parameter_numbers=dsa.DSAParameterNumbers(
+ p=self._backend._bn_to_int(p[0]),
+ q=self._backend._bn_to_int(q[0]),
+ g=self._backend._bn_to_int(g[0]),
+ ),
+ y=self._backend._bn_to_int(pub_key[0]),
+ ),
+ x=self._backend._bn_to_int(priv_key[0]),
+ )
+
+ def public_key(self):
+ dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata)
+ self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
+ dsa_cdata = self._backend._ffi.gc(
+ dsa_cdata, self._backend._lib.DSA_free
+ )
+ pub_key = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DSA_get0_key(
+ self._dsa_cdata, pub_key, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
+ pub_key_dup = self._backend._lib.BN_dup(pub_key[0])
+ res = self._backend._lib.DSA_set0_key(
+ dsa_cdata, pub_key_dup, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(res == 1)
+ evp_pkey = self._backend._dsa_cdata_to_evp_pkey(dsa_cdata)
+ return _DSAPublicKey(self._backend, dsa_cdata, evp_pkey)
+
+ def parameters(self):
+ dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata)
+ self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
+ dsa_cdata = self._backend._ffi.gc(
+ dsa_cdata, self._backend._lib.DSA_free
+ )
+ return _DSAParameters(self._backend, dsa_cdata)
+
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ return self._backend._private_key_bytes(
+ encoding,
+ format,
+ encryption_algorithm,
+ self,
+ self._evp_pkey,
+ self._dsa_cdata,
+ )
+
+ def sign(self, data, algorithm):
+ data, algorithm = _calculate_digest_and_algorithm(
+ self._backend, data, algorithm
+ )
+ return _dsa_sig_sign(self._backend, self, data)
+
+
+@utils.register_interface(dsa.DSAPublicKeyWithSerialization)
+class _DSAPublicKey(object):
+ def __init__(self, backend, dsa_cdata, evp_pkey):
+ self._backend = backend
+ self._dsa_cdata = dsa_cdata
+ self._evp_pkey = evp_pkey
+ p = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DSA_get0_pqg(
+ dsa_cdata, p, self._backend._ffi.NULL, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(p[0] != backend._ffi.NULL)
+ self._key_size = self._backend._lib.BN_num_bits(p[0])
+
+ key_size = utils.read_only_property("_key_size")
+
+ def verifier(self, signature, signature_algorithm):
+ _warn_sign_verify_deprecated()
+ utils._check_bytes("signature", signature)
+
+ _check_not_prehashed(signature_algorithm)
+ return _DSAVerificationContext(
+ self._backend, self, signature, signature_algorithm
+ )
+
+ def public_numbers(self):
+ p = self._backend._ffi.new("BIGNUM **")
+ q = self._backend._ffi.new("BIGNUM **")
+ g = self._backend._ffi.new("BIGNUM **")
+ pub_key = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
+ self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
+ self._backend._lib.DSA_get0_key(
+ self._dsa_cdata, pub_key, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
+ return dsa.DSAPublicNumbers(
+ parameter_numbers=dsa.DSAParameterNumbers(
+ p=self._backend._bn_to_int(p[0]),
+ q=self._backend._bn_to_int(q[0]),
+ g=self._backend._bn_to_int(g[0]),
+ ),
+ y=self._backend._bn_to_int(pub_key[0]),
+ )
+
+ def parameters(self):
+ dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata)
+ dsa_cdata = self._backend._ffi.gc(
+ dsa_cdata, self._backend._lib.DSA_free
+ )
+ return _DSAParameters(self._backend, dsa_cdata)
+
+ def public_bytes(self, encoding, format):
+ return self._backend._public_key_bytes(
+ encoding, format, self, self._evp_pkey, None
+ )
+
+ def verify(self, signature, data, algorithm):
+ data, algorithm = _calculate_digest_and_algorithm(
+ self._backend, data, algorithm
+ )
+ return _dsa_sig_verify(self._backend, self, signature, data)
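The deprecated signer()/verifier() contexts above exist only for backwards compatibility; new code goes through sign() and verify(), which hash the message and call _dsa_sig_sign/_dsa_sig_verify. A minimal sketch against the public API (key size is illustrative):

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import dsa

private_key = dsa.generate_private_key(
    key_size=2048, backend=default_backend()
)
message = b"data to authenticate"

# DER-encoded (r, s) pair produced by DSA_sign via _dsa_sig_sign above.
signature = private_key.sign(message, hashes.SHA256())

# verify() returns None on success and raises InvalidSignature otherwise.
private_key.public_key().verify(signature, message, hashes.SHA256())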
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ec.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ec.py
new file mode 100644
index 0000000000..05d32baba6
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ec.py
@@ -0,0 +1,337 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+from cryptography.exceptions import (
+ InvalidSignature,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends.openssl.utils import (
+ _calculate_digest_and_algorithm,
+ _check_not_prehashed,
+ _warn_sign_verify_deprecated,
+)
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import (
+ AsymmetricSignatureContext,
+ AsymmetricVerificationContext,
+ ec,
+)
+
+
+def _check_signature_algorithm(signature_algorithm):
+ if not isinstance(signature_algorithm, ec.ECDSA):
+ raise UnsupportedAlgorithm(
+ "Unsupported elliptic curve signature algorithm.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+
+def _ec_key_curve_sn(backend, ec_key):
+ group = backend._lib.EC_KEY_get0_group(ec_key)
+ backend.openssl_assert(group != backend._ffi.NULL)
+
+ nid = backend._lib.EC_GROUP_get_curve_name(group)
+ # The following check is to find EC keys with unnamed curves and raise
+ # an error for now.
+ if nid == backend._lib.NID_undef:
+ raise NotImplementedError(
+ "ECDSA keys with unnamed curves are unsupported at this time"
+ )
+
+ # This is like the above check, but it also catches the case where you
+ # explicitly encoded a curve with the same parameters as a named curve.
+ # Don't do that.
+ if (
+ not backend._lib.CRYPTOGRAPHY_IS_LIBRESSL
+ and backend._lib.EC_GROUP_get_asn1_flag(group) == 0
+ ):
+ raise NotImplementedError(
+ "ECDSA keys with unnamed curves are unsupported at this time"
+ )
+
+ curve_name = backend._lib.OBJ_nid2sn(nid)
+ backend.openssl_assert(curve_name != backend._ffi.NULL)
+
+ sn = backend._ffi.string(curve_name).decode("ascii")
+ return sn
+
+
+def _mark_asn1_named_ec_curve(backend, ec_cdata):
+ """
+ Set the named curve flag on the EC_KEY. This causes OpenSSL to
+ serialize EC keys along with their curve OID which makes
+ deserialization easier.
+ """
+
+ backend._lib.EC_KEY_set_asn1_flag(
+ ec_cdata, backend._lib.OPENSSL_EC_NAMED_CURVE
+ )
+
+
+def _sn_to_elliptic_curve(backend, sn):
+ try:
+ return ec._CURVE_TYPES[sn]()
+ except KeyError:
+ raise UnsupportedAlgorithm(
+ "{} is not a supported elliptic curve".format(sn),
+ _Reasons.UNSUPPORTED_ELLIPTIC_CURVE,
+ )
+
+
+def _ecdsa_sig_sign(backend, private_key, data):
+ max_size = backend._lib.ECDSA_size(private_key._ec_key)
+ backend.openssl_assert(max_size > 0)
+
+ sigbuf = backend._ffi.new("unsigned char[]", max_size)
+ siglen_ptr = backend._ffi.new("unsigned int[]", 1)
+ res = backend._lib.ECDSA_sign(
+ 0, data, len(data), sigbuf, siglen_ptr, private_key._ec_key
+ )
+ backend.openssl_assert(res == 1)
+ return backend._ffi.buffer(sigbuf)[: siglen_ptr[0]]
+
+
+def _ecdsa_sig_verify(backend, public_key, signature, data):
+ res = backend._lib.ECDSA_verify(
+ 0, data, len(data), signature, len(signature), public_key._ec_key
+ )
+ if res != 1:
+ backend._consume_errors()
+ raise InvalidSignature
+
+
+@utils.register_interface(AsymmetricSignatureContext)
+class _ECDSASignatureContext(object):
+ def __init__(self, backend, private_key, algorithm):
+ self._backend = backend
+ self._private_key = private_key
+ self._digest = hashes.Hash(algorithm, backend)
+
+ def update(self, data):
+ self._digest.update(data)
+
+ def finalize(self):
+ digest = self._digest.finalize()
+
+ return _ecdsa_sig_sign(self._backend, self._private_key, digest)
+
+
+@utils.register_interface(AsymmetricVerificationContext)
+class _ECDSAVerificationContext(object):
+ def __init__(self, backend, public_key, signature, algorithm):
+ self._backend = backend
+ self._public_key = public_key
+ self._signature = signature
+ self._digest = hashes.Hash(algorithm, backend)
+
+ def update(self, data):
+ self._digest.update(data)
+
+ def verify(self):
+ digest = self._digest.finalize()
+ _ecdsa_sig_verify(
+ self._backend, self._public_key, self._signature, digest
+ )
+
+
+@utils.register_interface(ec.EllipticCurvePrivateKeyWithSerialization)
+class _EllipticCurvePrivateKey(object):
+ def __init__(self, backend, ec_key_cdata, evp_pkey):
+ self._backend = backend
+ self._ec_key = ec_key_cdata
+ self._evp_pkey = evp_pkey
+
+ sn = _ec_key_curve_sn(backend, ec_key_cdata)
+ self._curve = _sn_to_elliptic_curve(backend, sn)
+ _mark_asn1_named_ec_curve(backend, ec_key_cdata)
+
+ curve = utils.read_only_property("_curve")
+
+ @property
+ def key_size(self):
+ return self.curve.key_size
+
+ def signer(self, signature_algorithm):
+ _warn_sign_verify_deprecated()
+ _check_signature_algorithm(signature_algorithm)
+ _check_not_prehashed(signature_algorithm.algorithm)
+ return _ECDSASignatureContext(
+ self._backend, self, signature_algorithm.algorithm
+ )
+
+ def exchange(self, algorithm, peer_public_key):
+ if not (
+ self._backend.elliptic_curve_exchange_algorithm_supported(
+ algorithm, self.curve
+ )
+ ):
+ raise UnsupportedAlgorithm(
+ "This backend does not support the ECDH algorithm.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+
+ if peer_public_key.curve.name != self.curve.name:
+ raise ValueError(
+ "peer_public_key and self are not on the same curve"
+ )
+
+ group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
+ z_len = (self._backend._lib.EC_GROUP_get_degree(group) + 7) // 8
+ self._backend.openssl_assert(z_len > 0)
+ z_buf = self._backend._ffi.new("uint8_t[]", z_len)
+ peer_key = self._backend._lib.EC_KEY_get0_public_key(
+ peer_public_key._ec_key
+ )
+
+ r = self._backend._lib.ECDH_compute_key(
+ z_buf, z_len, peer_key, self._ec_key, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(r > 0)
+ return self._backend._ffi.buffer(z_buf)[:z_len]
+
+ def public_key(self):
+ group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
+ self._backend.openssl_assert(group != self._backend._ffi.NULL)
+
+ curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group)
+ public_ec_key = self._backend._ec_key_new_by_curve_nid(curve_nid)
+
+ point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
+ self._backend.openssl_assert(point != self._backend._ffi.NULL)
+
+ res = self._backend._lib.EC_KEY_set_public_key(public_ec_key, point)
+ self._backend.openssl_assert(res == 1)
+
+ evp_pkey = self._backend._ec_cdata_to_evp_pkey(public_ec_key)
+
+ return _EllipticCurvePublicKey(self._backend, public_ec_key, evp_pkey)
+
+ def private_numbers(self):
+ bn = self._backend._lib.EC_KEY_get0_private_key(self._ec_key)
+ private_value = self._backend._bn_to_int(bn)
+ return ec.EllipticCurvePrivateNumbers(
+ private_value=private_value,
+ public_numbers=self.public_key().public_numbers(),
+ )
+
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ return self._backend._private_key_bytes(
+ encoding,
+ format,
+ encryption_algorithm,
+ self,
+ self._evp_pkey,
+ self._ec_key,
+ )
+
+ def sign(self, data, signature_algorithm):
+ _check_signature_algorithm(signature_algorithm)
+ data, algorithm = _calculate_digest_and_algorithm(
+ self._backend, data, signature_algorithm._algorithm
+ )
+ return _ecdsa_sig_sign(self._backend, self, data)
+
+
+@utils.register_interface(ec.EllipticCurvePublicKeyWithSerialization)
+class _EllipticCurvePublicKey(object):
+ def __init__(self, backend, ec_key_cdata, evp_pkey):
+ self._backend = backend
+ self._ec_key = ec_key_cdata
+ self._evp_pkey = evp_pkey
+
+ sn = _ec_key_curve_sn(backend, ec_key_cdata)
+ self._curve = _sn_to_elliptic_curve(backend, sn)
+ _mark_asn1_named_ec_curve(backend, ec_key_cdata)
+
+ curve = utils.read_only_property("_curve")
+
+ @property
+ def key_size(self):
+ return self.curve.key_size
+
+ def verifier(self, signature, signature_algorithm):
+ _warn_sign_verify_deprecated()
+ utils._check_bytes("signature", signature)
+
+ _check_signature_algorithm(signature_algorithm)
+ _check_not_prehashed(signature_algorithm.algorithm)
+ return _ECDSAVerificationContext(
+ self._backend, self, signature, signature_algorithm.algorithm
+ )
+
+ def public_numbers(self):
+ get_func, group = self._backend._ec_key_determine_group_get_func(
+ self._ec_key
+ )
+ point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
+ self._backend.openssl_assert(point != self._backend._ffi.NULL)
+
+ with self._backend._tmp_bn_ctx() as bn_ctx:
+ bn_x = self._backend._lib.BN_CTX_get(bn_ctx)
+ bn_y = self._backend._lib.BN_CTX_get(bn_ctx)
+
+ res = get_func(group, point, bn_x, bn_y, bn_ctx)
+ self._backend.openssl_assert(res == 1)
+
+ x = self._backend._bn_to_int(bn_x)
+ y = self._backend._bn_to_int(bn_y)
+
+ return ec.EllipticCurvePublicNumbers(x=x, y=y, curve=self._curve)
+
+ def _encode_point(self, format):
+ if format is serialization.PublicFormat.CompressedPoint:
+ conversion = self._backend._lib.POINT_CONVERSION_COMPRESSED
+ else:
+ assert format is serialization.PublicFormat.UncompressedPoint
+ conversion = self._backend._lib.POINT_CONVERSION_UNCOMPRESSED
+
+ group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
+ self._backend.openssl_assert(group != self._backend._ffi.NULL)
+ point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
+ self._backend.openssl_assert(point != self._backend._ffi.NULL)
+ with self._backend._tmp_bn_ctx() as bn_ctx:
+ buflen = self._backend._lib.EC_POINT_point2oct(
+ group, point, conversion, self._backend._ffi.NULL, 0, bn_ctx
+ )
+ self._backend.openssl_assert(buflen > 0)
+ buf = self._backend._ffi.new("char[]", buflen)
+ res = self._backend._lib.EC_POINT_point2oct(
+ group, point, conversion, buf, buflen, bn_ctx
+ )
+ self._backend.openssl_assert(buflen == res)
+
+ return self._backend._ffi.buffer(buf)[:]
+
+ def public_bytes(self, encoding, format):
+ if (
+ encoding is serialization.Encoding.X962
+ or format is serialization.PublicFormat.CompressedPoint
+ or format is serialization.PublicFormat.UncompressedPoint
+ ):
+ if encoding is not serialization.Encoding.X962 or format not in (
+ serialization.PublicFormat.CompressedPoint,
+ serialization.PublicFormat.UncompressedPoint,
+ ):
+ raise ValueError(
+ "X962 encoding must be used with CompressedPoint or "
+ "UncompressedPoint format"
+ )
+
+ return self._encode_point(format)
+ else:
+ return self._backend._public_key_bytes(
+ encoding, format, self, self._evp_pkey, None
+ )
+
+ def verify(self, signature, data, signature_algorithm):
+ _check_signature_algorithm(signature_algorithm)
+ data, algorithm = _calculate_digest_and_algorithm(
+ self._backend, data, signature_algorithm._algorithm
+ )
+ _ecdsa_sig_verify(self._backend, self, signature, data)
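_EllipticCurvePrivateKey above backs both ECDSA signatures and ECDH key agreement. A short sketch of both paths through the public API (curve choice is illustrative):

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec

backend = default_backend()

# ECDSA: sign() hashes the data and calls _ecdsa_sig_sign above.
signing_key = ec.generate_private_key(ec.SECP256R1(), backend)
data = b"hello ecdsa"
signature = signing_key.sign(data, ec.ECDSA(hashes.SHA256()))
signing_key.public_key().verify(signature, data, ec.ECDSA(hashes.SHA256()))

# ECDH: exchange() calls ECDH_compute_key inside _EllipticCurvePrivateKey.
our_key = ec.generate_private_key(ec.SECP256R1(), backend)
peer_key = ec.generate_private_key(ec.SECP256R1(), backend)
shared = our_key.exchange(ec.ECDH(), peer_key.public_key())
assert shared == peer_key.exchange(ec.ECDH(), our_key.public_key())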
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ed25519.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ed25519.py
new file mode 100644
index 0000000000..13bec3af10
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ed25519.py
@@ -0,0 +1,145 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import exceptions, utils
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric.ed25519 import (
+ Ed25519PrivateKey,
+ Ed25519PublicKey,
+ _ED25519_KEY_SIZE,
+ _ED25519_SIG_SIZE,
+)
+
+
+@utils.register_interface(Ed25519PublicKey)
+class _Ed25519PublicKey(object):
+ def __init__(self, backend, evp_pkey):
+ self._backend = backend
+ self._evp_pkey = evp_pkey
+
+ def public_bytes(self, encoding, format):
+ if (
+ encoding is serialization.Encoding.Raw
+ or format is serialization.PublicFormat.Raw
+ ):
+ if (
+ encoding is not serialization.Encoding.Raw
+ or format is not serialization.PublicFormat.Raw
+ ):
+ raise ValueError(
+ "When using Raw both encoding and format must be Raw"
+ )
+
+ return self._raw_public_bytes()
+
+ return self._backend._public_key_bytes(
+ encoding, format, self, self._evp_pkey, None
+ )
+
+ def _raw_public_bytes(self):
+ buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE)
+ buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE)
+ res = self._backend._lib.EVP_PKEY_get_raw_public_key(
+ self._evp_pkey, buf, buflen
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE)
+ return self._backend._ffi.buffer(buf, _ED25519_KEY_SIZE)[:]
+
+ def verify(self, signature, data):
+ evp_md_ctx = self._backend._lib.EVP_MD_CTX_new()
+ self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
+ evp_md_ctx = self._backend._ffi.gc(
+ evp_md_ctx, self._backend._lib.EVP_MD_CTX_free
+ )
+ res = self._backend._lib.EVP_DigestVerifyInit(
+ evp_md_ctx,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._evp_pkey,
+ )
+ self._backend.openssl_assert(res == 1)
+ res = self._backend._lib.EVP_DigestVerify(
+ evp_md_ctx, signature, len(signature), data, len(data)
+ )
+ if res != 1:
+ self._backend._consume_errors()
+ raise exceptions.InvalidSignature
+
+
+@utils.register_interface(Ed25519PrivateKey)
+class _Ed25519PrivateKey(object):
+ def __init__(self, backend, evp_pkey):
+ self._backend = backend
+ self._evp_pkey = evp_pkey
+
+ def public_key(self):
+ buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE)
+ buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE)
+ res = self._backend._lib.EVP_PKEY_get_raw_public_key(
+ self._evp_pkey, buf, buflen
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE)
+ public_bytes = self._backend._ffi.buffer(buf)[:]
+ return self._backend.ed25519_load_public_bytes(public_bytes)
+
+ def sign(self, data):
+ evp_md_ctx = self._backend._lib.EVP_MD_CTX_new()
+ self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
+ evp_md_ctx = self._backend._ffi.gc(
+ evp_md_ctx, self._backend._lib.EVP_MD_CTX_free
+ )
+ res = self._backend._lib.EVP_DigestSignInit(
+ evp_md_ctx,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._evp_pkey,
+ )
+ self._backend.openssl_assert(res == 1)
+ buf = self._backend._ffi.new("unsigned char[]", _ED25519_SIG_SIZE)
+ buflen = self._backend._ffi.new("size_t *", len(buf))
+ res = self._backend._lib.EVP_DigestSign(
+ evp_md_ctx, buf, buflen, data, len(data)
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _ED25519_SIG_SIZE)
+ return self._backend._ffi.buffer(buf, buflen[0])[:]
+
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ if (
+ encoding is serialization.Encoding.Raw
+ or format is serialization.PrivateFormat.Raw
+ ):
+ if (
+ format is not serialization.PrivateFormat.Raw
+ or encoding is not serialization.Encoding.Raw
+ or not isinstance(
+ encryption_algorithm, serialization.NoEncryption
+ )
+ ):
+ raise ValueError(
+ "When using Raw both encoding and format must be Raw "
+ "and encryption_algorithm must be NoEncryption()"
+ )
+
+ return self._raw_private_bytes()
+
+ return self._backend._private_key_bytes(
+ encoding, format, encryption_algorithm, self, self._evp_pkey, None
+ )
+
+ def _raw_private_bytes(self):
+ buf = self._backend._ffi.new("unsigned char []", _ED25519_KEY_SIZE)
+ buflen = self._backend._ffi.new("size_t *", _ED25519_KEY_SIZE)
+ res = self._backend._lib.EVP_PKEY_get_raw_private_key(
+ self._evp_pkey, buf, buflen
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _ED25519_KEY_SIZE)
+ return self._backend._ffi.buffer(buf, _ED25519_KEY_SIZE)[:]
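Ed25519 takes no hash or padding parameters, so the public surface is small. A sketch (assumes the linked OpenSSL build supports Ed25519; otherwise key generation raises UnsupportedAlgorithm):

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey

private_key = Ed25519PrivateKey.generate()
message = b"signed via EVP_DigestSign above"

signature = private_key.sign(message)  # 64 bytes (_ED25519_SIG_SIZE)
public_key = private_key.public_key()
public_key.verify(signature, message)  # raises InvalidSignature on mismatch

# Raw encoding round-trips the 32-byte key handled by _raw_public_bytes above.
raw = public_key.public_bytes(
    serialization.Encoding.Raw, serialization.PublicFormat.Raw
)
assert len(raw) == 32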
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ed448.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ed448.py
new file mode 100644
index 0000000000..6512770e5b
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ed448.py
@@ -0,0 +1,146 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import exceptions, utils
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric.ed448 import (
+ Ed448PrivateKey,
+ Ed448PublicKey,
+)
+
+_ED448_KEY_SIZE = 57
+_ED448_SIG_SIZE = 114
+
+
+@utils.register_interface(Ed448PublicKey)
+class _Ed448PublicKey(object):
+ def __init__(self, backend, evp_pkey):
+ self._backend = backend
+ self._evp_pkey = evp_pkey
+
+ def public_bytes(self, encoding, format):
+ if (
+ encoding is serialization.Encoding.Raw
+ or format is serialization.PublicFormat.Raw
+ ):
+ if (
+ encoding is not serialization.Encoding.Raw
+ or format is not serialization.PublicFormat.Raw
+ ):
+ raise ValueError(
+ "When using Raw both encoding and format must be Raw"
+ )
+
+ return self._raw_public_bytes()
+
+ return self._backend._public_key_bytes(
+ encoding, format, self, self._evp_pkey, None
+ )
+
+ def _raw_public_bytes(self):
+ buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE)
+ buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE)
+ res = self._backend._lib.EVP_PKEY_get_raw_public_key(
+ self._evp_pkey, buf, buflen
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE)
+ return self._backend._ffi.buffer(buf, _ED448_KEY_SIZE)[:]
+
+ def verify(self, signature, data):
+ evp_md_ctx = self._backend._lib.EVP_MD_CTX_new()
+ self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
+ evp_md_ctx = self._backend._ffi.gc(
+ evp_md_ctx, self._backend._lib.EVP_MD_CTX_free
+ )
+ res = self._backend._lib.EVP_DigestVerifyInit(
+ evp_md_ctx,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._evp_pkey,
+ )
+ self._backend.openssl_assert(res == 1)
+ res = self._backend._lib.EVP_DigestVerify(
+ evp_md_ctx, signature, len(signature), data, len(data)
+ )
+ if res != 1:
+ self._backend._consume_errors()
+ raise exceptions.InvalidSignature
+
+
+@utils.register_interface(Ed448PrivateKey)
+class _Ed448PrivateKey(object):
+ def __init__(self, backend, evp_pkey):
+ self._backend = backend
+ self._evp_pkey = evp_pkey
+
+ def public_key(self):
+ buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE)
+ buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE)
+ res = self._backend._lib.EVP_PKEY_get_raw_public_key(
+ self._evp_pkey, buf, buflen
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE)
+ public_bytes = self._backend._ffi.buffer(buf)[:]
+ return self._backend.ed448_load_public_bytes(public_bytes)
+
+ def sign(self, data):
+ evp_md_ctx = self._backend._lib.EVP_MD_CTX_new()
+ self._backend.openssl_assert(evp_md_ctx != self._backend._ffi.NULL)
+ evp_md_ctx = self._backend._ffi.gc(
+ evp_md_ctx, self._backend._lib.EVP_MD_CTX_free
+ )
+ res = self._backend._lib.EVP_DigestSignInit(
+ evp_md_ctx,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._evp_pkey,
+ )
+ self._backend.openssl_assert(res == 1)
+ buf = self._backend._ffi.new("unsigned char[]", _ED448_SIG_SIZE)
+ buflen = self._backend._ffi.new("size_t *", len(buf))
+ res = self._backend._lib.EVP_DigestSign(
+ evp_md_ctx, buf, buflen, data, len(data)
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _ED448_SIG_SIZE)
+ return self._backend._ffi.buffer(buf, buflen[0])[:]
+
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ if (
+ encoding is serialization.Encoding.Raw
+ or format is serialization.PrivateFormat.Raw
+ ):
+ if (
+ format is not serialization.PrivateFormat.Raw
+ or encoding is not serialization.Encoding.Raw
+ or not isinstance(
+ encryption_algorithm, serialization.NoEncryption
+ )
+ ):
+ raise ValueError(
+ "When using Raw both encoding and format must be Raw "
+ "and encryption_algorithm must be NoEncryption()"
+ )
+
+ return self._raw_private_bytes()
+
+ return self._backend._private_key_bytes(
+ encoding, format, encryption_algorithm, self, self._evp_pkey, None
+ )
+
+ def _raw_private_bytes(self):
+ buf = self._backend._ffi.new("unsigned char []", _ED448_KEY_SIZE)
+ buflen = self._backend._ffi.new("size_t *", _ED448_KEY_SIZE)
+ res = self._backend._lib.EVP_PKEY_get_raw_private_key(
+ self._evp_pkey, buf, buflen
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _ED448_KEY_SIZE)
+ return self._backend._ffi.buffer(buf, _ED448_KEY_SIZE)[:]
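Ed448 mirrors the Ed25519 wrapper with 57-byte keys and 114-byte signatures, matching the constants defined at the top of the module. A corresponding sketch (again assuming OpenSSL support for the algorithm):

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric.ed448 import Ed448PrivateKey

private_key = Ed448PrivateKey.generate()
message = b"ed448 example"

signature = private_key.sign(message)  # 114 bytes (_ED448_SIG_SIZE)
private_key.public_key().verify(signature, message)

raw_public = private_key.public_key().public_bytes(
    serialization.Encoding.Raw, serialization.PublicFormat.Raw
)
assert len(raw_public) == 57  # _ED448_KEY_SIZE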
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/encode_asn1.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/encode_asn1.py
new file mode 100644
index 0000000000..0a33200bbc
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/encode_asn1.py
@@ -0,0 +1,670 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import calendar
+import ipaddress
+
+import six
+
+from cryptography import utils, x509
+from cryptography.hazmat.backends.openssl.decode_asn1 import (
+ _CRL_ENTRY_REASON_ENUM_TO_CODE,
+ _DISTPOINT_TYPE_FULLNAME,
+ _DISTPOINT_TYPE_RELATIVENAME,
+)
+from cryptography.x509.name import _ASN1Type
+from cryptography.x509.oid import (
+ CRLEntryExtensionOID,
+ ExtensionOID,
+ OCSPExtensionOID,
+)
+
+
+def _encode_asn1_int(backend, x):
+ """
+ Converts a Python integer to an ASN1_INTEGER. The returned ASN1_INTEGER
+ will not be garbage collected (to support adding them to structs that take
+ ownership of the object). Be sure to register it for GC if it will be
+ discarded after use.
+ """
+ # Convert Python integer to OpenSSL "bignum" in case value exceeds
+ # machine's native integer limits (note: `int_to_bn` doesn't automatically
+ # GC).
+ i = backend._int_to_bn(x)
+ i = backend._ffi.gc(i, backend._lib.BN_free)
+
+ # Wrap in an ASN.1 integer. Don't GC -- as documented.
+ i = backend._lib.BN_to_ASN1_INTEGER(i, backend._ffi.NULL)
+ backend.openssl_assert(i != backend._ffi.NULL)
+ return i
+
+
+def _encode_asn1_int_gc(backend, x):
+ i = _encode_asn1_int(backend, x)
+ i = backend._ffi.gc(i, backend._lib.ASN1_INTEGER_free)
+ return i
+
+
+def _encode_asn1_str(backend, data):
+ """
+ Create an ASN1_OCTET_STRING from a Python byte string.
+ """
+ s = backend._lib.ASN1_OCTET_STRING_new()
+ res = backend._lib.ASN1_OCTET_STRING_set(s, data, len(data))
+ backend.openssl_assert(res == 1)
+ return s
+
+
+def _encode_asn1_utf8_str(backend, string):
+ """
+ Create an ASN1_UTF8STRING from a Python unicode string.
+ This object will be an ASN1_STRING with UTF8 type in OpenSSL and
+ can be decoded with ASN1_STRING_to_UTF8.
+ """
+ s = backend._lib.ASN1_UTF8STRING_new()
+ res = backend._lib.ASN1_STRING_set(
+ s, string.encode("utf8"), len(string.encode("utf8"))
+ )
+ backend.openssl_assert(res == 1)
+ return s
+
+
+def _encode_asn1_str_gc(backend, data):
+ s = _encode_asn1_str(backend, data)
+ s = backend._ffi.gc(s, backend._lib.ASN1_OCTET_STRING_free)
+ return s
+
+
+def _encode_inhibit_any_policy(backend, inhibit_any_policy):
+ return _encode_asn1_int_gc(backend, inhibit_any_policy.skip_certs)
+
+
+def _encode_name(backend, name):
+ """
+ The X509_NAME created will not be gc'd. Use _encode_name_gc if needed.
+ """
+ subject = backend._lib.X509_NAME_new()
+ for rdn in name.rdns:
+ set_flag = 0 # indicate whether to add to last RDN or create new RDN
+ for attribute in rdn:
+ name_entry = _encode_name_entry(backend, attribute)
+ # X509_NAME_add_entry dups the object so we need to gc this copy
+ name_entry = backend._ffi.gc(
+ name_entry, backend._lib.X509_NAME_ENTRY_free
+ )
+ res = backend._lib.X509_NAME_add_entry(
+ subject, name_entry, -1, set_flag
+ )
+ backend.openssl_assert(res == 1)
+ set_flag = -1
+ return subject
+
+
+def _encode_name_gc(backend, attributes):
+ subject = _encode_name(backend, attributes)
+ subject = backend._ffi.gc(subject, backend._lib.X509_NAME_free)
+ return subject
+
+
+def _encode_sk_name_entry(backend, attributes):
+ """
+ The sk_X509_NAME_ENTRY created will not be gc'd.
+ """
+ stack = backend._lib.sk_X509_NAME_ENTRY_new_null()
+ for attribute in attributes:
+ name_entry = _encode_name_entry(backend, attribute)
+ res = backend._lib.sk_X509_NAME_ENTRY_push(stack, name_entry)
+ backend.openssl_assert(res >= 1)
+ return stack
+
+
+def _encode_name_entry(backend, attribute):
+ if attribute._type is _ASN1Type.BMPString:
+ value = attribute.value.encode("utf_16_be")
+ elif attribute._type is _ASN1Type.UniversalString:
+ value = attribute.value.encode("utf_32_be")
+ else:
+ value = attribute.value.encode("utf8")
+
+ obj = _txt2obj_gc(backend, attribute.oid.dotted_string)
+
+ name_entry = backend._lib.X509_NAME_ENTRY_create_by_OBJ(
+ backend._ffi.NULL, obj, attribute._type.value, value, len(value)
+ )
+ return name_entry
+
+
+def _encode_crl_number_delta_crl_indicator(backend, ext):
+ return _encode_asn1_int_gc(backend, ext.crl_number)
+
+
+def _encode_issuing_dist_point(backend, ext):
+ idp = backend._lib.ISSUING_DIST_POINT_new()
+ backend.openssl_assert(idp != backend._ffi.NULL)
+ idp = backend._ffi.gc(idp, backend._lib.ISSUING_DIST_POINT_free)
+ idp.onlyuser = 255 if ext.only_contains_user_certs else 0
+ idp.onlyCA = 255 if ext.only_contains_ca_certs else 0
+ idp.indirectCRL = 255 if ext.indirect_crl else 0
+ idp.onlyattr = 255 if ext.only_contains_attribute_certs else 0
+ if ext.only_some_reasons:
+ idp.onlysomereasons = _encode_reasonflags(
+ backend, ext.only_some_reasons
+ )
+
+ if ext.full_name:
+ idp.distpoint = _encode_full_name(backend, ext.full_name)
+
+ if ext.relative_name:
+ idp.distpoint = _encode_relative_name(backend, ext.relative_name)
+
+ return idp
+
+
+def _encode_crl_reason(backend, crl_reason):
+ asn1enum = backend._lib.ASN1_ENUMERATED_new()
+ backend.openssl_assert(asn1enum != backend._ffi.NULL)
+ asn1enum = backend._ffi.gc(asn1enum, backend._lib.ASN1_ENUMERATED_free)
+ res = backend._lib.ASN1_ENUMERATED_set(
+ asn1enum, _CRL_ENTRY_REASON_ENUM_TO_CODE[crl_reason.reason]
+ )
+ backend.openssl_assert(res == 1)
+
+ return asn1enum
+
+
+def _encode_invalidity_date(backend, invalidity_date):
+ time = backend._lib.ASN1_GENERALIZEDTIME_set(
+ backend._ffi.NULL,
+ calendar.timegm(invalidity_date.invalidity_date.timetuple()),
+ )
+ backend.openssl_assert(time != backend._ffi.NULL)
+ time = backend._ffi.gc(time, backend._lib.ASN1_GENERALIZEDTIME_free)
+
+ return time
+
+
+def _encode_certificate_policies(backend, certificate_policies):
+ cp = backend._lib.sk_POLICYINFO_new_null()
+ backend.openssl_assert(cp != backend._ffi.NULL)
+ cp = backend._ffi.gc(cp, backend._lib.sk_POLICYINFO_free)
+ for policy_info in certificate_policies:
+ pi = backend._lib.POLICYINFO_new()
+ backend.openssl_assert(pi != backend._ffi.NULL)
+ res = backend._lib.sk_POLICYINFO_push(cp, pi)
+ backend.openssl_assert(res >= 1)
+ oid = _txt2obj(backend, policy_info.policy_identifier.dotted_string)
+ pi.policyid = oid
+ if policy_info.policy_qualifiers:
+ pqis = backend._lib.sk_POLICYQUALINFO_new_null()
+ backend.openssl_assert(pqis != backend._ffi.NULL)
+ for qualifier in policy_info.policy_qualifiers:
+ pqi = backend._lib.POLICYQUALINFO_new()
+ backend.openssl_assert(pqi != backend._ffi.NULL)
+ res = backend._lib.sk_POLICYQUALINFO_push(pqis, pqi)
+ backend.openssl_assert(res >= 1)
+ if isinstance(qualifier, six.text_type):
+ pqi.pqualid = _txt2obj(
+ backend, x509.OID_CPS_QUALIFIER.dotted_string
+ )
+ pqi.d.cpsuri = _encode_asn1_str(
+ backend,
+ qualifier.encode("ascii"),
+ )
+ else:
+ assert isinstance(qualifier, x509.UserNotice)
+ pqi.pqualid = _txt2obj(
+ backend, x509.OID_CPS_USER_NOTICE.dotted_string
+ )
+ un = backend._lib.USERNOTICE_new()
+ backend.openssl_assert(un != backend._ffi.NULL)
+ pqi.d.usernotice = un
+ if qualifier.explicit_text:
+ un.exptext = _encode_asn1_utf8_str(
+ backend, qualifier.explicit_text
+ )
+
+ un.noticeref = _encode_notice_reference(
+ backend, qualifier.notice_reference
+ )
+
+ pi.qualifiers = pqis
+
+ return cp
+
+
+def _encode_notice_reference(backend, notice):
+ if notice is None:
+ return backend._ffi.NULL
+ else:
+ nr = backend._lib.NOTICEREF_new()
+ backend.openssl_assert(nr != backend._ffi.NULL)
+ # organization is a required field
+ nr.organization = _encode_asn1_utf8_str(backend, notice.organization)
+
+ notice_stack = backend._lib.sk_ASN1_INTEGER_new_null()
+ nr.noticenos = notice_stack
+ for number in notice.notice_numbers:
+ num = _encode_asn1_int(backend, number)
+ res = backend._lib.sk_ASN1_INTEGER_push(notice_stack, num)
+ backend.openssl_assert(res >= 1)
+
+ return nr
+
+
+def _txt2obj(backend, name):
+ """
+ Converts a Python string with an ASN.1 object ID in dotted form to an
+ ASN1_OBJECT.
+ """
+ name = name.encode("ascii")
+ obj = backend._lib.OBJ_txt2obj(name, 1)
+ backend.openssl_assert(obj != backend._ffi.NULL)
+ return obj
+
+
+def _txt2obj_gc(backend, name):
+ obj = _txt2obj(backend, name)
+ obj = backend._ffi.gc(obj, backend._lib.ASN1_OBJECT_free)
+ return obj
+
+
+def _encode_ocsp_nocheck(backend, ext):
+ # Doesn't need to be GC'd
+ return backend._lib.ASN1_NULL_new()
+
+
+def _encode_key_usage(backend, key_usage):
+ set_bit = backend._lib.ASN1_BIT_STRING_set_bit
+ ku = backend._lib.ASN1_BIT_STRING_new()
+ ku = backend._ffi.gc(ku, backend._lib.ASN1_BIT_STRING_free)
+ res = set_bit(ku, 0, key_usage.digital_signature)
+ backend.openssl_assert(res == 1)
+ res = set_bit(ku, 1, key_usage.content_commitment)
+ backend.openssl_assert(res == 1)
+ res = set_bit(ku, 2, key_usage.key_encipherment)
+ backend.openssl_assert(res == 1)
+ res = set_bit(ku, 3, key_usage.data_encipherment)
+ backend.openssl_assert(res == 1)
+ res = set_bit(ku, 4, key_usage.key_agreement)
+ backend.openssl_assert(res == 1)
+ res = set_bit(ku, 5, key_usage.key_cert_sign)
+ backend.openssl_assert(res == 1)
+ res = set_bit(ku, 6, key_usage.crl_sign)
+ backend.openssl_assert(res == 1)
+ if key_usage.key_agreement:
+ res = set_bit(ku, 7, key_usage.encipher_only)
+ backend.openssl_assert(res == 1)
+ res = set_bit(ku, 8, key_usage.decipher_only)
+ backend.openssl_assert(res == 1)
+ else:
+ res = set_bit(ku, 7, 0)
+ backend.openssl_assert(res == 1)
+ res = set_bit(ku, 8, 0)
+ backend.openssl_assert(res == 1)
+
+ return ku
+
+
+def _encode_authority_key_identifier(backend, authority_keyid):
+ akid = backend._lib.AUTHORITY_KEYID_new()
+ backend.openssl_assert(akid != backend._ffi.NULL)
+ akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free)
+ if authority_keyid.key_identifier is not None:
+ akid.keyid = _encode_asn1_str(
+ backend,
+ authority_keyid.key_identifier,
+ )
+
+ if authority_keyid.authority_cert_issuer is not None:
+ akid.issuer = _encode_general_names(
+ backend, authority_keyid.authority_cert_issuer
+ )
+
+ if authority_keyid.authority_cert_serial_number is not None:
+ akid.serial = _encode_asn1_int(
+ backend, authority_keyid.authority_cert_serial_number
+ )
+
+ return akid
+
+
+def _encode_basic_constraints(backend, basic_constraints):
+ constraints = backend._lib.BASIC_CONSTRAINTS_new()
+ constraints = backend._ffi.gc(
+ constraints, backend._lib.BASIC_CONSTRAINTS_free
+ )
+ constraints.ca = 255 if basic_constraints.ca else 0
+ if basic_constraints.ca and basic_constraints.path_length is not None:
+ constraints.pathlen = _encode_asn1_int(
+ backend, basic_constraints.path_length
+ )
+
+ return constraints
+
+
+def _encode_information_access(backend, info_access):
+ aia = backend._lib.sk_ACCESS_DESCRIPTION_new_null()
+ backend.openssl_assert(aia != backend._ffi.NULL)
+ aia = backend._ffi.gc(
+ aia,
+ lambda x: backend._lib.sk_ACCESS_DESCRIPTION_pop_free(
+ x,
+ backend._ffi.addressof(
+ backend._lib._original_lib, "ACCESS_DESCRIPTION_free"
+ ),
+ ),
+ )
+ for access_description in info_access:
+ ad = backend._lib.ACCESS_DESCRIPTION_new()
+ method = _txt2obj(
+ backend, access_description.access_method.dotted_string
+ )
+ _encode_general_name_preallocated(
+ backend, access_description.access_location, ad.location
+ )
+ ad.method = method
+ res = backend._lib.sk_ACCESS_DESCRIPTION_push(aia, ad)
+ backend.openssl_assert(res >= 1)
+
+ return aia
+
+
+def _encode_general_names(backend, names):
+ general_names = backend._lib.GENERAL_NAMES_new()
+ backend.openssl_assert(general_names != backend._ffi.NULL)
+ for name in names:
+ gn = _encode_general_name(backend, name)
+ res = backend._lib.sk_GENERAL_NAME_push(general_names, gn)
+ backend.openssl_assert(res != 0)
+
+ return general_names
+
+
+def _encode_alt_name(backend, san):
+ general_names = _encode_general_names(backend, san)
+ general_names = backend._ffi.gc(
+ general_names, backend._lib.GENERAL_NAMES_free
+ )
+ return general_names
+
+
+def _encode_subject_key_identifier(backend, ski):
+ return _encode_asn1_str_gc(backend, ski.digest)
+
+
+def _encode_general_name(backend, name):
+ gn = backend._lib.GENERAL_NAME_new()
+ _encode_general_name_preallocated(backend, name, gn)
+ return gn
+
+
+def _encode_general_name_preallocated(backend, name, gn):
+ if isinstance(name, x509.DNSName):
+ backend.openssl_assert(gn != backend._ffi.NULL)
+ gn.type = backend._lib.GEN_DNS
+
+ ia5 = backend._lib.ASN1_IA5STRING_new()
+ backend.openssl_assert(ia5 != backend._ffi.NULL)
+ # ia5strings are supposed to be ITU T.50 but to allow round-tripping
+ # of broken certs that encode utf8 we'll encode utf8 here too.
+ value = name.value.encode("utf8")
+
+ res = backend._lib.ASN1_STRING_set(ia5, value, len(value))
+ backend.openssl_assert(res == 1)
+ gn.d.dNSName = ia5
+ elif isinstance(name, x509.RegisteredID):
+ backend.openssl_assert(gn != backend._ffi.NULL)
+ gn.type = backend._lib.GEN_RID
+ obj = backend._lib.OBJ_txt2obj(
+ name.value.dotted_string.encode("ascii"), 1
+ )
+ backend.openssl_assert(obj != backend._ffi.NULL)
+ gn.d.registeredID = obj
+ elif isinstance(name, x509.DirectoryName):
+ backend.openssl_assert(gn != backend._ffi.NULL)
+ dir_name = _encode_name(backend, name.value)
+ gn.type = backend._lib.GEN_DIRNAME
+ gn.d.directoryName = dir_name
+ elif isinstance(name, x509.IPAddress):
+ backend.openssl_assert(gn != backend._ffi.NULL)
+ if isinstance(name.value, ipaddress.IPv4Network):
+ packed = name.value.network_address.packed + utils.int_to_bytes(
+ ((1 << 32) - name.value.num_addresses), 4
+ )
+ elif isinstance(name.value, ipaddress.IPv6Network):
+ packed = name.value.network_address.packed + utils.int_to_bytes(
+ (1 << 128) - name.value.num_addresses, 16
+ )
+ else:
+ packed = name.value.packed
+ ipaddr = _encode_asn1_str(backend, packed)
+ gn.type = backend._lib.GEN_IPADD
+ gn.d.iPAddress = ipaddr
+ elif isinstance(name, x509.OtherName):
+ backend.openssl_assert(gn != backend._ffi.NULL)
+ other_name = backend._lib.OTHERNAME_new()
+ backend.openssl_assert(other_name != backend._ffi.NULL)
+
+ type_id = backend._lib.OBJ_txt2obj(
+ name.type_id.dotted_string.encode("ascii"), 1
+ )
+ backend.openssl_assert(type_id != backend._ffi.NULL)
+ data = backend._ffi.new("unsigned char[]", name.value)
+ data_ptr_ptr = backend._ffi.new("unsigned char **")
+ data_ptr_ptr[0] = data
+ value = backend._lib.d2i_ASN1_TYPE(
+ backend._ffi.NULL, data_ptr_ptr, len(name.value)
+ )
+ if value == backend._ffi.NULL:
+ backend._consume_errors()
+ raise ValueError("Invalid ASN.1 data")
+ other_name.type_id = type_id
+ other_name.value = value
+ gn.type = backend._lib.GEN_OTHERNAME
+ gn.d.otherName = other_name
+ elif isinstance(name, x509.RFC822Name):
+ backend.openssl_assert(gn != backend._ffi.NULL)
+ # ia5strings are supposed to be ITU T.50 but to allow round-tripping
+ # of broken certs that encode utf8 we'll encode utf8 here too.
+ data = name.value.encode("utf8")
+ asn1_str = _encode_asn1_str(backend, data)
+ gn.type = backend._lib.GEN_EMAIL
+ gn.d.rfc822Name = asn1_str
+ elif isinstance(name, x509.UniformResourceIdentifier):
+ backend.openssl_assert(gn != backend._ffi.NULL)
+ # ia5strings are supposed to be ITU T.50 but to allow round-tripping
+ # of broken certs that encode utf8 we'll encode utf8 here too.
+ data = name.value.encode("utf8")
+ asn1_str = _encode_asn1_str(backend, data)
+ gn.type = backend._lib.GEN_URI
+ gn.d.uniformResourceIdentifier = asn1_str
+ else:
+ raise ValueError("{} is an unknown GeneralName type".format(name))
+
+
+def _encode_extended_key_usage(backend, extended_key_usage):
+ eku = backend._lib.sk_ASN1_OBJECT_new_null()
+ eku = backend._ffi.gc(eku, backend._lib.sk_ASN1_OBJECT_free)
+ for oid in extended_key_usage:
+ obj = _txt2obj(backend, oid.dotted_string)
+ res = backend._lib.sk_ASN1_OBJECT_push(eku, obj)
+ backend.openssl_assert(res >= 1)
+
+ return eku
+
+
+_CRLREASONFLAGS = {
+ x509.ReasonFlags.key_compromise: 1,
+ x509.ReasonFlags.ca_compromise: 2,
+ x509.ReasonFlags.affiliation_changed: 3,
+ x509.ReasonFlags.superseded: 4,
+ x509.ReasonFlags.cessation_of_operation: 5,
+ x509.ReasonFlags.certificate_hold: 6,
+ x509.ReasonFlags.privilege_withdrawn: 7,
+ x509.ReasonFlags.aa_compromise: 8,
+}
+
+
+def _encode_reasonflags(backend, reasons):
+ bitmask = backend._lib.ASN1_BIT_STRING_new()
+ backend.openssl_assert(bitmask != backend._ffi.NULL)
+ for reason in reasons:
+ res = backend._lib.ASN1_BIT_STRING_set_bit(
+ bitmask, _CRLREASONFLAGS[reason], 1
+ )
+ backend.openssl_assert(res == 1)
+
+ return bitmask
+
+
+def _encode_full_name(backend, full_name):
+ dpn = backend._lib.DIST_POINT_NAME_new()
+ backend.openssl_assert(dpn != backend._ffi.NULL)
+ dpn.type = _DISTPOINT_TYPE_FULLNAME
+ dpn.name.fullname = _encode_general_names(backend, full_name)
+ return dpn
+
+
+def _encode_relative_name(backend, relative_name):
+ dpn = backend._lib.DIST_POINT_NAME_new()
+ backend.openssl_assert(dpn != backend._ffi.NULL)
+ dpn.type = _DISTPOINT_TYPE_RELATIVENAME
+ dpn.name.relativename = _encode_sk_name_entry(backend, relative_name)
+ return dpn
+
+
+def _encode_cdps_freshest_crl(backend, cdps):
+ cdp = backend._lib.sk_DIST_POINT_new_null()
+ cdp = backend._ffi.gc(cdp, backend._lib.sk_DIST_POINT_free)
+ for point in cdps:
+ dp = backend._lib.DIST_POINT_new()
+ backend.openssl_assert(dp != backend._ffi.NULL)
+
+ if point.reasons:
+ dp.reasons = _encode_reasonflags(backend, point.reasons)
+
+ if point.full_name:
+ dp.distpoint = _encode_full_name(backend, point.full_name)
+
+ if point.relative_name:
+ dp.distpoint = _encode_relative_name(backend, point.relative_name)
+
+ if point.crl_issuer:
+ dp.CRLissuer = _encode_general_names(backend, point.crl_issuer)
+
+ res = backend._lib.sk_DIST_POINT_push(cdp, dp)
+ backend.openssl_assert(res >= 1)
+
+ return cdp
+
+
+def _encode_name_constraints(backend, name_constraints):
+ nc = backend._lib.NAME_CONSTRAINTS_new()
+ backend.openssl_assert(nc != backend._ffi.NULL)
+ nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free)
+ permitted = _encode_general_subtree(
+ backend, name_constraints.permitted_subtrees
+ )
+ nc.permittedSubtrees = permitted
+ excluded = _encode_general_subtree(
+ backend, name_constraints.excluded_subtrees
+ )
+ nc.excludedSubtrees = excluded
+
+ return nc
+
+
+def _encode_policy_constraints(backend, policy_constraints):
+ pc = backend._lib.POLICY_CONSTRAINTS_new()
+ backend.openssl_assert(pc != backend._ffi.NULL)
+ pc = backend._ffi.gc(pc, backend._lib.POLICY_CONSTRAINTS_free)
+ if policy_constraints.require_explicit_policy is not None:
+ pc.requireExplicitPolicy = _encode_asn1_int(
+ backend, policy_constraints.require_explicit_policy
+ )
+
+ if policy_constraints.inhibit_policy_mapping is not None:
+ pc.inhibitPolicyMapping = _encode_asn1_int(
+ backend, policy_constraints.inhibit_policy_mapping
+ )
+
+ return pc
+
+
+def _encode_general_subtree(backend, subtrees):
+ if subtrees is None:
+ return backend._ffi.NULL
+ else:
+ general_subtrees = backend._lib.sk_GENERAL_SUBTREE_new_null()
+ for name in subtrees:
+ gs = backend._lib.GENERAL_SUBTREE_new()
+ gs.base = _encode_general_name(backend, name)
+ res = backend._lib.sk_GENERAL_SUBTREE_push(general_subtrees, gs)
+ backend.openssl_assert(res >= 1)
+
+ return general_subtrees
+
+
+def _encode_precert_signed_certificate_timestamps(backend, scts):
+ sct_stack = backend._lib.sk_SCT_new_null()
+ backend.openssl_assert(sct_stack != backend._ffi.NULL)
+ sct_stack = backend._ffi.gc(sct_stack, backend._lib.sk_SCT_free)
+ for sct in scts:
+ res = backend._lib.sk_SCT_push(sct_stack, sct._sct)
+ backend.openssl_assert(res >= 1)
+ return sct_stack
+
+
+def _encode_nonce(backend, nonce):
+ return _encode_asn1_str_gc(backend, nonce.nonce)
+
+
+_EXTENSION_ENCODE_HANDLERS = {
+ ExtensionOID.BASIC_CONSTRAINTS: _encode_basic_constraints,
+ ExtensionOID.SUBJECT_KEY_IDENTIFIER: _encode_subject_key_identifier,
+ ExtensionOID.KEY_USAGE: _encode_key_usage,
+ ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _encode_alt_name,
+ ExtensionOID.ISSUER_ALTERNATIVE_NAME: _encode_alt_name,
+ ExtensionOID.EXTENDED_KEY_USAGE: _encode_extended_key_usage,
+ ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier,
+ ExtensionOID.CERTIFICATE_POLICIES: _encode_certificate_policies,
+ ExtensionOID.AUTHORITY_INFORMATION_ACCESS: _encode_information_access,
+ ExtensionOID.SUBJECT_INFORMATION_ACCESS: _encode_information_access,
+ ExtensionOID.CRL_DISTRIBUTION_POINTS: _encode_cdps_freshest_crl,
+ ExtensionOID.FRESHEST_CRL: _encode_cdps_freshest_crl,
+ ExtensionOID.INHIBIT_ANY_POLICY: _encode_inhibit_any_policy,
+ ExtensionOID.OCSP_NO_CHECK: _encode_ocsp_nocheck,
+ ExtensionOID.NAME_CONSTRAINTS: _encode_name_constraints,
+ ExtensionOID.POLICY_CONSTRAINTS: _encode_policy_constraints,
+ ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: (
+ _encode_precert_signed_certificate_timestamps
+ ),
+}
+
+_CRL_EXTENSION_ENCODE_HANDLERS = {
+ ExtensionOID.ISSUER_ALTERNATIVE_NAME: _encode_alt_name,
+ ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier,
+ ExtensionOID.AUTHORITY_INFORMATION_ACCESS: _encode_information_access,
+ ExtensionOID.CRL_NUMBER: _encode_crl_number_delta_crl_indicator,
+ ExtensionOID.DELTA_CRL_INDICATOR: _encode_crl_number_delta_crl_indicator,
+ ExtensionOID.ISSUING_DISTRIBUTION_POINT: _encode_issuing_dist_point,
+ ExtensionOID.FRESHEST_CRL: _encode_cdps_freshest_crl,
+}
+
+_CRL_ENTRY_EXTENSION_ENCODE_HANDLERS = {
+ CRLEntryExtensionOID.CERTIFICATE_ISSUER: _encode_alt_name,
+ CRLEntryExtensionOID.CRL_REASON: _encode_crl_reason,
+ CRLEntryExtensionOID.INVALIDITY_DATE: _encode_invalidity_date,
+}
+
+_OCSP_REQUEST_EXTENSION_ENCODE_HANDLERS = {
+ OCSPExtensionOID.NONCE: _encode_nonce,
+}
+
+_OCSP_BASICRESP_EXTENSION_ENCODE_HANDLERS = {
+ OCSPExtensionOID.NONCE: _encode_nonce,
+}
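The handler tables above map extension OIDs to the encoder functions defined earlier in this file when certificates, CRLs and OCSP structures are serialized. As a hedged illustration (not taken from this diff), the sketch below builds the kind of public-API GeneralName and extension objects that `_encode_general_name` and `_encode_alt_name` consume; the host names and address are examples only, and on Python 2 the `ipaddress` backport that cryptography already depends on is assumed:

    import ipaddress

    from cryptography import x509
    from cryptography.x509.oid import ExtensionOID

    # Each entry below corresponds to one branch of _encode_general_name.
    san = x509.SubjectAlternativeName([
        x509.DNSName(u"example.com"),
        x509.RFC822Name(u"admin@example.com"),
        x509.UniformResourceIdentifier(u"https://example.com"),
        x509.IPAddress(ipaddress.ip_address(u"192.0.2.1")),
    ])

    # _EXTENSION_ENCODE_HANDLERS dispatches on the extension OID, here
    # ExtensionOID.SUBJECT_ALTERNATIVE_NAME, to _encode_alt_name.
    assert san.oid == ExtensionOID.SUBJECT_ALTERNATIVE_NAME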
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/hashes.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/hashes.py
new file mode 100644
index 0000000000..764dce0ede
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/hashes.py
@@ -0,0 +1,82 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+
+from cryptography import utils
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.primitives import hashes
+
+
+@utils.register_interface(hashes.HashContext)
+class _HashContext(object):
+ def __init__(self, backend, algorithm, ctx=None):
+ self._algorithm = algorithm
+
+ self._backend = backend
+
+ if ctx is None:
+ ctx = self._backend._lib.EVP_MD_CTX_new()
+ ctx = self._backend._ffi.gc(
+ ctx, self._backend._lib.EVP_MD_CTX_free
+ )
+ evp_md = self._backend._evp_md_from_algorithm(algorithm)
+ if evp_md == self._backend._ffi.NULL:
+ raise UnsupportedAlgorithm(
+ "{} is not a supported hash on this backend.".format(
+ algorithm.name
+ ),
+ _Reasons.UNSUPPORTED_HASH,
+ )
+ res = self._backend._lib.EVP_DigestInit_ex(
+ ctx, evp_md, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(res != 0)
+
+ self._ctx = ctx
+
+ algorithm = utils.read_only_property("_algorithm")
+
+ def copy(self):
+ copied_ctx = self._backend._lib.EVP_MD_CTX_new()
+ copied_ctx = self._backend._ffi.gc(
+ copied_ctx, self._backend._lib.EVP_MD_CTX_free
+ )
+ res = self._backend._lib.EVP_MD_CTX_copy_ex(copied_ctx, self._ctx)
+ self._backend.openssl_assert(res != 0)
+ return _HashContext(self._backend, self.algorithm, ctx=copied_ctx)
+
+ def update(self, data):
+ data_ptr = self._backend._ffi.from_buffer(data)
+ res = self._backend._lib.EVP_DigestUpdate(
+ self._ctx, data_ptr, len(data)
+ )
+ self._backend.openssl_assert(res != 0)
+
+ def finalize(self):
+ if isinstance(self.algorithm, hashes.ExtendableOutputFunction):
+ # extendable output functions use a different finalize
+ return self._finalize_xof()
+ else:
+ buf = self._backend._ffi.new(
+ "unsigned char[]", self._backend._lib.EVP_MAX_MD_SIZE
+ )
+ outlen = self._backend._ffi.new("unsigned int *")
+ res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen)
+ self._backend.openssl_assert(res != 0)
+ self._backend.openssl_assert(
+ outlen[0] == self.algorithm.digest_size
+ )
+ return self._backend._ffi.buffer(buf)[: outlen[0]]
+
+ def _finalize_xof(self):
+ buf = self._backend._ffi.new(
+ "unsigned char[]", self.algorithm.digest_size
+ )
+ res = self._backend._lib.EVP_DigestFinalXOF(
+ self._ctx, buf, self.algorithm.digest_size
+ )
+ self._backend.openssl_assert(res != 0)
+ return self._backend._ffi.buffer(buf)[: self.algorithm.digest_size]
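_HashContext is the OpenSSL implementation behind the public hashes.Hash API, and _finalize_xof covers extendable-output functions such as hashes.SHAKE256(digest_size=...). A minimal usage sketch, assuming a cryptography version of this vintage that still accepts an explicit backend argument:

    import binascii

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes

    digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
    digest.update(b"hello ")
    digest.update(b"world")  # update() may be called repeatedly
    print(binascii.hexlify(digest.finalize()))  # 32-byte SHA-256 digest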
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/hmac.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/hmac.py
new file mode 100644
index 0000000000..1cc9d99fec
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/hmac.py
@@ -0,0 +1,76 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+
+from cryptography import utils
+from cryptography.exceptions import (
+ InvalidSignature,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.primitives import constant_time, hashes
+
+
+@utils.register_interface(hashes.HashContext)
+class _HMACContext(object):
+ def __init__(self, backend, key, algorithm, ctx=None):
+ self._algorithm = algorithm
+ self._backend = backend
+
+ if ctx is None:
+ ctx = self._backend._lib.HMAC_CTX_new()
+ self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
+ ctx = self._backend._ffi.gc(ctx, self._backend._lib.HMAC_CTX_free)
+ evp_md = self._backend._evp_md_from_algorithm(algorithm)
+ if evp_md == self._backend._ffi.NULL:
+ raise UnsupportedAlgorithm(
+ "{} is not a supported hash on this backend".format(
+ algorithm.name
+ ),
+ _Reasons.UNSUPPORTED_HASH,
+ )
+ key_ptr = self._backend._ffi.from_buffer(key)
+ res = self._backend._lib.HMAC_Init_ex(
+ ctx, key_ptr, len(key), evp_md, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(res != 0)
+
+ self._ctx = ctx
+ self._key = key
+
+ algorithm = utils.read_only_property("_algorithm")
+
+ def copy(self):
+ copied_ctx = self._backend._lib.HMAC_CTX_new()
+ self._backend.openssl_assert(copied_ctx != self._backend._ffi.NULL)
+ copied_ctx = self._backend._ffi.gc(
+ copied_ctx, self._backend._lib.HMAC_CTX_free
+ )
+ res = self._backend._lib.HMAC_CTX_copy(copied_ctx, self._ctx)
+ self._backend.openssl_assert(res != 0)
+ return _HMACContext(
+ self._backend, self._key, self.algorithm, ctx=copied_ctx
+ )
+
+ def update(self, data):
+ data_ptr = self._backend._ffi.from_buffer(data)
+ res = self._backend._lib.HMAC_Update(self._ctx, data_ptr, len(data))
+ self._backend.openssl_assert(res != 0)
+
+ def finalize(self):
+ buf = self._backend._ffi.new(
+ "unsigned char[]", self._backend._lib.EVP_MAX_MD_SIZE
+ )
+ outlen = self._backend._ffi.new("unsigned int *")
+ res = self._backend._lib.HMAC_Final(self._ctx, buf, outlen)
+ self._backend.openssl_assert(res != 0)
+ self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size)
+ return self._backend._ffi.buffer(buf)[: outlen[0]]
+
+ def verify(self, signature):
+ digest = self.finalize()
+ if not constant_time.bytes_eq(digest, signature):
+ raise InvalidSignature("Signature did not match digest.")
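_HMACContext backs the public hmac.HMAC API; its verify() is the constant-time comparison path shown above. A minimal sketch, assuming an explicit backend argument; the key below is a placeholder and should be randomly generated in real use:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes, hmac

    key = b"\x00" * 32  # placeholder key for illustration only
    h = hmac.HMAC(key, hashes.SHA256(), backend=default_backend())
    h.update(b"message to authenticate")
    tag = h.finalize()

    verifier = hmac.HMAC(key, hashes.SHA256(), backend=default_backend())
    verifier.update(b"message to authenticate")
    verifier.verify(tag)  # raises InvalidSignature on mismatch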
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ocsp.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ocsp.py
new file mode 100644
index 0000000000..50c02e7a80
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/ocsp.py
@@ -0,0 +1,401 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import functools
+
+from cryptography import utils, x509
+from cryptography.exceptions import UnsupportedAlgorithm
+from cryptography.hazmat.backends.openssl.decode_asn1 import (
+ _CRL_ENTRY_REASON_CODE_TO_ENUM,
+ _asn1_integer_to_int,
+ _asn1_string_to_bytes,
+ _decode_x509_name,
+ _obj2txt,
+ _parse_asn1_generalized_time,
+)
+from cryptography.hazmat.backends.openssl.x509 import _Certificate
+from cryptography.hazmat.primitives import serialization
+from cryptography.x509.ocsp import (
+ OCSPCertStatus,
+ OCSPRequest,
+ OCSPResponse,
+ OCSPResponseStatus,
+ _CERT_STATUS_TO_ENUM,
+ _OIDS_TO_HASH,
+ _RESPONSE_STATUS_TO_ENUM,
+)
+
+
+def _requires_successful_response(func):
+ @functools.wraps(func)
+ def wrapper(self, *args):
+ if self.response_status != OCSPResponseStatus.SUCCESSFUL:
+ raise ValueError(
+ "OCSP response status is not successful so the property "
+ "has no value"
+ )
+ else:
+ return func(self, *args)
+
+ return wrapper
+
+
+def _issuer_key_hash(backend, cert_id):
+ key_hash = backend._ffi.new("ASN1_OCTET_STRING **")
+ res = backend._lib.OCSP_id_get0_info(
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ key_hash,
+ backend._ffi.NULL,
+ cert_id,
+ )
+ backend.openssl_assert(res == 1)
+ backend.openssl_assert(key_hash[0] != backend._ffi.NULL)
+ return _asn1_string_to_bytes(backend, key_hash[0])
+
+
+def _issuer_name_hash(backend, cert_id):
+ name_hash = backend._ffi.new("ASN1_OCTET_STRING **")
+ res = backend._lib.OCSP_id_get0_info(
+ name_hash,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ cert_id,
+ )
+ backend.openssl_assert(res == 1)
+ backend.openssl_assert(name_hash[0] != backend._ffi.NULL)
+ return _asn1_string_to_bytes(backend, name_hash[0])
+
+
+def _serial_number(backend, cert_id):
+ num = backend._ffi.new("ASN1_INTEGER **")
+ res = backend._lib.OCSP_id_get0_info(
+ backend._ffi.NULL, backend._ffi.NULL, backend._ffi.NULL, num, cert_id
+ )
+ backend.openssl_assert(res == 1)
+ backend.openssl_assert(num[0] != backend._ffi.NULL)
+ return _asn1_integer_to_int(backend, num[0])
+
+
+def _hash_algorithm(backend, cert_id):
+ asn1obj = backend._ffi.new("ASN1_OBJECT **")
+ res = backend._lib.OCSP_id_get0_info(
+ backend._ffi.NULL,
+ asn1obj,
+ backend._ffi.NULL,
+ backend._ffi.NULL,
+ cert_id,
+ )
+ backend.openssl_assert(res == 1)
+ backend.openssl_assert(asn1obj[0] != backend._ffi.NULL)
+ oid = _obj2txt(backend, asn1obj[0])
+ try:
+ return _OIDS_TO_HASH[oid]
+ except KeyError:
+ raise UnsupportedAlgorithm(
+ "Signature algorithm OID: {} not recognized".format(oid)
+ )
+
+
+@utils.register_interface(OCSPResponse)
+class _OCSPResponse(object):
+ def __init__(self, backend, ocsp_response):
+ self._backend = backend
+ self._ocsp_response = ocsp_response
+ status = self._backend._lib.OCSP_response_status(self._ocsp_response)
+ self._backend.openssl_assert(status in _RESPONSE_STATUS_TO_ENUM)
+ self._status = _RESPONSE_STATUS_TO_ENUM[status]
+ if self._status is OCSPResponseStatus.SUCCESSFUL:
+ basic = self._backend._lib.OCSP_response_get1_basic(
+ self._ocsp_response
+ )
+ self._backend.openssl_assert(basic != self._backend._ffi.NULL)
+ self._basic = self._backend._ffi.gc(
+ basic, self._backend._lib.OCSP_BASICRESP_free
+ )
+ num_resp = self._backend._lib.OCSP_resp_count(self._basic)
+ if num_resp != 1:
+ raise ValueError(
+ "OCSP response contains more than one SINGLERESP structure"
+ ", which this library does not support. "
+ "{} found".format(num_resp)
+ )
+ self._single = self._backend._lib.OCSP_resp_get0(self._basic, 0)
+ self._backend.openssl_assert(
+ self._single != self._backend._ffi.NULL
+ )
+ self._cert_id = self._backend._lib.OCSP_SINGLERESP_get0_id(
+ self._single
+ )
+ self._backend.openssl_assert(
+ self._cert_id != self._backend._ffi.NULL
+ )
+
+ response_status = utils.read_only_property("_status")
+
+ @property
+ @_requires_successful_response
+ def signature_algorithm_oid(self):
+ alg = self._backend._lib.OCSP_resp_get0_tbs_sigalg(self._basic)
+ self._backend.openssl_assert(alg != self._backend._ffi.NULL)
+ oid = _obj2txt(self._backend, alg.algorithm)
+ return x509.ObjectIdentifier(oid)
+
+ @property
+ @_requires_successful_response
+ def signature_hash_algorithm(self):
+ oid = self.signature_algorithm_oid
+ try:
+ return x509._SIG_OIDS_TO_HASH[oid]
+ except KeyError:
+ raise UnsupportedAlgorithm(
+ "Signature algorithm OID:{} not recognized".format(oid)
+ )
+
+ @property
+ @_requires_successful_response
+ def signature(self):
+ sig = self._backend._lib.OCSP_resp_get0_signature(self._basic)
+ self._backend.openssl_assert(sig != self._backend._ffi.NULL)
+ return _asn1_string_to_bytes(self._backend, sig)
+
+ @property
+ @_requires_successful_response
+ def tbs_response_bytes(self):
+ respdata = self._backend._lib.OCSP_resp_get0_respdata(self._basic)
+ self._backend.openssl_assert(respdata != self._backend._ffi.NULL)
+ pp = self._backend._ffi.new("unsigned char **")
+ res = self._backend._lib.i2d_OCSP_RESPDATA(respdata, pp)
+ self._backend.openssl_assert(pp[0] != self._backend._ffi.NULL)
+ pp = self._backend._ffi.gc(
+ pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
+ )
+ self._backend.openssl_assert(res > 0)
+ return self._backend._ffi.buffer(pp[0], res)[:]
+
+ @property
+ @_requires_successful_response
+ def certificates(self):
+ sk_x509 = self._backend._lib.OCSP_resp_get0_certs(self._basic)
+ num = self._backend._lib.sk_X509_num(sk_x509)
+ certs = []
+ for i in range(num):
+ x509 = self._backend._lib.sk_X509_value(sk_x509, i)
+ self._backend.openssl_assert(x509 != self._backend._ffi.NULL)
+ cert = _Certificate(self._backend, x509)
+ # We need to keep the OCSP response that the certificate came from
+ # alive until the Certificate object itself goes out of scope, so
+ # we give it a private reference.
+ cert._ocsp_resp = self
+ certs.append(cert)
+
+ return certs
+
+ @property
+ @_requires_successful_response
+ def responder_key_hash(self):
+ _, asn1_string = self._responder_key_name()
+ if asn1_string == self._backend._ffi.NULL:
+ return None
+ else:
+ return _asn1_string_to_bytes(self._backend, asn1_string)
+
+ @property
+ @_requires_successful_response
+ def responder_name(self):
+ x509_name, _ = self._responder_key_name()
+ if x509_name == self._backend._ffi.NULL:
+ return None
+ else:
+ return _decode_x509_name(self._backend, x509_name)
+
+ def _responder_key_name(self):
+ asn1_string = self._backend._ffi.new("ASN1_OCTET_STRING **")
+ x509_name = self._backend._ffi.new("X509_NAME **")
+ res = self._backend._lib.OCSP_resp_get0_id(
+ self._basic, asn1_string, x509_name
+ )
+ self._backend.openssl_assert(res == 1)
+ return x509_name[0], asn1_string[0]
+
+ @property
+ @_requires_successful_response
+ def produced_at(self):
+ produced_at = self._backend._lib.OCSP_resp_get0_produced_at(
+ self._basic
+ )
+ return _parse_asn1_generalized_time(self._backend, produced_at)
+
+ @property
+ @_requires_successful_response
+ def certificate_status(self):
+ status = self._backend._lib.OCSP_single_get0_status(
+ self._single,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(status in _CERT_STATUS_TO_ENUM)
+ return _CERT_STATUS_TO_ENUM[status]
+
+ @property
+ @_requires_successful_response
+ def revocation_time(self):
+ if self.certificate_status is not OCSPCertStatus.REVOKED:
+ return None
+
+ asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **")
+ self._backend._lib.OCSP_single_get0_status(
+ self._single,
+ self._backend._ffi.NULL,
+ asn1_time,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(asn1_time[0] != self._backend._ffi.NULL)
+ return _parse_asn1_generalized_time(self._backend, asn1_time[0])
+
+ @property
+ @_requires_successful_response
+ def revocation_reason(self):
+ if self.certificate_status is not OCSPCertStatus.REVOKED:
+ return None
+
+ reason_ptr = self._backend._ffi.new("int *")
+ self._backend._lib.OCSP_single_get0_status(
+ self._single,
+ reason_ptr,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ )
+ # If no reason is encoded OpenSSL returns -1
+ if reason_ptr[0] == -1:
+ return None
+ else:
+ self._backend.openssl_assert(
+ reason_ptr[0] in _CRL_ENTRY_REASON_CODE_TO_ENUM
+ )
+ return _CRL_ENTRY_REASON_CODE_TO_ENUM[reason_ptr[0]]
+
+ @property
+ @_requires_successful_response
+ def this_update(self):
+ asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **")
+ self._backend._lib.OCSP_single_get0_status(
+ self._single,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ asn1_time,
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(asn1_time[0] != self._backend._ffi.NULL)
+ return _parse_asn1_generalized_time(self._backend, asn1_time[0])
+
+ @property
+ @_requires_successful_response
+ def next_update(self):
+ asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **")
+ self._backend._lib.OCSP_single_get0_status(
+ self._single,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ asn1_time,
+ )
+ if asn1_time[0] != self._backend._ffi.NULL:
+ return _parse_asn1_generalized_time(self._backend, asn1_time[0])
+ else:
+ return None
+
+ @property
+ @_requires_successful_response
+ def issuer_key_hash(self):
+ return _issuer_key_hash(self._backend, self._cert_id)
+
+ @property
+ @_requires_successful_response
+ def issuer_name_hash(self):
+ return _issuer_name_hash(self._backend, self._cert_id)
+
+ @property
+ @_requires_successful_response
+ def hash_algorithm(self):
+ return _hash_algorithm(self._backend, self._cert_id)
+
+ @property
+ @_requires_successful_response
+ def serial_number(self):
+ return _serial_number(self._backend, self._cert_id)
+
+ @utils.cached_property
+ @_requires_successful_response
+ def extensions(self):
+ return self._backend._ocsp_basicresp_ext_parser.parse(self._basic)
+
+ @utils.cached_property
+ @_requires_successful_response
+ def single_extensions(self):
+ return self._backend._ocsp_singleresp_ext_parser.parse(self._single)
+
+ def public_bytes(self, encoding):
+ if encoding is not serialization.Encoding.DER:
+ raise ValueError("The only allowed encoding value is Encoding.DER")
+
+ bio = self._backend._create_mem_bio_gc()
+ res = self._backend._lib.i2d_OCSP_RESPONSE_bio(
+ bio, self._ocsp_response
+ )
+ self._backend.openssl_assert(res > 0)
+ return self._backend._read_mem_bio(bio)
+
+
+@utils.register_interface(OCSPRequest)
+class _OCSPRequest(object):
+ def __init__(self, backend, ocsp_request):
+ if backend._lib.OCSP_request_onereq_count(ocsp_request) > 1:
+ raise NotImplementedError(
+ "OCSP request contains more than one request"
+ )
+ self._backend = backend
+ self._ocsp_request = ocsp_request
+ self._request = self._backend._lib.OCSP_request_onereq_get0(
+ self._ocsp_request, 0
+ )
+ self._backend.openssl_assert(self._request != self._backend._ffi.NULL)
+ self._cert_id = self._backend._lib.OCSP_onereq_get0_id(self._request)
+ self._backend.openssl_assert(self._cert_id != self._backend._ffi.NULL)
+
+ @property
+ def issuer_key_hash(self):
+ return _issuer_key_hash(self._backend, self._cert_id)
+
+ @property
+ def issuer_name_hash(self):
+ return _issuer_name_hash(self._backend, self._cert_id)
+
+ @property
+ def serial_number(self):
+ return _serial_number(self._backend, self._cert_id)
+
+ @property
+ def hash_algorithm(self):
+ return _hash_algorithm(self._backend, self._cert_id)
+
+ @utils.cached_property
+ def extensions(self):
+ return self._backend._ocsp_req_ext_parser.parse(self._ocsp_request)
+
+ def public_bytes(self, encoding):
+ if encoding is not serialization.Encoding.DER:
+ raise ValueError("The only allowed encoding value is Encoding.DER")
+
+ bio = self._backend._create_mem_bio_gc()
+ res = self._backend._lib.i2d_OCSP_REQUEST_bio(bio, self._ocsp_request)
+ self._backend.openssl_assert(res > 0)
+ return self._backend._read_mem_bio(bio)
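_OCSPRequest and _OCSPResponse are the backend objects returned by the public cryptography.x509.ocsp API. A hedged sketch of that public path; the file names are placeholders and the responder exchange itself is out of scope here:

    from cryptography import x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes, serialization
    from cryptography.x509 import ocsp

    # "cert.pem" and "issuer.pem" are placeholder paths.
    with open("cert.pem", "rb") as f:
        cert = x509.load_pem_x509_certificate(f.read(), default_backend())
    with open("issuer.pem", "rb") as f:
        issuer = x509.load_pem_x509_certificate(f.read(), default_backend())

    builder = ocsp.OCSPRequestBuilder().add_certificate(
        cert, issuer, hashes.SHA1()
    )
    request = builder.build()
    der_request = request.public_bytes(serialization.Encoding.DER)

    # A DER-encoded response fetched from the responder would be parsed with:
    #   response = ocsp.load_der_ocsp_response(der_bytes)
    #   if response.response_status == ocsp.OCSPResponseStatus.SUCCESSFUL:
    #       print(response.certificate_status, response.next_update)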
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/poly1305.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/poly1305.py
new file mode 100644
index 0000000000..5699918b17
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/poly1305.py
@@ -0,0 +1,65 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+
+from cryptography.exceptions import InvalidSignature
+from cryptography.hazmat.primitives import constant_time
+
+
+_POLY1305_TAG_SIZE = 16
+_POLY1305_KEY_SIZE = 32
+
+
+class _Poly1305Context(object):
+ def __init__(self, backend, key):
+ self._backend = backend
+
+ key_ptr = self._backend._ffi.from_buffer(key)
+ # This function copies the key into OpenSSL-owned memory so we don't
+ # need to retain it ourselves
+ evp_pkey = self._backend._lib.EVP_PKEY_new_raw_private_key(
+ self._backend._lib.NID_poly1305,
+ self._backend._ffi.NULL,
+ key_ptr,
+ len(key),
+ )
+ self._backend.openssl_assert(evp_pkey != self._backend._ffi.NULL)
+ self._evp_pkey = self._backend._ffi.gc(
+ evp_pkey, self._backend._lib.EVP_PKEY_free
+ )
+ ctx = self._backend._lib.EVP_MD_CTX_new()
+ self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
+ self._ctx = self._backend._ffi.gc(
+ ctx, self._backend._lib.EVP_MD_CTX_free
+ )
+ res = self._backend._lib.EVP_DigestSignInit(
+ self._ctx,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ self._evp_pkey,
+ )
+ self._backend.openssl_assert(res == 1)
+
+ def update(self, data):
+ data_ptr = self._backend._ffi.from_buffer(data)
+ res = self._backend._lib.EVP_DigestSignUpdate(
+ self._ctx, data_ptr, len(data)
+ )
+ self._backend.openssl_assert(res != 0)
+
+ def finalize(self):
+ buf = self._backend._ffi.new("unsigned char[]", _POLY1305_TAG_SIZE)
+ outlen = self._backend._ffi.new("size_t *")
+ res = self._backend._lib.EVP_DigestSignFinal(self._ctx, buf, outlen)
+ self._backend.openssl_assert(res != 0)
+ self._backend.openssl_assert(outlen[0] == _POLY1305_TAG_SIZE)
+ return self._backend._ffi.buffer(buf)[: outlen[0]]
+
+ def verify(self, tag):
+ mac = self.finalize()
+ if not constant_time.bytes_eq(mac, tag):
+ raise InvalidSignature("Value did not match computed tag.")
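_Poly1305Context backs cryptography.hazmat.primitives.poly1305.Poly1305 and requires an OpenSSL build with Poly1305 support (1.1.1+). A minimal sketch; the key is a placeholder and, as with any Poly1305 use, must be unique per message:

    from cryptography.hazmat.primitives import poly1305

    key = b"\x01" * 32  # placeholder 32-byte one-time key
    p = poly1305.Poly1305(key)
    p.update(b"message")
    tag = p.finalize()  # 16-byte tag

    q = poly1305.Poly1305(key)
    q.update(b"message")
    q.verify(tag)  # raises InvalidSignature on mismatch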
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/rsa.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/rsa.py
new file mode 100644
index 0000000000..82cd49c960
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/rsa.py
@@ -0,0 +1,516 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+from cryptography.exceptions import (
+ InvalidSignature,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends.openssl.utils import (
+ _calculate_digest_and_algorithm,
+ _check_not_prehashed,
+ _warn_sign_verify_deprecated,
+)
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric import (
+ AsymmetricSignatureContext,
+ AsymmetricVerificationContext,
+ rsa,
+)
+from cryptography.hazmat.primitives.asymmetric.padding import (
+ AsymmetricPadding,
+ MGF1,
+ OAEP,
+ PKCS1v15,
+ PSS,
+ calculate_max_pss_salt_length,
+)
+from cryptography.hazmat.primitives.asymmetric.rsa import (
+ RSAPrivateKeyWithSerialization,
+ RSAPublicKeyWithSerialization,
+)
+
+
+def _get_rsa_pss_salt_length(pss, key, hash_algorithm):
+ salt = pss._salt_length
+
+ if salt is MGF1.MAX_LENGTH or salt is PSS.MAX_LENGTH:
+ return calculate_max_pss_salt_length(key, hash_algorithm)
+ else:
+ return salt
+
+
+def _enc_dec_rsa(backend, key, data, padding):
+ if not isinstance(padding, AsymmetricPadding):
+ raise TypeError("Padding must be an instance of AsymmetricPadding.")
+
+ if isinstance(padding, PKCS1v15):
+ padding_enum = backend._lib.RSA_PKCS1_PADDING
+ elif isinstance(padding, OAEP):
+ padding_enum = backend._lib.RSA_PKCS1_OAEP_PADDING
+
+ if not isinstance(padding._mgf, MGF1):
+ raise UnsupportedAlgorithm(
+ "Only MGF1 is supported by this backend.",
+ _Reasons.UNSUPPORTED_MGF,
+ )
+
+ if not backend.rsa_padding_supported(padding):
+ raise UnsupportedAlgorithm(
+ "This combination of padding and hash algorithm is not "
+ "supported by this backend.",
+ _Reasons.UNSUPPORTED_PADDING,
+ )
+
+ else:
+ raise UnsupportedAlgorithm(
+ "{} is not supported by this backend.".format(padding.name),
+ _Reasons.UNSUPPORTED_PADDING,
+ )
+
+ return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding)
+
+
+def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding):
+ if isinstance(key, _RSAPublicKey):
+ init = backend._lib.EVP_PKEY_encrypt_init
+ crypt = backend._lib.EVP_PKEY_encrypt
+ else:
+ init = backend._lib.EVP_PKEY_decrypt_init
+ crypt = backend._lib.EVP_PKEY_decrypt
+
+ pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL)
+ backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
+ pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
+ res = init(pkey_ctx)
+ backend.openssl_assert(res == 1)
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum)
+ backend.openssl_assert(res > 0)
+ buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
+ backend.openssl_assert(buf_size > 0)
+ if isinstance(padding, OAEP) and backend._lib.Cryptography_HAS_RSA_OAEP_MD:
+ mgf1_md = backend._evp_md_non_null_from_algorithm(
+ padding._mgf._algorithm
+ )
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
+ backend.openssl_assert(res > 0)
+ oaep_md = backend._evp_md_non_null_from_algorithm(padding._algorithm)
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_oaep_md(pkey_ctx, oaep_md)
+ backend.openssl_assert(res > 0)
+
+ if (
+ isinstance(padding, OAEP)
+ and padding._label is not None
+ and len(padding._label) > 0
+ ):
+ # set0_rsa_oaep_label takes ownership of the char * so we need to
+ # copy it into some new memory
+ labelptr = backend._lib.OPENSSL_malloc(len(padding._label))
+ backend.openssl_assert(labelptr != backend._ffi.NULL)
+ backend._ffi.memmove(labelptr, padding._label, len(padding._label))
+ res = backend._lib.EVP_PKEY_CTX_set0_rsa_oaep_label(
+ pkey_ctx, labelptr, len(padding._label)
+ )
+ backend.openssl_assert(res == 1)
+
+ outlen = backend._ffi.new("size_t *", buf_size)
+ buf = backend._ffi.new("unsigned char[]", buf_size)
+ # Everything from this line onwards is written with the goal of being as
+ # constant-time as is practical given the constraints of Python and our
+ # API. See Bleichenbacher's '98 attack on RSA, and its many many variants.
+ # As such, you should not attempt to change this (particularly to "clean it
+ # up") without understanding why it was written this way (see
+ # Chesterton's Fence), and without measuring to verify you have not
+ # introduced observable time differences.
+ res = crypt(pkey_ctx, buf, outlen, data, len(data))
+ resbuf = backend._ffi.buffer(buf)[: outlen[0]]
+ backend._lib.ERR_clear_error()
+ if res <= 0:
+ raise ValueError("Encryption/decryption failed.")
+ return resbuf
+
+
+def _rsa_sig_determine_padding(backend, key, padding, algorithm):
+ if not isinstance(padding, AsymmetricPadding):
+ raise TypeError("Expected provider of AsymmetricPadding.")
+
+ pkey_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
+ backend.openssl_assert(pkey_size > 0)
+
+ if isinstance(padding, PKCS1v15):
+ # Hash algorithm is ignored for PKCS1v15-padding, may be None.
+ padding_enum = backend._lib.RSA_PKCS1_PADDING
+ elif isinstance(padding, PSS):
+ if not isinstance(padding._mgf, MGF1):
+ raise UnsupportedAlgorithm(
+ "Only MGF1 is supported by this backend.",
+ _Reasons.UNSUPPORTED_MGF,
+ )
+
+ # PSS padding requires a hash algorithm
+ if not isinstance(algorithm, hashes.HashAlgorithm):
+ raise TypeError("Expected instance of hashes.HashAlgorithm.")
+
+ # Size of key in bytes - 2 is the maximum
+ # PSS signature length (salt length is checked later)
+ if pkey_size - algorithm.digest_size - 2 < 0:
+ raise ValueError(
+ "Digest too large for key size. Use a larger "
+ "key or different digest."
+ )
+
+ padding_enum = backend._lib.RSA_PKCS1_PSS_PADDING
+ else:
+ raise UnsupportedAlgorithm(
+ "{} is not supported by this backend.".format(padding.name),
+ _Reasons.UNSUPPORTED_PADDING,
+ )
+
+ return padding_enum
+
+
+# Hash algorithm can be absent (None) to initialize the context without setting
+# any message digest algorithm. This is currently only valid for the PKCS1v15
+# padding type, where it means that the signature data is encoded/decoded
+# as provided, without being wrapped in a DigestInfo structure.
+def _rsa_sig_setup(backend, padding, algorithm, key, init_func):
+ padding_enum = _rsa_sig_determine_padding(backend, key, padding, algorithm)
+ pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL)
+ backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
+ pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
+ res = init_func(pkey_ctx)
+ backend.openssl_assert(res == 1)
+ if algorithm is not None:
+ evp_md = backend._evp_md_non_null_from_algorithm(algorithm)
+ res = backend._lib.EVP_PKEY_CTX_set_signature_md(pkey_ctx, evp_md)
+ if res == 0:
+ backend._consume_errors()
+ raise UnsupportedAlgorithm(
+ "{} is not supported by this backend for RSA signing.".format(
+ algorithm.name
+ ),
+ _Reasons.UNSUPPORTED_HASH,
+ )
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum)
+ if res <= 0:
+ backend._consume_errors()
+ raise UnsupportedAlgorithm(
+ "{} is not supported for the RSA signature operation.".format(
+ padding.name
+ ),
+ _Reasons.UNSUPPORTED_PADDING,
+ )
+ if isinstance(padding, PSS):
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
+ pkey_ctx, _get_rsa_pss_salt_length(padding, key, algorithm)
+ )
+ backend.openssl_assert(res > 0)
+
+ mgf1_md = backend._evp_md_non_null_from_algorithm(
+ padding._mgf._algorithm
+ )
+ res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
+ backend.openssl_assert(res > 0)
+
+ return pkey_ctx
+
+
+def _rsa_sig_sign(backend, padding, algorithm, private_key, data):
+ pkey_ctx = _rsa_sig_setup(
+ backend,
+ padding,
+ algorithm,
+ private_key,
+ backend._lib.EVP_PKEY_sign_init,
+ )
+ buflen = backend._ffi.new("size_t *")
+ res = backend._lib.EVP_PKEY_sign(
+ pkey_ctx, backend._ffi.NULL, buflen, data, len(data)
+ )
+ backend.openssl_assert(res == 1)
+ buf = backend._ffi.new("unsigned char[]", buflen[0])
+ res = backend._lib.EVP_PKEY_sign(pkey_ctx, buf, buflen, data, len(data))
+ if res != 1:
+ errors = backend._consume_errors_with_text()
+ raise ValueError(
+ "Digest or salt length too long for key size. Use a larger key "
+ "or shorter salt length if you are specifying a PSS salt",
+ errors,
+ )
+
+ return backend._ffi.buffer(buf)[:]
+
+
+def _rsa_sig_verify(backend, padding, algorithm, public_key, signature, data):
+ pkey_ctx = _rsa_sig_setup(
+ backend,
+ padding,
+ algorithm,
+ public_key,
+ backend._lib.EVP_PKEY_verify_init,
+ )
+ res = backend._lib.EVP_PKEY_verify(
+ pkey_ctx, signature, len(signature), data, len(data)
+ )
+ # The previous call can return negative numbers in the event of an
+ # error. This is not a signature failure but we need to fail if it
+ # occurs.
+ backend.openssl_assert(res >= 0)
+ if res == 0:
+ backend._consume_errors()
+ raise InvalidSignature
+
+
+def _rsa_sig_recover(backend, padding, algorithm, public_key, signature):
+ pkey_ctx = _rsa_sig_setup(
+ backend,
+ padding,
+ algorithm,
+ public_key,
+ backend._lib.EVP_PKEY_verify_recover_init,
+ )
+
+    # Attempt to keep the rest of the code in this function as constant-time
+    # as possible. See the comment in _enc_dec_rsa_pkey_ctx. Note that the
+    # buflen parameter is used even though its value may be undefined in the
+    # error case. Due to the tolerant nature of Python slicing this does not
+    # trigger any exceptions.
+ maxlen = backend._lib.EVP_PKEY_size(public_key._evp_pkey)
+ backend.openssl_assert(maxlen > 0)
+ buf = backend._ffi.new("unsigned char[]", maxlen)
+ buflen = backend._ffi.new("size_t *", maxlen)
+ res = backend._lib.EVP_PKEY_verify_recover(
+ pkey_ctx, buf, buflen, signature, len(signature)
+ )
+ resbuf = backend._ffi.buffer(buf)[: buflen[0]]
+ backend._lib.ERR_clear_error()
+ # Assume that all parameter errors are handled during the setup phase and
+ # any error here is due to invalid signature.
+ if res != 1:
+ raise InvalidSignature
+ return resbuf
+
+
+@utils.register_interface(AsymmetricSignatureContext)
+class _RSASignatureContext(object):
+ def __init__(self, backend, private_key, padding, algorithm):
+ self._backend = backend
+ self._private_key = private_key
+
+ # We now call _rsa_sig_determine_padding in _rsa_sig_setup. However
+ # we need to make a pointless call to it here so we maintain the
+ # API of erroring on init with this context if the values are invalid.
+ _rsa_sig_determine_padding(backend, private_key, padding, algorithm)
+ self._padding = padding
+ self._algorithm = algorithm
+ self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
+
+ def update(self, data):
+ self._hash_ctx.update(data)
+
+ def finalize(self):
+ return _rsa_sig_sign(
+ self._backend,
+ self._padding,
+ self._algorithm,
+ self._private_key,
+ self._hash_ctx.finalize(),
+ )
+
+
+@utils.register_interface(AsymmetricVerificationContext)
+class _RSAVerificationContext(object):
+ def __init__(self, backend, public_key, signature, padding, algorithm):
+ self._backend = backend
+ self._public_key = public_key
+ self._signature = signature
+ self._padding = padding
+ # We now call _rsa_sig_determine_padding in _rsa_sig_setup. However
+ # we need to make a pointless call to it here so we maintain the
+ # API of erroring on init with this context if the values are invalid.
+ _rsa_sig_determine_padding(backend, public_key, padding, algorithm)
+
+ padding = padding
+ self._algorithm = algorithm
+ self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
+
+ def update(self, data):
+ self._hash_ctx.update(data)
+
+ def verify(self):
+ return _rsa_sig_verify(
+ self._backend,
+ self._padding,
+ self._algorithm,
+ self._public_key,
+ self._signature,
+ self._hash_ctx.finalize(),
+ )
+
+
+@utils.register_interface(RSAPrivateKeyWithSerialization)
+class _RSAPrivateKey(object):
+ def __init__(self, backend, rsa_cdata, evp_pkey):
+ res = backend._lib.RSA_check_key(rsa_cdata)
+ if res != 1:
+ errors = backend._consume_errors_with_text()
+ raise ValueError("Invalid private key", errors)
+
+ # Blinding is on by default in many versions of OpenSSL, but let's
+ # just be conservative here.
+ res = backend._lib.RSA_blinding_on(rsa_cdata, backend._ffi.NULL)
+ backend.openssl_assert(res == 1)
+
+ self._backend = backend
+ self._rsa_cdata = rsa_cdata
+ self._evp_pkey = evp_pkey
+
+ n = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.RSA_get0_key(
+ self._rsa_cdata,
+ n,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
+ self._key_size = self._backend._lib.BN_num_bits(n[0])
+
+ key_size = utils.read_only_property("_key_size")
+
+ def signer(self, padding, algorithm):
+ _warn_sign_verify_deprecated()
+ _check_not_prehashed(algorithm)
+ return _RSASignatureContext(self._backend, self, padding, algorithm)
+
+ def decrypt(self, ciphertext, padding):
+ key_size_bytes = (self.key_size + 7) // 8
+ if key_size_bytes != len(ciphertext):
+ raise ValueError("Ciphertext length must be equal to key size.")
+
+ return _enc_dec_rsa(self._backend, self, ciphertext, padding)
+
+ def public_key(self):
+ ctx = self._backend._lib.RSAPublicKey_dup(self._rsa_cdata)
+ self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
+ ctx = self._backend._ffi.gc(ctx, self._backend._lib.RSA_free)
+ evp_pkey = self._backend._rsa_cdata_to_evp_pkey(ctx)
+ return _RSAPublicKey(self._backend, ctx, evp_pkey)
+
+ def private_numbers(self):
+ n = self._backend._ffi.new("BIGNUM **")
+ e = self._backend._ffi.new("BIGNUM **")
+ d = self._backend._ffi.new("BIGNUM **")
+ p = self._backend._ffi.new("BIGNUM **")
+ q = self._backend._ffi.new("BIGNUM **")
+ dmp1 = self._backend._ffi.new("BIGNUM **")
+ dmq1 = self._backend._ffi.new("BIGNUM **")
+ iqmp = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.RSA_get0_key(self._rsa_cdata, n, e, d)
+ self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(e[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(d[0] != self._backend._ffi.NULL)
+ self._backend._lib.RSA_get0_factors(self._rsa_cdata, p, q)
+ self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
+ self._backend._lib.RSA_get0_crt_params(
+ self._rsa_cdata, dmp1, dmq1, iqmp
+ )
+ self._backend.openssl_assert(dmp1[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(dmq1[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(iqmp[0] != self._backend._ffi.NULL)
+ return rsa.RSAPrivateNumbers(
+ p=self._backend._bn_to_int(p[0]),
+ q=self._backend._bn_to_int(q[0]),
+ d=self._backend._bn_to_int(d[0]),
+ dmp1=self._backend._bn_to_int(dmp1[0]),
+ dmq1=self._backend._bn_to_int(dmq1[0]),
+ iqmp=self._backend._bn_to_int(iqmp[0]),
+ public_numbers=rsa.RSAPublicNumbers(
+ e=self._backend._bn_to_int(e[0]),
+ n=self._backend._bn_to_int(n[0]),
+ ),
+ )
+
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ return self._backend._private_key_bytes(
+ encoding,
+ format,
+ encryption_algorithm,
+ self,
+ self._evp_pkey,
+ self._rsa_cdata,
+ )
+
+ def sign(self, data, padding, algorithm):
+ data, algorithm = _calculate_digest_and_algorithm(
+ self._backend, data, algorithm
+ )
+ return _rsa_sig_sign(self._backend, padding, algorithm, self, data)
+
+
+@utils.register_interface(RSAPublicKeyWithSerialization)
+class _RSAPublicKey(object):
+ def __init__(self, backend, rsa_cdata, evp_pkey):
+ self._backend = backend
+ self._rsa_cdata = rsa_cdata
+ self._evp_pkey = evp_pkey
+
+ n = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.RSA_get0_key(
+ self._rsa_cdata,
+ n,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
+ self._key_size = self._backend._lib.BN_num_bits(n[0])
+
+ key_size = utils.read_only_property("_key_size")
+
+ def verifier(self, signature, padding, algorithm):
+ _warn_sign_verify_deprecated()
+ utils._check_bytes("signature", signature)
+
+ _check_not_prehashed(algorithm)
+ return _RSAVerificationContext(
+ self._backend, self, signature, padding, algorithm
+ )
+
+ def encrypt(self, plaintext, padding):
+ return _enc_dec_rsa(self._backend, self, plaintext, padding)
+
+ def public_numbers(self):
+ n = self._backend._ffi.new("BIGNUM **")
+ e = self._backend._ffi.new("BIGNUM **")
+ self._backend._lib.RSA_get0_key(
+ self._rsa_cdata, n, e, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
+ self._backend.openssl_assert(e[0] != self._backend._ffi.NULL)
+ return rsa.RSAPublicNumbers(
+ e=self._backend._bn_to_int(e[0]),
+ n=self._backend._bn_to_int(n[0]),
+ )
+
+ def public_bytes(self, encoding, format):
+ return self._backend._public_key_bytes(
+ encoding, format, self, self._evp_pkey, self._rsa_cdata
+ )
+
+ def verify(self, signature, data, padding, algorithm):
+ data, algorithm = _calculate_digest_and_algorithm(
+ self._backend, data, algorithm
+ )
+ return _rsa_sig_verify(
+ self._backend, padding, algorithm, self, signature, data
+ )
+
+ def recover_data_from_signature(self, signature, padding, algorithm):
+ _check_not_prehashed(algorithm)
+ return _rsa_sig_recover(
+ self._backend, padding, algorithm, self, signature
+ )
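_RSAPrivateKey and _RSAPublicKey back the public RSA key objects, with _rsa_sig_sign/_rsa_sig_verify and _enc_dec_rsa implementing sign/verify and OAEP encryption. A hedged usage sketch of those public paths, assuming a backend argument is still accepted:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import padding, rsa

    private_key = rsa.generate_private_key(
        public_exponent=65537, key_size=2048, backend=default_backend()
    )
    public_key = private_key.public_key()

    message = b"signed data"
    pss = padding.PSS(
        mgf=padding.MGF1(hashes.SHA256()), salt_length=padding.PSS.MAX_LENGTH
    )
    signature = private_key.sign(message, pss, hashes.SHA256())
    # Raises InvalidSignature if the signature does not verify.
    public_key.verify(signature, message, pss, hashes.SHA256())

    oaep = padding.OAEP(
        mgf=padding.MGF1(algorithm=hashes.SHA256()),
        algorithm=hashes.SHA256(),
        label=None,
    )
    ciphertext = public_key.encrypt(b"secret", oaep)
    assert private_key.decrypt(ciphertext, oaep) == b"secret"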
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/utils.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/utils.py
new file mode 100644
index 0000000000..3d697d1fb5
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/utils.py
@@ -0,0 +1,66 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import warnings
+
+from cryptography import utils
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric.utils import Prehashed
+
+
+def _evp_pkey_derive(backend, evp_pkey, peer_public_key):
+ ctx = backend._lib.EVP_PKEY_CTX_new(evp_pkey, backend._ffi.NULL)
+ backend.openssl_assert(ctx != backend._ffi.NULL)
+ ctx = backend._ffi.gc(ctx, backend._lib.EVP_PKEY_CTX_free)
+ res = backend._lib.EVP_PKEY_derive_init(ctx)
+ backend.openssl_assert(res == 1)
+ res = backend._lib.EVP_PKEY_derive_set_peer(ctx, peer_public_key._evp_pkey)
+ backend.openssl_assert(res == 1)
+ keylen = backend._ffi.new("size_t *")
+ res = backend._lib.EVP_PKEY_derive(ctx, backend._ffi.NULL, keylen)
+ backend.openssl_assert(res == 1)
+ backend.openssl_assert(keylen[0] > 0)
+ buf = backend._ffi.new("unsigned char[]", keylen[0])
+ res = backend._lib.EVP_PKEY_derive(ctx, buf, keylen)
+ if res != 1:
+ raise ValueError("Null shared key derived from public/private pair.")
+
+ return backend._ffi.buffer(buf, keylen[0])[:]
+
+
+def _calculate_digest_and_algorithm(backend, data, algorithm):
+ if not isinstance(algorithm, Prehashed):
+ hash_ctx = hashes.Hash(algorithm, backend)
+ hash_ctx.update(data)
+ data = hash_ctx.finalize()
+ else:
+ algorithm = algorithm._algorithm
+
+ if len(data) != algorithm.digest_size:
+ raise ValueError(
+ "The provided data must be the same length as the hash "
+ "algorithm's digest size."
+ )
+
+ return (data, algorithm)
+
+
+def _check_not_prehashed(signature_algorithm):
+ if isinstance(signature_algorithm, Prehashed):
+ raise TypeError(
+ "Prehashed is only supported in the sign and verify methods. "
+ "It cannot be used with signer, verifier or "
+ "recover_data_from_signature."
+ )
+
+
+def _warn_sign_verify_deprecated():
+ warnings.warn(
+ "signer and verifier have been deprecated. Please use sign "
+ "and verify instead.",
+ utils.PersistentlyDeprecated2017,
+ stacklevel=3,
+ )
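_calculate_digest_and_algorithm is what lets sign() and verify() accept a Prehashed wrapper, while _check_not_prehashed rejects it in the deprecated signer/verifier contexts. A minimal sketch of signing an externally computed digest, assuming the same RSA public API as above:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import (
        padding,
        rsa,
        utils as asym_utils,
    )

    private_key = rsa.generate_private_key(
        public_exponent=65537, key_size=2048, backend=default_backend()
    )

    # Digest computed elsewhere (e.g. over a large file).
    h = hashes.Hash(hashes.SHA256(), backend=default_backend())
    h.update(b"data hashed elsewhere")
    digest = h.finalize()

    signature = private_key.sign(
        digest,
        padding.PKCS1v15(),
        asym_utils.Prehashed(hashes.SHA256()),
    )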
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/x25519.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/x25519.py
new file mode 100644
index 0000000000..4971c54814
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/x25519.py
@@ -0,0 +1,123 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+from cryptography.hazmat.backends.openssl.utils import _evp_pkey_derive
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric.x25519 import (
+ X25519PrivateKey,
+ X25519PublicKey,
+)
+
+
+_X25519_KEY_SIZE = 32
+
+
+@utils.register_interface(X25519PublicKey)
+class _X25519PublicKey(object):
+ def __init__(self, backend, evp_pkey):
+ self._backend = backend
+ self._evp_pkey = evp_pkey
+
+ def public_bytes(self, encoding, format):
+ if (
+ encoding is serialization.Encoding.Raw
+ or format is serialization.PublicFormat.Raw
+ ):
+ if (
+ encoding is not serialization.Encoding.Raw
+ or format is not serialization.PublicFormat.Raw
+ ):
+ raise ValueError(
+ "When using Raw both encoding and format must be Raw"
+ )
+
+ return self._raw_public_bytes()
+
+ return self._backend._public_key_bytes(
+ encoding, format, self, self._evp_pkey, None
+ )
+
+ def _raw_public_bytes(self):
+ ucharpp = self._backend._ffi.new("unsigned char **")
+ res = self._backend._lib.EVP_PKEY_get1_tls_encodedpoint(
+ self._evp_pkey, ucharpp
+ )
+ self._backend.openssl_assert(res == 32)
+ self._backend.openssl_assert(ucharpp[0] != self._backend._ffi.NULL)
+ data = self._backend._ffi.gc(
+ ucharpp[0], self._backend._lib.OPENSSL_free
+ )
+ return self._backend._ffi.buffer(data, res)[:]
+
+
+@utils.register_interface(X25519PrivateKey)
+class _X25519PrivateKey(object):
+ def __init__(self, backend, evp_pkey):
+ self._backend = backend
+ self._evp_pkey = evp_pkey
+
+ def public_key(self):
+ bio = self._backend._create_mem_bio_gc()
+ res = self._backend._lib.i2d_PUBKEY_bio(bio, self._evp_pkey)
+ self._backend.openssl_assert(res == 1)
+ evp_pkey = self._backend._lib.d2i_PUBKEY_bio(
+ bio, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(evp_pkey != self._backend._ffi.NULL)
+ evp_pkey = self._backend._ffi.gc(
+ evp_pkey, self._backend._lib.EVP_PKEY_free
+ )
+ return _X25519PublicKey(self._backend, evp_pkey)
+
+ def exchange(self, peer_public_key):
+ if not isinstance(peer_public_key, X25519PublicKey):
+ raise TypeError("peer_public_key must be X25519PublicKey.")
+
+ return _evp_pkey_derive(self._backend, self._evp_pkey, peer_public_key)
+
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ if (
+ encoding is serialization.Encoding.Raw
+ or format is serialization.PublicFormat.Raw
+ ):
+ if (
+ format is not serialization.PrivateFormat.Raw
+ or encoding is not serialization.Encoding.Raw
+ or not isinstance(
+ encryption_algorithm, serialization.NoEncryption
+ )
+ ):
+ raise ValueError(
+ "When using Raw both encoding and format must be Raw "
+ "and encryption_algorithm must be NoEncryption()"
+ )
+
+ return self._raw_private_bytes()
+
+ return self._backend._private_key_bytes(
+ encoding, format, encryption_algorithm, self, self._evp_pkey, None
+ )
+
+ def _raw_private_bytes(self):
+        # When we drop support for CRYPTOGRAPHY_OPENSSL_LESS_THAN_111 we can
+        # switch this to EVP_PKEY_get_raw_private_key.
+ # The trick we use here is serializing to a PKCS8 key and just
+ # using the last 32 bytes, which is the key itself.
+ bio = self._backend._create_mem_bio_gc()
+ res = self._backend._lib.i2d_PKCS8PrivateKey_bio(
+ bio,
+ self._evp_pkey,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ 0,
+ self._backend._ffi.NULL,
+ self._backend._ffi.NULL,
+ )
+ self._backend.openssl_assert(res == 1)
+ pkcs8 = self._backend._read_mem_bio(bio)
+ self._backend.openssl_assert(len(pkcs8) == 48)
+ return pkcs8[-_X25519_KEY_SIZE:]
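_X25519PrivateKey and _X25519PublicKey back the public X25519 key-exchange API. A minimal sketch of that public path:

    from cryptography.hazmat.primitives import serialization
    from cryptography.hazmat.primitives.asymmetric.x25519 import X25519PrivateKey

    alice = X25519PrivateKey.generate()
    bob = X25519PrivateKey.generate()

    shared_alice = alice.exchange(bob.public_key())
    shared_bob = bob.exchange(alice.public_key())
    assert shared_alice == shared_bob  # 32-byte shared secret

    # Raw serialization, which _raw_private_bytes above implements via PKCS#8.
    raw = alice.private_bytes(
        serialization.Encoding.Raw,
        serialization.PrivateFormat.Raw,
        serialization.NoEncryption(),
    )
    assert len(raw) == 32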
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/x448.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/x448.py
new file mode 100644
index 0000000000..7ebcdf84bc
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/x448.py
@@ -0,0 +1,107 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+from cryptography.hazmat.backends.openssl.utils import _evp_pkey_derive
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric.x448 import (
+ X448PrivateKey,
+ X448PublicKey,
+)
+
+_X448_KEY_SIZE = 56
+
+
+@utils.register_interface(X448PublicKey)
+class _X448PublicKey(object):
+ def __init__(self, backend, evp_pkey):
+ self._backend = backend
+ self._evp_pkey = evp_pkey
+
+ def public_bytes(self, encoding, format):
+ if (
+ encoding is serialization.Encoding.Raw
+ or format is serialization.PublicFormat.Raw
+ ):
+ if (
+ encoding is not serialization.Encoding.Raw
+ or format is not serialization.PublicFormat.Raw
+ ):
+ raise ValueError(
+ "When using Raw both encoding and format must be Raw"
+ )
+
+ return self._raw_public_bytes()
+
+ return self._backend._public_key_bytes(
+ encoding, format, self, self._evp_pkey, None
+ )
+
+ def _raw_public_bytes(self):
+ buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE)
+ buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE)
+ res = self._backend._lib.EVP_PKEY_get_raw_public_key(
+ self._evp_pkey, buf, buflen
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE)
+ return self._backend._ffi.buffer(buf, _X448_KEY_SIZE)[:]
+
+
+@utils.register_interface(X448PrivateKey)
+class _X448PrivateKey(object):
+ def __init__(self, backend, evp_pkey):
+ self._backend = backend
+ self._evp_pkey = evp_pkey
+
+ def public_key(self):
+ buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE)
+ buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE)
+ res = self._backend._lib.EVP_PKEY_get_raw_public_key(
+ self._evp_pkey, buf, buflen
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE)
+ return self._backend.x448_load_public_bytes(buf)
+
+ def exchange(self, peer_public_key):
+ if not isinstance(peer_public_key, X448PublicKey):
+ raise TypeError("peer_public_key must be X448PublicKey.")
+
+ return _evp_pkey_derive(self._backend, self._evp_pkey, peer_public_key)
+
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ if (
+ encoding is serialization.Encoding.Raw
+ or format is serialization.PublicFormat.Raw
+ ):
+ if (
+ format is not serialization.PrivateFormat.Raw
+ or encoding is not serialization.Encoding.Raw
+ or not isinstance(
+ encryption_algorithm, serialization.NoEncryption
+ )
+ ):
+ raise ValueError(
+ "When using Raw both encoding and format must be Raw "
+ "and encryption_algorithm must be NoEncryption()"
+ )
+
+ return self._raw_private_bytes()
+
+ return self._backend._private_key_bytes(
+ encoding, format, encryption_algorithm, self, self._evp_pkey, None
+ )
+
+ def _raw_private_bytes(self):
+ buf = self._backend._ffi.new("unsigned char []", _X448_KEY_SIZE)
+ buflen = self._backend._ffi.new("size_t *", _X448_KEY_SIZE)
+ res = self._backend._lib.EVP_PKEY_get_raw_private_key(
+ self._evp_pkey, buf, buflen
+ )
+ self._backend.openssl_assert(res == 1)
+ self._backend.openssl_assert(buflen[0] == _X448_KEY_SIZE)
+ return self._backend._ffi.buffer(buf, _X448_KEY_SIZE)[:]
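The X448 classes mirror the X25519 ones with a 56-byte key size and the raw EVP_PKEY accessors. A correspondingly short sketch of the public API:

    from cryptography.hazmat.primitives.asymmetric.x448 import X448PrivateKey

    alice = X448PrivateKey.generate()
    bob = X448PrivateKey.generate()
    # Both sides derive the same 56-byte shared secret.
    assert alice.exchange(bob.public_key()) == bob.exchange(alice.public_key())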
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/x509.py b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/x509.py
new file mode 100644
index 0000000000..4d0dac7649
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/backends/openssl/x509.py
@@ -0,0 +1,587 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import datetime
+import operator
+
+from cryptography import utils, x509
+from cryptography.exceptions import UnsupportedAlgorithm
+from cryptography.hazmat.backends.openssl.decode_asn1 import (
+ _asn1_integer_to_int,
+ _asn1_string_to_bytes,
+ _decode_x509_name,
+ _obj2txt,
+ _parse_asn1_time,
+)
+from cryptography.hazmat.backends.openssl.encode_asn1 import (
+ _encode_asn1_int_gc,
+ _txt2obj_gc,
+)
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
+from cryptography.x509.name import _ASN1Type
+
+
+@utils.register_interface(x509.Certificate)
+class _Certificate(object):
+ def __init__(self, backend, x509_cert):
+ self._backend = backend
+ self._x509 = x509_cert
+
+ version = self._backend._lib.X509_get_version(self._x509)
+ if version == 0:
+ self._version = x509.Version.v1
+ elif version == 2:
+ self._version = x509.Version.v3
+ else:
+ raise x509.InvalidVersion(
+ "{} is not a valid X509 version".format(version), version
+ )
+
+ def __repr__(self):
+ return "<Certificate(subject={}, ...)>".format(self.subject)
+
+ def __eq__(self, other):
+ if not isinstance(other, x509.Certificate):
+ return NotImplemented
+
+ res = self._backend._lib.X509_cmp(self._x509, other._x509)
+ return res == 0
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self.public_bytes(serialization.Encoding.DER))
+
+ def __deepcopy__(self, memo):
+ return self
+
+ def fingerprint(self, algorithm):
+ h = hashes.Hash(algorithm, self._backend)
+ h.update(self.public_bytes(serialization.Encoding.DER))
+ return h.finalize()
+
+ version = utils.read_only_property("_version")
+
+ @property
+ def serial_number(self):
+ asn1_int = self._backend._lib.X509_get_serialNumber(self._x509)
+ self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL)
+ return _asn1_integer_to_int(self._backend, asn1_int)
+
+ def public_key(self):
+ pkey = self._backend._lib.X509_get_pubkey(self._x509)
+ if pkey == self._backend._ffi.NULL:
+ # Remove errors from the stack.
+ self._backend._consume_errors()
+ raise ValueError("Certificate public key is of an unknown type")
+
+ pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
+
+ return self._backend._evp_pkey_to_public_key(pkey)
+
+ @property
+ def not_valid_before(self):
+ asn1_time = self._backend._lib.X509_getm_notBefore(self._x509)
+ return _parse_asn1_time(self._backend, asn1_time)
+
+ @property
+ def not_valid_after(self):
+ asn1_time = self._backend._lib.X509_getm_notAfter(self._x509)
+ return _parse_asn1_time(self._backend, asn1_time)
+
+ @property
+ def issuer(self):
+ issuer = self._backend._lib.X509_get_issuer_name(self._x509)
+ self._backend.openssl_assert(issuer != self._backend._ffi.NULL)
+ return _decode_x509_name(self._backend, issuer)
+
+ @property
+ def subject(self):
+ subject = self._backend._lib.X509_get_subject_name(self._x509)
+ self._backend.openssl_assert(subject != self._backend._ffi.NULL)
+ return _decode_x509_name(self._backend, subject)
+
+ @property
+ def signature_hash_algorithm(self):
+ oid = self.signature_algorithm_oid
+ try:
+ return x509._SIG_OIDS_TO_HASH[oid]
+ except KeyError:
+ raise UnsupportedAlgorithm(
+ "Signature algorithm OID:{} not recognized".format(oid)
+ )
+
+ @property
+ def signature_algorithm_oid(self):
+ alg = self._backend._ffi.new("X509_ALGOR **")
+ self._backend._lib.X509_get0_signature(
+ self._backend._ffi.NULL, alg, self._x509
+ )
+ self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL)
+ oid = _obj2txt(self._backend, alg[0].algorithm)
+ return x509.ObjectIdentifier(oid)
+
+ @utils.cached_property
+ def extensions(self):
+ return self._backend._certificate_extension_parser.parse(self._x509)
+
+ @property
+ def signature(self):
+ sig = self._backend._ffi.new("ASN1_BIT_STRING **")
+ self._backend._lib.X509_get0_signature(
+ sig, self._backend._ffi.NULL, self._x509
+ )
+ self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL)
+ return _asn1_string_to_bytes(self._backend, sig[0])
+
+ @property
+ def tbs_certificate_bytes(self):
+ pp = self._backend._ffi.new("unsigned char **")
+ res = self._backend._lib.i2d_re_X509_tbs(self._x509, pp)
+ self._backend.openssl_assert(res > 0)
+ pp = self._backend._ffi.gc(
+ pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
+ )
+ return self._backend._ffi.buffer(pp[0], res)[:]
+
+ def public_bytes(self, encoding):
+ bio = self._backend._create_mem_bio_gc()
+ if encoding is serialization.Encoding.PEM:
+ res = self._backend._lib.PEM_write_bio_X509(bio, self._x509)
+ elif encoding is serialization.Encoding.DER:
+ res = self._backend._lib.i2d_X509_bio(bio, self._x509)
+ else:
+ raise TypeError("encoding must be an item from the Encoding enum")
+
+ self._backend.openssl_assert(res == 1)
+ return self._backend._read_mem_bio(bio)
+
+
+@utils.register_interface(x509.RevokedCertificate)
+class _RevokedCertificate(object):
+ def __init__(self, backend, crl, x509_revoked):
+ self._backend = backend
+        # X509_REVOKED_value returns an X509_REVOKED * with no
+        # reference counting of its own, so freeing the CRL via
+        # X509_CRL_free also frees every X509_REVOKED *. Because a
+        # caller can hold on to a single revoked certificate while
+        # letting the CRL fall out of scope, we keep a private
+        # reference to the CRL inside this RevokedCertificate object
+        # so the CRL is not garbage collected prematurely.
+ self._crl = crl
+ self._x509_revoked = x509_revoked
+
+ @property
+ def serial_number(self):
+ asn1_int = self._backend._lib.X509_REVOKED_get0_serialNumber(
+ self._x509_revoked
+ )
+ self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL)
+ return _asn1_integer_to_int(self._backend, asn1_int)
+
+ @property
+ def revocation_date(self):
+ return _parse_asn1_time(
+ self._backend,
+ self._backend._lib.X509_REVOKED_get0_revocationDate(
+ self._x509_revoked
+ ),
+ )
+
+ @utils.cached_property
+ def extensions(self):
+ return self._backend._revoked_cert_extension_parser.parse(
+ self._x509_revoked
+ )
+
+
+@utils.register_interface(x509.CertificateRevocationList)
+class _CertificateRevocationList(object):
+ def __init__(self, backend, x509_crl):
+ self._backend = backend
+ self._x509_crl = x509_crl
+
+ def __eq__(self, other):
+ if not isinstance(other, x509.CertificateRevocationList):
+ return NotImplemented
+
+ res = self._backend._lib.X509_CRL_cmp(self._x509_crl, other._x509_crl)
+ return res == 0
+
+ def __ne__(self, other):
+ return not self == other
+
+ def fingerprint(self, algorithm):
+ h = hashes.Hash(algorithm, self._backend)
+ bio = self._backend._create_mem_bio_gc()
+ res = self._backend._lib.i2d_X509_CRL_bio(bio, self._x509_crl)
+ self._backend.openssl_assert(res == 1)
+ der = self._backend._read_mem_bio(bio)
+ h.update(der)
+ return h.finalize()
+
+ @utils.cached_property
+ def _sorted_crl(self):
+ # X509_CRL_get0_by_serial sorts in place, which breaks a variety of
+ # things we don't want to break (like iteration and the signature).
+ # Let's dupe it and sort that instead.
+ dup = self._backend._lib.X509_CRL_dup(self._x509_crl)
+ self._backend.openssl_assert(dup != self._backend._ffi.NULL)
+ dup = self._backend._ffi.gc(dup, self._backend._lib.X509_CRL_free)
+ return dup
+
+ def get_revoked_certificate_by_serial_number(self, serial_number):
+ revoked = self._backend._ffi.new("X509_REVOKED **")
+ asn1_int = _encode_asn1_int_gc(self._backend, serial_number)
+ res = self._backend._lib.X509_CRL_get0_by_serial(
+ self._sorted_crl, revoked, asn1_int
+ )
+ if res == 0:
+ return None
+ else:
+ self._backend.openssl_assert(revoked[0] != self._backend._ffi.NULL)
+ return _RevokedCertificate(
+ self._backend, self._sorted_crl, revoked[0]
+ )
+
+ @property
+ def signature_hash_algorithm(self):
+ oid = self.signature_algorithm_oid
+ try:
+ return x509._SIG_OIDS_TO_HASH[oid]
+ except KeyError:
+ raise UnsupportedAlgorithm(
+ "Signature algorithm OID:{} not recognized".format(oid)
+ )
+
+ @property
+ def signature_algorithm_oid(self):
+ alg = self._backend._ffi.new("X509_ALGOR **")
+ self._backend._lib.X509_CRL_get0_signature(
+ self._x509_crl, self._backend._ffi.NULL, alg
+ )
+ self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL)
+ oid = _obj2txt(self._backend, alg[0].algorithm)
+ return x509.ObjectIdentifier(oid)
+
+ @property
+ def issuer(self):
+ issuer = self._backend._lib.X509_CRL_get_issuer(self._x509_crl)
+ self._backend.openssl_assert(issuer != self._backend._ffi.NULL)
+ return _decode_x509_name(self._backend, issuer)
+
+ @property
+ def next_update(self):
+ nu = self._backend._lib.X509_CRL_get_nextUpdate(self._x509_crl)
+ self._backend.openssl_assert(nu != self._backend._ffi.NULL)
+ return _parse_asn1_time(self._backend, nu)
+
+ @property
+ def last_update(self):
+ lu = self._backend._lib.X509_CRL_get_lastUpdate(self._x509_crl)
+ self._backend.openssl_assert(lu != self._backend._ffi.NULL)
+ return _parse_asn1_time(self._backend, lu)
+
+ @property
+ def signature(self):
+ sig = self._backend._ffi.new("ASN1_BIT_STRING **")
+ self._backend._lib.X509_CRL_get0_signature(
+ self._x509_crl, sig, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL)
+ return _asn1_string_to_bytes(self._backend, sig[0])
+
+ @property
+ def tbs_certlist_bytes(self):
+ pp = self._backend._ffi.new("unsigned char **")
+ res = self._backend._lib.i2d_re_X509_CRL_tbs(self._x509_crl, pp)
+ self._backend.openssl_assert(res > 0)
+ pp = self._backend._ffi.gc(
+ pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
+ )
+ return self._backend._ffi.buffer(pp[0], res)[:]
+
+ def public_bytes(self, encoding):
+ bio = self._backend._create_mem_bio_gc()
+ if encoding is serialization.Encoding.PEM:
+ res = self._backend._lib.PEM_write_bio_X509_CRL(
+ bio, self._x509_crl
+ )
+ elif encoding is serialization.Encoding.DER:
+ res = self._backend._lib.i2d_X509_CRL_bio(bio, self._x509_crl)
+ else:
+ raise TypeError("encoding must be an item from the Encoding enum")
+
+ self._backend.openssl_assert(res == 1)
+ return self._backend._read_mem_bio(bio)
+
+ def _revoked_cert(self, idx):
+ revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl)
+ r = self._backend._lib.sk_X509_REVOKED_value(revoked, idx)
+ self._backend.openssl_assert(r != self._backend._ffi.NULL)
+ return _RevokedCertificate(self._backend, self, r)
+
+ def __iter__(self):
+ for i in range(len(self)):
+ yield self._revoked_cert(i)
+
+ def __getitem__(self, idx):
+ if isinstance(idx, slice):
+ start, stop, step = idx.indices(len(self))
+ return [self._revoked_cert(i) for i in range(start, stop, step)]
+ else:
+ idx = operator.index(idx)
+ if idx < 0:
+ idx += len(self)
+ if not 0 <= idx < len(self):
+ raise IndexError
+ return self._revoked_cert(idx)
+
+ def __len__(self):
+ revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl)
+ if revoked == self._backend._ffi.NULL:
+ return 0
+ else:
+ return self._backend._lib.sk_X509_REVOKED_num(revoked)
+
+ @utils.cached_property
+ def extensions(self):
+ return self._backend._crl_extension_parser.parse(self._x509_crl)
+
+ def is_signature_valid(self, public_key):
+ if not isinstance(
+ public_key,
+ (dsa.DSAPublicKey, rsa.RSAPublicKey, ec.EllipticCurvePublicKey),
+ ):
+ raise TypeError(
+ "Expecting one of DSAPublicKey, RSAPublicKey,"
+ " or EllipticCurvePublicKey."
+ )
+ res = self._backend._lib.X509_CRL_verify(
+ self._x509_crl, public_key._evp_pkey
+ )
+
+ if res != 1:
+ self._backend._consume_errors()
+ return False
+
+ return True
+
+
+@utils.register_interface(x509.CertificateSigningRequest)
+class _CertificateSigningRequest(object):
+ def __init__(self, backend, x509_req):
+ self._backend = backend
+ self._x509_req = x509_req
+
+ def __eq__(self, other):
+ if not isinstance(other, _CertificateSigningRequest):
+ return NotImplemented
+
+ self_bytes = self.public_bytes(serialization.Encoding.DER)
+ other_bytes = other.public_bytes(serialization.Encoding.DER)
+ return self_bytes == other_bytes
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self.public_bytes(serialization.Encoding.DER))
+
+ def public_key(self):
+ pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req)
+ self._backend.openssl_assert(pkey != self._backend._ffi.NULL)
+ pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
+ return self._backend._evp_pkey_to_public_key(pkey)
+
+ @property
+ def subject(self):
+ subject = self._backend._lib.X509_REQ_get_subject_name(self._x509_req)
+ self._backend.openssl_assert(subject != self._backend._ffi.NULL)
+ return _decode_x509_name(self._backend, subject)
+
+ @property
+ def signature_hash_algorithm(self):
+ oid = self.signature_algorithm_oid
+ try:
+ return x509._SIG_OIDS_TO_HASH[oid]
+ except KeyError:
+ raise UnsupportedAlgorithm(
+ "Signature algorithm OID:{} not recognized".format(oid)
+ )
+
+ @property
+ def signature_algorithm_oid(self):
+ alg = self._backend._ffi.new("X509_ALGOR **")
+ self._backend._lib.X509_REQ_get0_signature(
+ self._x509_req, self._backend._ffi.NULL, alg
+ )
+ self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL)
+ oid = _obj2txt(self._backend, alg[0].algorithm)
+ return x509.ObjectIdentifier(oid)
+
+ @utils.cached_property
+ def extensions(self):
+ x509_exts = self._backend._lib.X509_REQ_get_extensions(self._x509_req)
+ x509_exts = self._backend._ffi.gc(
+ x509_exts,
+ lambda x: self._backend._lib.sk_X509_EXTENSION_pop_free(
+ x,
+ self._backend._ffi.addressof(
+ self._backend._lib._original_lib, "X509_EXTENSION_free"
+ ),
+ ),
+ )
+ return self._backend._csr_extension_parser.parse(x509_exts)
+
+ def public_bytes(self, encoding):
+ bio = self._backend._create_mem_bio_gc()
+ if encoding is serialization.Encoding.PEM:
+ res = self._backend._lib.PEM_write_bio_X509_REQ(
+ bio, self._x509_req
+ )
+ elif encoding is serialization.Encoding.DER:
+ res = self._backend._lib.i2d_X509_REQ_bio(bio, self._x509_req)
+ else:
+ raise TypeError("encoding must be an item from the Encoding enum")
+
+ self._backend.openssl_assert(res == 1)
+ return self._backend._read_mem_bio(bio)
+
+ @property
+ def tbs_certrequest_bytes(self):
+ pp = self._backend._ffi.new("unsigned char **")
+ res = self._backend._lib.i2d_re_X509_REQ_tbs(self._x509_req, pp)
+ self._backend.openssl_assert(res > 0)
+ pp = self._backend._ffi.gc(
+ pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
+ )
+ return self._backend._ffi.buffer(pp[0], res)[:]
+
+ @property
+ def signature(self):
+ sig = self._backend._ffi.new("ASN1_BIT_STRING **")
+ self._backend._lib.X509_REQ_get0_signature(
+ self._x509_req, sig, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL)
+ return _asn1_string_to_bytes(self._backend, sig[0])
+
+ @property
+ def is_signature_valid(self):
+ pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req)
+ self._backend.openssl_assert(pkey != self._backend._ffi.NULL)
+ pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
+ res = self._backend._lib.X509_REQ_verify(self._x509_req, pkey)
+
+ if res != 1:
+ self._backend._consume_errors()
+ return False
+
+ return True
+
+ def get_attribute_for_oid(self, oid):
+ obj = _txt2obj_gc(self._backend, oid.dotted_string)
+ pos = self._backend._lib.X509_REQ_get_attr_by_OBJ(
+ self._x509_req, obj, -1
+ )
+ if pos == -1:
+ raise x509.AttributeNotFound(
+ "No {} attribute was found".format(oid), oid
+ )
+
+ attr = self._backend._lib.X509_REQ_get_attr(self._x509_req, pos)
+ self._backend.openssl_assert(attr != self._backend._ffi.NULL)
+ # We don't support multiple valued attributes for now.
+ self._backend.openssl_assert(
+ self._backend._lib.X509_ATTRIBUTE_count(attr) == 1
+ )
+ asn1_type = self._backend._lib.X509_ATTRIBUTE_get0_type(attr, 0)
+ self._backend.openssl_assert(asn1_type != self._backend._ffi.NULL)
+ # We need this to ensure that our C type cast is safe.
+ # Also this should always be a sane string type, but we'll see if
+ # that is true in the real world...
+ if asn1_type.type not in (
+ _ASN1Type.UTF8String.value,
+ _ASN1Type.PrintableString.value,
+ _ASN1Type.IA5String.value,
+ ):
+ raise ValueError(
+ "OID {} has a disallowed ASN.1 type: {}".format(
+ oid, asn1_type.type
+ )
+ )
+
+ data = self._backend._lib.X509_ATTRIBUTE_get0_data(
+ attr, 0, asn1_type.type, self._backend._ffi.NULL
+ )
+ self._backend.openssl_assert(data != self._backend._ffi.NULL)
+        # This cast is only safe because the type assertion above
+        # guarantees the underlying object is an ASN1_STRING.
+ data = self._backend._ffi.cast("ASN1_STRING *", data)
+ return _asn1_string_to_bytes(self._backend, data)
+
+
+@utils.register_interface(
+ x509.certificate_transparency.SignedCertificateTimestamp
+)
+class _SignedCertificateTimestamp(object):
+ def __init__(self, backend, sct_list, sct):
+ self._backend = backend
+ # Keep the SCT_LIST that this SCT came from alive.
+ self._sct_list = sct_list
+ self._sct = sct
+
+ @property
+ def version(self):
+ version = self._backend._lib.SCT_get_version(self._sct)
+ assert version == self._backend._lib.SCT_VERSION_V1
+ return x509.certificate_transparency.Version.v1
+
+ @property
+ def log_id(self):
+ out = self._backend._ffi.new("unsigned char **")
+ log_id_length = self._backend._lib.SCT_get0_log_id(self._sct, out)
+ assert log_id_length >= 0
+ return self._backend._ffi.buffer(out[0], log_id_length)[:]
+
+ @property
+ def timestamp(self):
+ timestamp = self._backend._lib.SCT_get_timestamp(self._sct)
+ milliseconds = timestamp % 1000
+ return datetime.datetime.utcfromtimestamp(timestamp // 1000).replace(
+ microsecond=milliseconds * 1000
+ )
+
+ @property
+ def entry_type(self):
+ entry_type = self._backend._lib.SCT_get_log_entry_type(self._sct)
+ # We currently only support loading SCTs from the X.509 extension, so
+ # we only have precerts.
+ assert entry_type == self._backend._lib.CT_LOG_ENTRY_TYPE_PRECERT
+ return x509.certificate_transparency.LogEntryType.PRE_CERTIFICATE
+
+ @property
+ def _signature(self):
+ ptrptr = self._backend._ffi.new("unsigned char **")
+ res = self._backend._lib.SCT_get0_signature(self._sct, ptrptr)
+ self._backend.openssl_assert(res > 0)
+ self._backend.openssl_assert(ptrptr[0] != self._backend._ffi.NULL)
+ return self._backend._ffi.buffer(ptrptr[0], res)[:]
+
+ def __hash__(self):
+ return hash(self._signature)
+
+ def __eq__(self, other):
+ if not isinstance(other, _SignedCertificateTimestamp):
+ return NotImplemented
+
+ return self._signature == other._signature
+
+ def __ne__(self, other):
+ return not self == other
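A minimal usage sketch, not part of the diff above, exercising the _Certificate wrapper through the public cryptography.x509 API: it builds a throwaway self-signed certificate, reads back the properties implemented above, and round-trips it through PEM. It assumes the compiled _openssl bindings for this vendored copy are importable.

import binascii
import datetime

from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.x509.oid import NameOID

backend = default_backend()
key = ec.generate_private_key(ec.SECP256R1(), backend)
name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.test")])
now = datetime.datetime.utcnow()
cert = (
    x509.CertificateBuilder()
    .subject_name(name)
    .issuer_name(name)  # self-signed, so issuer == subject
    .public_key(key.public_key())
    .serial_number(x509.random_serial_number())
    .not_valid_before(now)
    .not_valid_after(now + datetime.timedelta(days=1))
    .sign(key, hashes.SHA256(), backend)
)

# These accessors are served by the _Certificate wrapper defined above.
print(cert.subject)
print(cert.serial_number)
print(binascii.hexlify(cert.fingerprint(hashes.SHA256())))

# Round-trip through PEM; __eq__ compares the certificates via X509_cmp.
pem = cert.public_bytes(serialization.Encoding.PEM)
assert x509.load_pem_x509_certificate(pem, backend) == cert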
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/bindings/__init__.py b/contrib/python/cryptography/py2/cryptography/hazmat/bindings/__init__.py
new file mode 100644
index 0000000000..4b540884df
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/bindings/__init__.py
@@ -0,0 +1,5 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/bindings/openssl/__init__.py b/contrib/python/cryptography/py2/cryptography/hazmat/bindings/openssl/__init__.py
new file mode 100644
index 0000000000..4b540884df
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/bindings/openssl/__init__.py
@@ -0,0 +1,5 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/bindings/openssl/_conditional.py b/contrib/python/cryptography/py2/cryptography/hazmat/bindings/openssl/_conditional.py
new file mode 100644
index 0000000000..ca50fed134
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/bindings/openssl/_conditional.py
@@ -0,0 +1,322 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+
+def cryptography_has_ec2m():
+ return [
+ "EC_POINT_set_affine_coordinates_GF2m",
+ "EC_POINT_get_affine_coordinates_GF2m",
+ "EC_POINT_set_compressed_coordinates_GF2m",
+ ]
+
+
+def cryptography_has_rsa_oaep_md():
+ return [
+ "EVP_PKEY_CTX_set_rsa_oaep_md",
+ ]
+
+
+def cryptography_has_rsa_oaep_label():
+ return [
+ "EVP_PKEY_CTX_set0_rsa_oaep_label",
+ ]
+
+
+def cryptography_has_ssl3_method():
+ return [
+ "SSLv3_method",
+ "SSLv3_client_method",
+ "SSLv3_server_method",
+ ]
+
+
+def cryptography_has_102_verification():
+ return [
+ "X509_V_ERR_SUITE_B_INVALID_VERSION",
+ "X509_V_ERR_SUITE_B_INVALID_ALGORITHM",
+ "X509_V_ERR_SUITE_B_INVALID_CURVE",
+ "X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM",
+ "X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED",
+ "X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256",
+ "X509_V_FLAG_SUITEB_128_LOS_ONLY",
+ "X509_V_FLAG_SUITEB_192_LOS",
+ "X509_V_FLAG_SUITEB_128_LOS",
+ ]
+
+
+def cryptography_has_110_verification_params():
+ return ["X509_CHECK_FLAG_NEVER_CHECK_SUBJECT"]
+
+
+def cryptography_has_set_cert_cb():
+ return [
+ "SSL_CTX_set_cert_cb",
+ "SSL_set_cert_cb",
+ ]
+
+
+def cryptography_has_ssl_st():
+ return [
+ "SSL_ST_BEFORE",
+ "SSL_ST_OK",
+ "SSL_ST_INIT",
+ "SSL_ST_RENEGOTIATE",
+ ]
+
+
+def cryptography_has_tls_st():
+ return [
+ "TLS_ST_BEFORE",
+ "TLS_ST_OK",
+ ]
+
+
+def cryptography_has_scrypt():
+ return [
+ "EVP_PBE_scrypt",
+ ]
+
+
+def cryptography_has_evp_pkey_dhx():
+ return [
+ "EVP_PKEY_DHX",
+ ]
+
+
+def cryptography_has_mem_functions():
+ return [
+ "Cryptography_CRYPTO_set_mem_functions",
+ ]
+
+
+def cryptography_has_sct():
+ return [
+ "SCT_get_version",
+ "SCT_get_log_entry_type",
+ "SCT_get0_log_id",
+ "SCT_get0_signature",
+ "SCT_get_timestamp",
+ "SCT_set_source",
+ "sk_SCT_new_null",
+ "sk_SCT_free",
+ "sk_SCT_num",
+ "sk_SCT_value",
+ "sk_SCT_push",
+ "SCT_LIST_free",
+ "SCT_new",
+ "SCT_set1_log_id",
+ "SCT_set_timestamp",
+ "SCT_set_version",
+ "SCT_set_log_entry_type",
+ ]
+
+
+def cryptography_has_x509_store_ctx_get_issuer():
+ return [
+ "X509_STORE_get_get_issuer",
+ "X509_STORE_set_get_issuer",
+ ]
+
+
+def cryptography_has_ed448():
+ return [
+ "EVP_PKEY_ED448",
+ "NID_ED448",
+ ]
+
+
+def cryptography_has_ed25519():
+ return [
+ "NID_ED25519",
+ "EVP_PKEY_ED25519",
+ ]
+
+
+def cryptography_has_poly1305():
+ return [
+ "NID_poly1305",
+ "EVP_PKEY_POLY1305",
+ ]
+
+
+def cryptography_has_oneshot_evp_digest_sign_verify():
+ return [
+ "EVP_DigestSign",
+ "EVP_DigestVerify",
+ ]
+
+
+def cryptography_has_evp_digestfinal_xof():
+ return [
+ "EVP_DigestFinalXOF",
+ ]
+
+
+def cryptography_has_evp_pkey_get_set_tls_encodedpoint():
+ return [
+ "EVP_PKEY_get1_tls_encodedpoint",
+ "EVP_PKEY_set1_tls_encodedpoint",
+ ]
+
+
+def cryptography_has_fips():
+ return [
+ "FIPS_mode_set",
+ "FIPS_mode",
+ ]
+
+
+def cryptography_has_ssl_sigalgs():
+ return [
+ "SSL_CTX_set1_sigalgs_list",
+ "SSL_get_sigalgs",
+ ]
+
+
+def cryptography_has_psk():
+ return [
+ "SSL_CTX_use_psk_identity_hint",
+ "SSL_CTX_set_psk_server_callback",
+ "SSL_CTX_set_psk_client_callback",
+ ]
+
+
+def cryptography_has_custom_ext():
+ return [
+ "SSL_CTX_add_client_custom_ext",
+ "SSL_CTX_add_server_custom_ext",
+ "SSL_extension_supported",
+ ]
+
+
+def cryptography_has_openssl_cleanup():
+ return [
+ "OPENSSL_cleanup",
+ ]
+
+
+def cryptography_has_tlsv13():
+ return [
+ "SSL_OP_NO_TLSv1_3",
+ "SSL_VERIFY_POST_HANDSHAKE",
+ "SSL_CTX_set_ciphersuites",
+ "SSL_verify_client_post_handshake",
+ "SSL_CTX_set_post_handshake_auth",
+ "SSL_set_post_handshake_auth",
+ "SSL_SESSION_get_max_early_data",
+ "SSL_write_early_data",
+ "SSL_read_early_data",
+ "SSL_CTX_set_max_early_data",
+ ]
+
+
+def cryptography_has_keylog():
+ return [
+ "SSL_CTX_set_keylog_callback",
+ "SSL_CTX_get_keylog_callback",
+ ]
+
+
+def cryptography_has_raw_key():
+ return [
+ "EVP_PKEY_new_raw_private_key",
+ "EVP_PKEY_new_raw_public_key",
+ "EVP_PKEY_get_raw_private_key",
+ "EVP_PKEY_get_raw_public_key",
+ ]
+
+
+def cryptography_has_engine():
+ return [
+ "ENGINE_by_id",
+ "ENGINE_init",
+ "ENGINE_finish",
+ "ENGINE_get_default_RAND",
+ "ENGINE_set_default_RAND",
+ "ENGINE_unregister_RAND",
+ "ENGINE_ctrl_cmd",
+ "ENGINE_free",
+ "ENGINE_get_name",
+ "Cryptography_add_osrandom_engine",
+ "ENGINE_ctrl_cmd_string",
+ "ENGINE_load_builtin_engines",
+ "ENGINE_load_private_key",
+ "ENGINE_load_public_key",
+ ]
+
+
+def cryptography_has_verified_chain():
+ return [
+ "SSL_get0_verified_chain",
+ ]
+
+
+def cryptography_has_srtp():
+ return [
+ "SSL_CTX_set_tlsext_use_srtp",
+ "SSL_set_tlsext_use_srtp",
+ "SSL_get_selected_srtp_profile",
+ ]
+
+
+def cryptography_has_get_proto_version():
+ return [
+ "SSL_CTX_get_min_proto_version",
+ "SSL_CTX_get_max_proto_version",
+ "SSL_get_min_proto_version",
+ "SSL_get_max_proto_version",
+ ]
+
+
+# This is a mapping of
+# {condition: function-returning-names-dependent-on-that-condition} so we can
+# loop over them and delete unsupported names at runtime. It will be removed
+# when cffi supports #if in cdef. We use functions instead of just a dict of
+# lists so we can use coverage to measure which are used.
+CONDITIONAL_NAMES = {
+ "Cryptography_HAS_EC2M": cryptography_has_ec2m,
+ "Cryptography_HAS_RSA_OAEP_MD": cryptography_has_rsa_oaep_md,
+ "Cryptography_HAS_RSA_OAEP_LABEL": cryptography_has_rsa_oaep_label,
+ "Cryptography_HAS_SSL3_METHOD": cryptography_has_ssl3_method,
+ "Cryptography_HAS_102_VERIFICATION": cryptography_has_102_verification,
+ "Cryptography_HAS_110_VERIFICATION_PARAMS": (
+ cryptography_has_110_verification_params
+ ),
+ "Cryptography_HAS_SET_CERT_CB": cryptography_has_set_cert_cb,
+ "Cryptography_HAS_SSL_ST": cryptography_has_ssl_st,
+ "Cryptography_HAS_TLS_ST": cryptography_has_tls_st,
+ "Cryptography_HAS_SCRYPT": cryptography_has_scrypt,
+ "Cryptography_HAS_EVP_PKEY_DHX": cryptography_has_evp_pkey_dhx,
+ "Cryptography_HAS_MEM_FUNCTIONS": cryptography_has_mem_functions,
+ "Cryptography_HAS_SCT": cryptography_has_sct,
+ "Cryptography_HAS_X509_STORE_CTX_GET_ISSUER": (
+ cryptography_has_x509_store_ctx_get_issuer
+ ),
+ "Cryptography_HAS_ED448": cryptography_has_ed448,
+ "Cryptography_HAS_ED25519": cryptography_has_ed25519,
+ "Cryptography_HAS_POLY1305": cryptography_has_poly1305,
+ "Cryptography_HAS_ONESHOT_EVP_DIGEST_SIGN_VERIFY": (
+ cryptography_has_oneshot_evp_digest_sign_verify
+ ),
+ "Cryptography_HAS_EVP_PKEY_get_set_tls_encodedpoint": (
+ cryptography_has_evp_pkey_get_set_tls_encodedpoint
+ ),
+ "Cryptography_HAS_FIPS": cryptography_has_fips,
+ "Cryptography_HAS_SIGALGS": cryptography_has_ssl_sigalgs,
+ "Cryptography_HAS_PSK": cryptography_has_psk,
+ "Cryptography_HAS_CUSTOM_EXT": cryptography_has_custom_ext,
+ "Cryptography_HAS_OPENSSL_CLEANUP": cryptography_has_openssl_cleanup,
+ "Cryptography_HAS_TLSv1_3": cryptography_has_tlsv13,
+ "Cryptography_HAS_KEYLOG": cryptography_has_keylog,
+ "Cryptography_HAS_RAW_KEY": cryptography_has_raw_key,
+ "Cryptography_HAS_EVP_DIGESTFINAL_XOF": (
+ cryptography_has_evp_digestfinal_xof
+ ),
+ "Cryptography_HAS_ENGINE": cryptography_has_engine,
+ "Cryptography_HAS_VERIFIED_CHAIN": cryptography_has_verified_chain,
+ "Cryptography_HAS_SRTP": cryptography_has_srtp,
+ "Cryptography_HAS_GET_PROTO_VERSION": cryptography_has_get_proto_version,
+}
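The mapping above is consumed by build_conditional_library in binding.py (the next file in this diff). The following self-contained sketch imitates that filtering step with a made-up FakeLib object instead of the real compiled bindings, so the flags and symbols here are purely illustrative.

import types


class FakeLib(object):
    # Stand-in for the cffi lib: feature flags plus the symbols they gate.
    Cryptography_HAS_SCRYPT = 0  # pretend scrypt is unavailable
    Cryptography_HAS_POLY1305 = 1  # pretend poly1305 is available
    EVP_PBE_scrypt = object()
    NID_poly1305 = object()
    EVP_PKEY_POLY1305 = object()


CONDITIONS = {
    "Cryptography_HAS_SCRYPT": lambda: ["EVP_PBE_scrypt"],
    "Cryptography_HAS_POLY1305": lambda: ["NID_poly1305", "EVP_PKEY_POLY1305"],
}


def build_conditional(lib, conditions):
    # Same idea as build_conditional_library: collect the names whose
    # feature flag is false and copy everything else onto a new module.
    out = types.ModuleType("lib")
    excluded = set()
    for flag, names_cb in conditions.items():
        if not getattr(lib, flag):
            excluded.update(names_cb())
    for attr in dir(lib):
        # The real cffi lib only exposes C names; skip Python dunders here.
        if attr.startswith("_") or attr in excluded:
            continue
        setattr(out, attr, getattr(lib, attr))
    return out


lib = build_conditional(FakeLib, CONDITIONS)
assert hasattr(lib, "NID_poly1305")
assert not hasattr(lib, "EVP_PBE_scrypt")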
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/bindings/openssl/binding.py b/contrib/python/cryptography/py2/cryptography/hazmat/bindings/openssl/binding.py
new file mode 100644
index 0000000000..7a84a340e4
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/bindings/openssl/binding.py
@@ -0,0 +1,172 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import collections
+import threading
+import types
+
+import cryptography
+from cryptography import utils
+from cryptography.exceptions import InternalError
+from cryptography.hazmat.bindings._openssl import ffi, lib
+from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES
+
+_OpenSSLErrorWithText = collections.namedtuple(
+ "_OpenSSLErrorWithText", ["code", "lib", "func", "reason", "reason_text"]
+)
+
+
+class _OpenSSLError(object):
+ def __init__(self, code, lib, func, reason):
+ self._code = code
+ self._lib = lib
+ self._func = func
+ self._reason = reason
+
+ def _lib_reason_match(self, lib, reason):
+ return lib == self.lib and reason == self.reason
+
+ code = utils.read_only_property("_code")
+ lib = utils.read_only_property("_lib")
+ func = utils.read_only_property("_func")
+ reason = utils.read_only_property("_reason")
+
+
+def _consume_errors(lib):
+ errors = []
+ while True:
+ code = lib.ERR_get_error()
+ if code == 0:
+ break
+
+ err_lib = lib.ERR_GET_LIB(code)
+ err_func = lib.ERR_GET_FUNC(code)
+ err_reason = lib.ERR_GET_REASON(code)
+
+ errors.append(_OpenSSLError(code, err_lib, err_func, err_reason))
+
+ return errors
+
+
+def _errors_with_text(errors):
+ errors_with_text = []
+ for err in errors:
+ buf = ffi.new("char[]", 256)
+ lib.ERR_error_string_n(err.code, buf, len(buf))
+ err_text_reason = ffi.string(buf)
+
+ errors_with_text.append(
+ _OpenSSLErrorWithText(
+ err.code, err.lib, err.func, err.reason, err_text_reason
+ )
+ )
+
+ return errors_with_text
+
+
+def _consume_errors_with_text(lib):
+ return _errors_with_text(_consume_errors(lib))
+
+
+def _openssl_assert(lib, ok, errors=None):
+ if not ok:
+ if errors is None:
+ errors = _consume_errors(lib)
+ errors_with_text = _errors_with_text(errors)
+
+ raise InternalError(
+ "Unknown OpenSSL error. This error is commonly encountered when "
+ "another library is not cleaning up the OpenSSL error stack. If "
+ "you are using cryptography with another library that uses "
+ "OpenSSL try disabling it before reporting a bug. Otherwise "
+ "please file an issue at https://github.com/pyca/cryptography/"
+ "issues with information on how to reproduce "
+ "this. ({0!r})".format(errors_with_text),
+ errors_with_text,
+ )
+
+
+def build_conditional_library(lib, conditional_names):
+ conditional_lib = types.ModuleType("lib")
+ conditional_lib._original_lib = lib
+ excluded_names = set()
+ for condition, names_cb in conditional_names.items():
+ if not getattr(lib, condition):
+ excluded_names.update(names_cb())
+
+ for attr in dir(lib):
+ if attr not in excluded_names:
+ setattr(conditional_lib, attr, getattr(lib, attr))
+
+ return conditional_lib
+
+
+class Binding(object):
+ """
+ OpenSSL API wrapper.
+ """
+
+ lib = None
+ ffi = ffi
+ _lib_loaded = False
+ _init_lock = threading.Lock()
+
+ def __init__(self):
+ self._ensure_ffi_initialized()
+
+ @classmethod
+ def _register_osrandom_engine(cls):
+ # Clear any errors extant in the queue before we start. In many
+ # scenarios other things may be interacting with OpenSSL in the same
+ # process space and it has proven untenable to assume that they will
+ # reliably clear the error queue. Once we clear it here we will
+ # error on any subsequent unexpected item in the stack.
+ cls.lib.ERR_clear_error()
+ if cls.lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE:
+ result = cls.lib.Cryptography_add_osrandom_engine()
+ _openssl_assert(cls.lib, result in (1, 2))
+
+ @classmethod
+ def _ensure_ffi_initialized(cls):
+ with cls._init_lock:
+ if not cls._lib_loaded:
+ cls.lib = build_conditional_library(lib, CONDITIONAL_NAMES)
+ cls._lib_loaded = True
+ # initialize the SSL library
+ cls.lib.SSL_library_init()
+ # adds all ciphers/digests for EVP
+ cls.lib.OpenSSL_add_all_algorithms()
+ cls._register_osrandom_engine()
+
+ @classmethod
+ def init_static_locks(cls):
+ cls._ensure_ffi_initialized()
+
+
+def _verify_package_version(version):
+    # Occasionally we run into situations where the version of the Python
+    # package does not match the version of the shared object that is loaded.
+    # This may occur in environments where multiple versions of cryptography
+    # are installed and available in the Python path. To avoid errors cropping
+    # up later, this code checks that the currently imported package and the
+    # loaded shared object have the same version and raises an ImportError
+    # if they do not.
+ so_package_version = ffi.string(lib.CRYPTOGRAPHY_PACKAGE_VERSION)
+ if version.encode("ascii") != so_package_version:
+ raise ImportError(
+ "The version of cryptography does not match the loaded "
+ "shared object. This can happen if you have multiple copies of "
+ "cryptography installed in your Python path. Please try creating "
+ "a new virtual environment to resolve this issue. "
+ "Loaded python version: {}, shared object version: {}".format(
+ version, so_package_version
+ )
+ )
+
+
+_verify_package_version(cryptography.__version__)
+
+Binding.init_static_locks()
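A short usage sketch, not part of the diff, assuming the compiled _openssl extension is importable: it shows that symbols stripped by build_conditional_library stay in sync with their feature flag, and that the error-queue helpers leave a clean queue behind.

from cryptography.hazmat.bindings.openssl.binding import Binding

binding = Binding()

# Names gated by a false feature flag were removed from binding.lib.
if getattr(binding.lib, "Cryptography_HAS_SCRYPT", 0):
    assert hasattr(binding.lib, "EVP_PBE_scrypt")
else:
    assert not hasattr(binding.lib, "EVP_PBE_scrypt")

# The OpenSSL error queue should be empty between operations.
binding.lib.ERR_clear_error()
assert binding.lib.ERR_get_error() == 0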
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/__init__.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/__init__.py
new file mode 100644
index 0000000000..4b540884df
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/__init__.py
@@ -0,0 +1,5 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/__init__.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/__init__.py
new file mode 100644
index 0000000000..494a7a1350
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/__init__.py
@@ -0,0 +1,40 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+
+@six.add_metaclass(abc.ABCMeta)
+class AsymmetricSignatureContext(object):
+ @abc.abstractmethod
+ def update(self, data):
+ """
+ Processes the provided bytes and returns nothing.
+ """
+
+ @abc.abstractmethod
+ def finalize(self):
+ """
+ Returns the signature as bytes.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class AsymmetricVerificationContext(object):
+ @abc.abstractmethod
+ def update(self, data):
+ """
+ Processes the provided bytes and returns nothing.
+ """
+
+ @abc.abstractmethod
+ def verify(self):
+ """
+ Raises an exception if the bytes provided to update do not match the
+ signature or the signature does not match the public key.
+ """
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/dh.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/dh.py
new file mode 100644
index 0000000000..74a311d501
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/dh.py
@@ -0,0 +1,224 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+from cryptography import utils
+from cryptography.hazmat.backends import _get_backend
+
+
+_MIN_MODULUS_SIZE = 512
+
+
+def generate_parameters(generator, key_size, backend=None):
+ backend = _get_backend(backend)
+ return backend.generate_dh_parameters(generator, key_size)
+
+
+class DHPrivateNumbers(object):
+ def __init__(self, x, public_numbers):
+ if not isinstance(x, six.integer_types):
+ raise TypeError("x must be an integer.")
+
+ if not isinstance(public_numbers, DHPublicNumbers):
+ raise TypeError(
+ "public_numbers must be an instance of " "DHPublicNumbers."
+ )
+
+ self._x = x
+ self._public_numbers = public_numbers
+
+ def __eq__(self, other):
+ if not isinstance(other, DHPrivateNumbers):
+ return NotImplemented
+
+ return (
+ self._x == other._x
+ and self._public_numbers == other._public_numbers
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def private_key(self, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_dh_private_numbers(self)
+
+ public_numbers = utils.read_only_property("_public_numbers")
+ x = utils.read_only_property("_x")
+
+
+class DHPublicNumbers(object):
+ def __init__(self, y, parameter_numbers):
+ if not isinstance(y, six.integer_types):
+ raise TypeError("y must be an integer.")
+
+ if not isinstance(parameter_numbers, DHParameterNumbers):
+ raise TypeError(
+ "parameters must be an instance of DHParameterNumbers."
+ )
+
+ self._y = y
+ self._parameter_numbers = parameter_numbers
+
+ def __eq__(self, other):
+ if not isinstance(other, DHPublicNumbers):
+ return NotImplemented
+
+ return (
+ self._y == other._y
+ and self._parameter_numbers == other._parameter_numbers
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def public_key(self, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_dh_public_numbers(self)
+
+ y = utils.read_only_property("_y")
+ parameter_numbers = utils.read_only_property("_parameter_numbers")
+
+
+class DHParameterNumbers(object):
+ def __init__(self, p, g, q=None):
+ if not isinstance(p, six.integer_types) or not isinstance(
+ g, six.integer_types
+ ):
+ raise TypeError("p and g must be integers")
+ if q is not None and not isinstance(q, six.integer_types):
+ raise TypeError("q must be integer or None")
+
+ if g < 2:
+ raise ValueError("DH generator must be 2 or greater")
+
+ if p.bit_length() < _MIN_MODULUS_SIZE:
+ raise ValueError(
+ "p (modulus) must be at least {}-bit".format(_MIN_MODULUS_SIZE)
+ )
+
+ self._p = p
+ self._g = g
+ self._q = q
+
+ def __eq__(self, other):
+ if not isinstance(other, DHParameterNumbers):
+ return NotImplemented
+
+ return (
+ self._p == other._p and self._g == other._g and self._q == other._q
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def parameters(self, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_dh_parameter_numbers(self)
+
+ p = utils.read_only_property("_p")
+ g = utils.read_only_property("_g")
+ q = utils.read_only_property("_q")
+
+
+@six.add_metaclass(abc.ABCMeta)
+class DHParameters(object):
+ @abc.abstractmethod
+ def generate_private_key(self):
+ """
+ Generates and returns a DHPrivateKey.
+ """
+
+ @abc.abstractmethod
+ def parameter_bytes(self, encoding, format):
+ """
+ Returns the parameters serialized as bytes.
+ """
+
+ @abc.abstractmethod
+ def parameter_numbers(self):
+ """
+ Returns a DHParameterNumbers.
+ """
+
+
+DHParametersWithSerialization = DHParameters
+
+
+@six.add_metaclass(abc.ABCMeta)
+class DHPrivateKey(object):
+ @abc.abstractproperty
+ def key_size(self):
+ """
+ The bit length of the prime modulus.
+ """
+
+ @abc.abstractmethod
+ def public_key(self):
+ """
+ The DHPublicKey associated with this private key.
+ """
+
+ @abc.abstractmethod
+ def parameters(self):
+ """
+ The DHParameters object associated with this private key.
+ """
+
+ @abc.abstractmethod
+ def exchange(self, peer_public_key):
+ """
+        Given the peer's DHPublicKey, carry out the key exchange and
+        return the shared key as bytes.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class DHPrivateKeyWithSerialization(DHPrivateKey):
+ @abc.abstractmethod
+ def private_numbers(self):
+ """
+ Returns a DHPrivateNumbers.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ """
+ Returns the key serialized as bytes.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class DHPublicKey(object):
+ @abc.abstractproperty
+ def key_size(self):
+ """
+ The bit length of the prime modulus.
+ """
+
+ @abc.abstractmethod
+ def parameters(self):
+ """
+ The DHParameters object associated with this public key.
+ """
+
+ @abc.abstractmethod
+ def public_numbers(self):
+ """
+ Returns a DHPublicNumbers.
+ """
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding, format):
+ """
+ Returns the key serialized as bytes.
+ """
+
+
+DHPublicKeyWithSerialization = DHPublicKey
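A minimal end-to-end sketch of these interfaces, not part of the diff: two private keys generated from the same DHParameters derive identical shared secrets. The 512-bit modulus equals _MIN_MODULUS_SIZE and is used only to keep the sketch fast; it is far too small for real deployments.

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import dh

backend = default_backend()
# 512 bits is the minimum the module accepts; chosen only for speed here.
parameters = dh.generate_parameters(generator=2, key_size=512, backend=backend)

server_key = parameters.generate_private_key()
client_key = parameters.generate_private_key()

# Each side mixes its own private key with the peer's public key.
shared_1 = server_key.exchange(client_key.public_key())
shared_2 = client_key.exchange(server_key.public_key())
assert shared_1 == shared_2

# The numbers classes round-trip through the backend loaders.
reloaded = parameters.parameter_numbers().parameters(backend)
assert reloaded.parameter_numbers().p == parameters.parameter_numbers().p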
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/dsa.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/dsa.py
new file mode 100644
index 0000000000..8ccc66665f
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/dsa.py
@@ -0,0 +1,261 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+from cryptography import utils
+from cryptography.hazmat.backends import _get_backend
+
+
+@six.add_metaclass(abc.ABCMeta)
+class DSAParameters(object):
+ @abc.abstractmethod
+ def generate_private_key(self):
+ """
+ Generates and returns a DSAPrivateKey.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class DSAParametersWithNumbers(DSAParameters):
+ @abc.abstractmethod
+ def parameter_numbers(self):
+ """
+ Returns a DSAParameterNumbers.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class DSAPrivateKey(object):
+ @abc.abstractproperty
+ def key_size(self):
+ """
+ The bit length of the prime modulus.
+ """
+
+ @abc.abstractmethod
+ def public_key(self):
+ """
+ The DSAPublicKey associated with this private key.
+ """
+
+ @abc.abstractmethod
+ def parameters(self):
+ """
+ The DSAParameters object associated with this private key.
+ """
+
+ @abc.abstractmethod
+ def signer(self, signature_algorithm):
+ """
+ Returns an AsymmetricSignatureContext used for signing data.
+ """
+
+ @abc.abstractmethod
+ def sign(self, data, algorithm):
+ """
+ Signs the data
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class DSAPrivateKeyWithSerialization(DSAPrivateKey):
+ @abc.abstractmethod
+ def private_numbers(self):
+ """
+ Returns a DSAPrivateNumbers.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ """
+ Returns the key serialized as bytes.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class DSAPublicKey(object):
+ @abc.abstractproperty
+ def key_size(self):
+ """
+ The bit length of the prime modulus.
+ """
+
+ @abc.abstractmethod
+ def parameters(self):
+ """
+ The DSAParameters object associated with this public key.
+ """
+
+ @abc.abstractmethod
+ def verifier(self, signature, signature_algorithm):
+ """
+        Returns an AsymmetricVerificationContext used for verifying data.
+ """
+
+ @abc.abstractmethod
+ def public_numbers(self):
+ """
+ Returns a DSAPublicNumbers.
+ """
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding, format):
+ """
+ Returns the key serialized as bytes.
+ """
+
+ @abc.abstractmethod
+ def verify(self, signature, data, algorithm):
+ """
+ Verifies the signature of the data.
+ """
+
+
+DSAPublicKeyWithSerialization = DSAPublicKey
+
+
+def generate_parameters(key_size, backend=None):
+ backend = _get_backend(backend)
+ return backend.generate_dsa_parameters(key_size)
+
+
+def generate_private_key(key_size, backend=None):
+ backend = _get_backend(backend)
+ return backend.generate_dsa_private_key_and_parameters(key_size)
+
+
+def _check_dsa_parameters(parameters):
+ if parameters.p.bit_length() not in [1024, 2048, 3072, 4096]:
+ raise ValueError(
+ "p must be exactly 1024, 2048, 3072, or 4096 bits long"
+ )
+ if parameters.q.bit_length() not in [160, 224, 256]:
+ raise ValueError("q must be exactly 160, 224, or 256 bits long")
+
+ if not (1 < parameters.g < parameters.p):
+ raise ValueError("g, p don't satisfy 1 < g < p.")
+
+
+def _check_dsa_private_numbers(numbers):
+ parameters = numbers.public_numbers.parameter_numbers
+ _check_dsa_parameters(parameters)
+ if numbers.x <= 0 or numbers.x >= parameters.q:
+ raise ValueError("x must be > 0 and < q.")
+
+ if numbers.public_numbers.y != pow(parameters.g, numbers.x, parameters.p):
+ raise ValueError("y must be equal to (g ** x % p).")
+
+
+class DSAParameterNumbers(object):
+ def __init__(self, p, q, g):
+ if (
+ not isinstance(p, six.integer_types)
+ or not isinstance(q, six.integer_types)
+ or not isinstance(g, six.integer_types)
+ ):
+ raise TypeError(
+ "DSAParameterNumbers p, q, and g arguments must be integers."
+ )
+
+ self._p = p
+ self._q = q
+ self._g = g
+
+ p = utils.read_only_property("_p")
+ q = utils.read_only_property("_q")
+ g = utils.read_only_property("_g")
+
+ def parameters(self, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_dsa_parameter_numbers(self)
+
+ def __eq__(self, other):
+ if not isinstance(other, DSAParameterNumbers):
+ return NotImplemented
+
+ return self.p == other.p and self.q == other.q and self.g == other.g
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __repr__(self):
+ return (
+ "<DSAParameterNumbers(p={self.p}, q={self.q}, "
+ "g={self.g})>".format(self=self)
+ )
+
+
+class DSAPublicNumbers(object):
+ def __init__(self, y, parameter_numbers):
+ if not isinstance(y, six.integer_types):
+ raise TypeError("DSAPublicNumbers y argument must be an integer.")
+
+ if not isinstance(parameter_numbers, DSAParameterNumbers):
+ raise TypeError(
+ "parameter_numbers must be a DSAParameterNumbers instance."
+ )
+
+ self._y = y
+ self._parameter_numbers = parameter_numbers
+
+ y = utils.read_only_property("_y")
+ parameter_numbers = utils.read_only_property("_parameter_numbers")
+
+ def public_key(self, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_dsa_public_numbers(self)
+
+ def __eq__(self, other):
+ if not isinstance(other, DSAPublicNumbers):
+ return NotImplemented
+
+ return (
+ self.y == other.y
+ and self.parameter_numbers == other.parameter_numbers
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __repr__(self):
+ return (
+ "<DSAPublicNumbers(y={self.y}, "
+ "parameter_numbers={self.parameter_numbers})>".format(self=self)
+ )
+
+
+class DSAPrivateNumbers(object):
+ def __init__(self, x, public_numbers):
+ if not isinstance(x, six.integer_types):
+ raise TypeError("DSAPrivateNumbers x argument must be an integer.")
+
+ if not isinstance(public_numbers, DSAPublicNumbers):
+ raise TypeError(
+ "public_numbers must be a DSAPublicNumbers instance."
+ )
+ self._public_numbers = public_numbers
+ self._x = x
+
+ x = utils.read_only_property("_x")
+ public_numbers = utils.read_only_property("_public_numbers")
+
+ def private_key(self, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_dsa_private_numbers(self)
+
+ def __eq__(self, other):
+ if not isinstance(other, DSAPrivateNumbers):
+ return NotImplemented
+
+ return (
+ self.x == other.x and self.public_numbers == other.public_numbers
+ )
+
+ def __ne__(self, other):
+ return not self == other
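A short sign-and-verify sketch against the interfaces above, not part of the diff. The 1024-bit size keeps parameter generation quick and is used for illustration only.

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import dsa

backend = default_backend()
private_key = dsa.generate_private_key(key_size=1024, backend=backend)
public_key = private_key.public_key()

data = b"signed payload"
signature = private_key.sign(data, hashes.SHA256())
public_key.verify(signature, data, hashes.SHA256())  # returns None on success

try:
    public_key.verify(signature, b"tampered payload", hashes.SHA256())
except InvalidSignature:
    pass  # verification failure raises instead of returning False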
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/ec.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/ec.py
new file mode 100644
index 0000000000..c7e694fc56
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/ec.py
@@ -0,0 +1,502 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+import warnings
+
+import six
+
+from cryptography import utils
+from cryptography.hazmat._oid import ObjectIdentifier
+from cryptography.hazmat.backends import _get_backend
+
+
+class EllipticCurveOID(object):
+ SECP192R1 = ObjectIdentifier("1.2.840.10045.3.1.1")
+ SECP224R1 = ObjectIdentifier("1.3.132.0.33")
+ SECP256K1 = ObjectIdentifier("1.3.132.0.10")
+ SECP256R1 = ObjectIdentifier("1.2.840.10045.3.1.7")
+ SECP384R1 = ObjectIdentifier("1.3.132.0.34")
+ SECP521R1 = ObjectIdentifier("1.3.132.0.35")
+ BRAINPOOLP256R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.7")
+ BRAINPOOLP384R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.11")
+ BRAINPOOLP512R1 = ObjectIdentifier("1.3.36.3.3.2.8.1.1.13")
+ SECT163K1 = ObjectIdentifier("1.3.132.0.1")
+ SECT163R2 = ObjectIdentifier("1.3.132.0.15")
+ SECT233K1 = ObjectIdentifier("1.3.132.0.26")
+ SECT233R1 = ObjectIdentifier("1.3.132.0.27")
+ SECT283K1 = ObjectIdentifier("1.3.132.0.16")
+ SECT283R1 = ObjectIdentifier("1.3.132.0.17")
+ SECT409K1 = ObjectIdentifier("1.3.132.0.36")
+ SECT409R1 = ObjectIdentifier("1.3.132.0.37")
+ SECT571K1 = ObjectIdentifier("1.3.132.0.38")
+ SECT571R1 = ObjectIdentifier("1.3.132.0.39")
+
+
+@six.add_metaclass(abc.ABCMeta)
+class EllipticCurve(object):
+ @abc.abstractproperty
+ def name(self):
+ """
+        The name of the curve, e.g. secp256r1.
+ """
+
+ @abc.abstractproperty
+ def key_size(self):
+ """
+ Bit size of a secret scalar for the curve.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class EllipticCurveSignatureAlgorithm(object):
+ @abc.abstractproperty
+ def algorithm(self):
+ """
+ The digest algorithm used with this signature.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class EllipticCurvePrivateKey(object):
+ @abc.abstractmethod
+ def signer(self, signature_algorithm):
+ """
+ Returns an AsymmetricSignatureContext used for signing data.
+ """
+
+ @abc.abstractmethod
+ def exchange(self, algorithm, peer_public_key):
+ """
+        Performs a key exchange operation using the provided algorithm
+        with the peer's public key.
+ """
+
+ @abc.abstractmethod
+ def public_key(self):
+ """
+ The EllipticCurvePublicKey for this private key.
+ """
+
+ @abc.abstractproperty
+ def curve(self):
+ """
+ The EllipticCurve that this key is on.
+ """
+
+ @abc.abstractproperty
+ def key_size(self):
+ """
+ Bit size of a secret scalar for the curve.
+ """
+
+ @abc.abstractmethod
+ def sign(self, data, signature_algorithm):
+ """
+        Signs the data.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class EllipticCurvePrivateKeyWithSerialization(EllipticCurvePrivateKey):
+ @abc.abstractmethod
+ def private_numbers(self):
+ """
+ Returns an EllipticCurvePrivateNumbers.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ """
+ Returns the key serialized as bytes.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class EllipticCurvePublicKey(object):
+ @abc.abstractmethod
+ def verifier(self, signature, signature_algorithm):
+ """
+        Returns an AsymmetricVerificationContext used for verifying data.
+ """
+
+ @abc.abstractproperty
+ def curve(self):
+ """
+ The EllipticCurve that this key is on.
+ """
+
+ @abc.abstractproperty
+ def key_size(self):
+ """
+ Bit size of a secret scalar for the curve.
+ """
+
+ @abc.abstractmethod
+ def public_numbers(self):
+ """
+ Returns an EllipticCurvePublicNumbers.
+ """
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding, format):
+ """
+ Returns the key serialized as bytes.
+ """
+
+ @abc.abstractmethod
+ def verify(self, signature, data, signature_algorithm):
+ """
+ Verifies the signature of the data.
+ """
+
+ @classmethod
+ def from_encoded_point(cls, curve, data):
+ utils._check_bytes("data", data)
+
+ if not isinstance(curve, EllipticCurve):
+ raise TypeError("curve must be an EllipticCurve instance")
+
+ if len(data) == 0:
+ raise ValueError("data must not be an empty byte string")
+
+ if six.indexbytes(data, 0) not in [0x02, 0x03, 0x04]:
+ raise ValueError("Unsupported elliptic curve point type")
+
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ return backend.load_elliptic_curve_public_bytes(curve, data)
+
+
+EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey
+
+
+@utils.register_interface(EllipticCurve)
+class SECT571R1(object):
+ name = "sect571r1"
+ key_size = 570
+
+
+@utils.register_interface(EllipticCurve)
+class SECT409R1(object):
+ name = "sect409r1"
+ key_size = 409
+
+
+@utils.register_interface(EllipticCurve)
+class SECT283R1(object):
+ name = "sect283r1"
+ key_size = 283
+
+
+@utils.register_interface(EllipticCurve)
+class SECT233R1(object):
+ name = "sect233r1"
+ key_size = 233
+
+
+@utils.register_interface(EllipticCurve)
+class SECT163R2(object):
+ name = "sect163r2"
+ key_size = 163
+
+
+@utils.register_interface(EllipticCurve)
+class SECT571K1(object):
+ name = "sect571k1"
+ key_size = 571
+
+
+@utils.register_interface(EllipticCurve)
+class SECT409K1(object):
+ name = "sect409k1"
+ key_size = 409
+
+
+@utils.register_interface(EllipticCurve)
+class SECT283K1(object):
+ name = "sect283k1"
+ key_size = 283
+
+
+@utils.register_interface(EllipticCurve)
+class SECT233K1(object):
+ name = "sect233k1"
+ key_size = 233
+
+
+@utils.register_interface(EllipticCurve)
+class SECT163K1(object):
+ name = "sect163k1"
+ key_size = 163
+
+
+@utils.register_interface(EllipticCurve)
+class SECP521R1(object):
+ name = "secp521r1"
+ key_size = 521
+
+
+@utils.register_interface(EllipticCurve)
+class SECP384R1(object):
+ name = "secp384r1"
+ key_size = 384
+
+
+@utils.register_interface(EllipticCurve)
+class SECP256R1(object):
+ name = "secp256r1"
+ key_size = 256
+
+
+@utils.register_interface(EllipticCurve)
+class SECP256K1(object):
+ name = "secp256k1"
+ key_size = 256
+
+
+@utils.register_interface(EllipticCurve)
+class SECP224R1(object):
+ name = "secp224r1"
+ key_size = 224
+
+
+@utils.register_interface(EllipticCurve)
+class SECP192R1(object):
+ name = "secp192r1"
+ key_size = 192
+
+
+@utils.register_interface(EllipticCurve)
+class BrainpoolP256R1(object):
+ name = "brainpoolP256r1"
+ key_size = 256
+
+
+@utils.register_interface(EllipticCurve)
+class BrainpoolP384R1(object):
+ name = "brainpoolP384r1"
+ key_size = 384
+
+
+@utils.register_interface(EllipticCurve)
+class BrainpoolP512R1(object):
+ name = "brainpoolP512r1"
+ key_size = 512
+
+
+_CURVE_TYPES = {
+ "prime192v1": SECP192R1,
+ "prime256v1": SECP256R1,
+ "secp192r1": SECP192R1,
+ "secp224r1": SECP224R1,
+ "secp256r1": SECP256R1,
+ "secp384r1": SECP384R1,
+ "secp521r1": SECP521R1,
+ "secp256k1": SECP256K1,
+ "sect163k1": SECT163K1,
+ "sect233k1": SECT233K1,
+ "sect283k1": SECT283K1,
+ "sect409k1": SECT409K1,
+ "sect571k1": SECT571K1,
+ "sect163r2": SECT163R2,
+ "sect233r1": SECT233R1,
+ "sect283r1": SECT283R1,
+ "sect409r1": SECT409R1,
+ "sect571r1": SECT571R1,
+ "brainpoolP256r1": BrainpoolP256R1,
+ "brainpoolP384r1": BrainpoolP384R1,
+ "brainpoolP512r1": BrainpoolP512R1,
+}
+
+
+@utils.register_interface(EllipticCurveSignatureAlgorithm)
+class ECDSA(object):
+ def __init__(self, algorithm):
+ self._algorithm = algorithm
+
+ algorithm = utils.read_only_property("_algorithm")
+
+
+def generate_private_key(curve, backend=None):
+ backend = _get_backend(backend)
+ return backend.generate_elliptic_curve_private_key(curve)
+
+
+def derive_private_key(private_value, curve, backend=None):
+ backend = _get_backend(backend)
+ if not isinstance(private_value, six.integer_types):
+ raise TypeError("private_value must be an integer type.")
+
+ if private_value <= 0:
+ raise ValueError("private_value must be a positive integer.")
+
+ if not isinstance(curve, EllipticCurve):
+ raise TypeError("curve must provide the EllipticCurve interface.")
+
+ return backend.derive_elliptic_curve_private_key(private_value, curve)
+
+
+class EllipticCurvePublicNumbers(object):
+ def __init__(self, x, y, curve):
+ if not isinstance(x, six.integer_types) or not isinstance(
+ y, six.integer_types
+ ):
+ raise TypeError("x and y must be integers.")
+
+ if not isinstance(curve, EllipticCurve):
+ raise TypeError("curve must provide the EllipticCurve interface.")
+
+ self._y = y
+ self._x = x
+ self._curve = curve
+
+ def public_key(self, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_elliptic_curve_public_numbers(self)
+
+ def encode_point(self):
+ warnings.warn(
+ "encode_point has been deprecated on EllipticCurvePublicNumbers"
+ " and will be removed in a future version. Please use "
+ "EllipticCurvePublicKey.public_bytes to obtain both "
+ "compressed and uncompressed point encoding.",
+ utils.PersistentlyDeprecated2019,
+ stacklevel=2,
+ )
+ # key_size is in bits. Convert to bytes and round up
+ byte_length = (self.curve.key_size + 7) // 8
+ return (
+ b"\x04"
+ + utils.int_to_bytes(self.x, byte_length)
+ + utils.int_to_bytes(self.y, byte_length)
+ )
+
+ @classmethod
+ def from_encoded_point(cls, curve, data):
+ if not isinstance(curve, EllipticCurve):
+ raise TypeError("curve must be an EllipticCurve instance")
+
+ warnings.warn(
+ "Support for unsafe construction of public numbers from "
+ "encoded data will be removed in a future version. "
+ "Please use EllipticCurvePublicKey.from_encoded_point",
+ utils.PersistentlyDeprecated2019,
+ stacklevel=2,
+ )
+
+ if data.startswith(b"\x04"):
+ # key_size is in bits. Convert to bytes and round up
+ byte_length = (curve.key_size + 7) // 8
+ if len(data) == 2 * byte_length + 1:
+ x = utils.int_from_bytes(data[1 : byte_length + 1], "big")
+ y = utils.int_from_bytes(data[byte_length + 1 :], "big")
+ return cls(x, y, curve)
+ else:
+ raise ValueError("Invalid elliptic curve point data length")
+ else:
+ raise ValueError("Unsupported elliptic curve point type")
+
+ curve = utils.read_only_property("_curve")
+ x = utils.read_only_property("_x")
+ y = utils.read_only_property("_y")
+
+ def __eq__(self, other):
+ if not isinstance(other, EllipticCurvePublicNumbers):
+ return NotImplemented
+
+ return (
+ self.x == other.x
+ and self.y == other.y
+ and self.curve.name == other.curve.name
+ and self.curve.key_size == other.curve.key_size
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash((self.x, self.y, self.curve.name, self.curve.key_size))
+
+ def __repr__(self):
+ return (
+ "<EllipticCurvePublicNumbers(curve={0.curve.name}, x={0.x}, "
+ "y={0.y}>".format(self)
+ )
+
+
+class EllipticCurvePrivateNumbers(object):
+ def __init__(self, private_value, public_numbers):
+ if not isinstance(private_value, six.integer_types):
+ raise TypeError("private_value must be an integer.")
+
+ if not isinstance(public_numbers, EllipticCurvePublicNumbers):
+ raise TypeError(
+ "public_numbers must be an EllipticCurvePublicNumbers "
+ "instance."
+ )
+
+ self._private_value = private_value
+ self._public_numbers = public_numbers
+
+ def private_key(self, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_elliptic_curve_private_numbers(self)
+
+ private_value = utils.read_only_property("_private_value")
+ public_numbers = utils.read_only_property("_public_numbers")
+
+ def __eq__(self, other):
+ if not isinstance(other, EllipticCurvePrivateNumbers):
+ return NotImplemented
+
+ return (
+ self.private_value == other.private_value
+ and self.public_numbers == other.public_numbers
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash((self.private_value, self.public_numbers))
+
+
+class ECDH(object):
+ pass
+
+
+_OID_TO_CURVE = {
+ EllipticCurveOID.SECP192R1: SECP192R1,
+ EllipticCurveOID.SECP224R1: SECP224R1,
+ EllipticCurveOID.SECP256K1: SECP256K1,
+ EllipticCurveOID.SECP256R1: SECP256R1,
+ EllipticCurveOID.SECP384R1: SECP384R1,
+ EllipticCurveOID.SECP521R1: SECP521R1,
+ EllipticCurveOID.BRAINPOOLP256R1: BrainpoolP256R1,
+ EllipticCurveOID.BRAINPOOLP384R1: BrainpoolP384R1,
+ EllipticCurveOID.BRAINPOOLP512R1: BrainpoolP512R1,
+ EllipticCurveOID.SECT163K1: SECT163K1,
+ EllipticCurveOID.SECT163R2: SECT163R2,
+ EllipticCurveOID.SECT233K1: SECT233K1,
+ EllipticCurveOID.SECT233R1: SECT233R1,
+ EllipticCurveOID.SECT283K1: SECT283K1,
+ EllipticCurveOID.SECT283R1: SECT283R1,
+ EllipticCurveOID.SECT409K1: SECT409K1,
+ EllipticCurveOID.SECT409R1: SECT409R1,
+ EllipticCurveOID.SECT571K1: SECT571K1,
+ EllipticCurveOID.SECT571R1: SECT571R1,
+}
+
+
+def get_curve_for_oid(oid):
+ try:
+ return _OID_TO_CURVE[oid]
+ except KeyError:
+ raise LookupError(
+ "The provided object identifier has no matching elliptic "
+ "curve class"
+ )
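A minimal usage sketch for the EC pieces above, assuming the default OpenSSL backend and the serialization module from the same package; it follows the replacement path the deprecation warnings point to (EllipticCurvePublicKey.public_bytes / from_encoded_point) and the OID lookup added here.

# Sketch: round-trip a P-256 public key through the uncompressed X9.62 point
# encoding (0x04 || X || Y) without the deprecated EllipticCurvePublicNumbers
# helpers, then use the OID -> curve class lookup defined above.
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec

private_key = ec.generate_private_key(ec.SECP256R1())
public_key = private_key.public_key()

point = public_key.public_bytes(
    serialization.Encoding.X962,
    serialization.PublicFormat.UncompressedPoint,
)
# Each coordinate is padded to (key_size + 7) // 8 bytes.
assert point[0:1] == b"\x04" and len(point) == 1 + 2 * ((256 + 7) // 8)

restored = ec.EllipticCurvePublicKey.from_encoded_point(ec.SECP256R1(), point)
assert restored.public_numbers() == public_key.public_numbers()

assert ec.get_curve_for_oid(ec.EllipticCurveOID.SECP256R1) is ec.SECP256R1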
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/ed25519.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/ed25519.py
new file mode 100644
index 0000000000..2d07a029bc
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/ed25519.py
@@ -0,0 +1,87 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+
+
+_ED25519_KEY_SIZE = 32
+_ED25519_SIG_SIZE = 64
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Ed25519PublicKey(object):
+ @classmethod
+ def from_public_bytes(cls, data):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed25519_supported():
+ raise UnsupportedAlgorithm(
+ "ed25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed25519_load_public_bytes(data)
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding, format):
+ """
+ The serialized bytes of the public key.
+ """
+
+ @abc.abstractmethod
+ def verify(self, signature, data):
+ """
+ Verify the signature.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Ed25519PrivateKey(object):
+ @classmethod
+ def generate(cls):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed25519_supported():
+ raise UnsupportedAlgorithm(
+ "ed25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed25519_generate_key()
+
+ @classmethod
+ def from_private_bytes(cls, data):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed25519_supported():
+ raise UnsupportedAlgorithm(
+ "ed25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed25519_load_private_bytes(data)
+
+ @abc.abstractmethod
+ def public_key(self):
+ """
+ The Ed25519PublicKey derived from the private key.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ """
+ The serialized bytes of the private key.
+ """
+
+ @abc.abstractmethod
+ def sign(self, data):
+ """
+ Signs the data.
+ """
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/ed448.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/ed448.py
new file mode 100644
index 0000000000..520ffcbcbc
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/ed448.py
@@ -0,0 +1,82 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Ed448PublicKey(object):
+ @classmethod
+ def from_public_bytes(cls, data):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed448_supported():
+ raise UnsupportedAlgorithm(
+ "ed448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed448_load_public_bytes(data)
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding, format):
+ """
+ The serialized bytes of the public key.
+ """
+
+ @abc.abstractmethod
+ def verify(self, signature, data):
+ """
+ Verify the signature.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Ed448PrivateKey(object):
+ @classmethod
+ def generate(cls):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed448_supported():
+ raise UnsupportedAlgorithm(
+ "ed448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+ return backend.ed448_generate_key()
+
+ @classmethod
+ def from_private_bytes(cls, data):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.ed448_supported():
+ raise UnsupportedAlgorithm(
+ "ed448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM,
+ )
+
+ return backend.ed448_load_private_bytes(data)
+
+ @abc.abstractmethod
+ def public_key(self):
+ """
+ The Ed448PublicKey derived from the private key.
+ """
+
+ @abc.abstractmethod
+ def sign(self, data):
+ """
+ Signs the data.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ """
+ The serialized bytes of the private key.
+ """
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/padding.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/padding.py
new file mode 100644
index 0000000000..fc8f6e26a9
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/padding.py
@@ -0,0 +1,80 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+from cryptography import utils
+from cryptography.hazmat.primitives import hashes
+from cryptography.hazmat.primitives.asymmetric import rsa
+
+
+@six.add_metaclass(abc.ABCMeta)
+class AsymmetricPadding(object):
+ @abc.abstractproperty
+ def name(self):
+ """
+ A string naming this padding (e.g. "PSS", "PKCS1").
+ """
+
+
+@utils.register_interface(AsymmetricPadding)
+class PKCS1v15(object):
+ name = "EMSA-PKCS1-v1_5"
+
+
+@utils.register_interface(AsymmetricPadding)
+class PSS(object):
+ MAX_LENGTH = object()
+ name = "EMSA-PSS"
+
+ def __init__(self, mgf, salt_length):
+ self._mgf = mgf
+
+ if (
+ not isinstance(salt_length, six.integer_types)
+ and salt_length is not self.MAX_LENGTH
+ ):
+ raise TypeError("salt_length must be an integer.")
+
+ if salt_length is not self.MAX_LENGTH and salt_length < 0:
+ raise ValueError("salt_length must be zero or greater.")
+
+ self._salt_length = salt_length
+
+
+@utils.register_interface(AsymmetricPadding)
+class OAEP(object):
+ name = "EME-OAEP"
+
+ def __init__(self, mgf, algorithm, label):
+ if not isinstance(algorithm, hashes.HashAlgorithm):
+ raise TypeError("Expected instance of hashes.HashAlgorithm.")
+
+ self._mgf = mgf
+ self._algorithm = algorithm
+ self._label = label
+
+
+class MGF1(object):
+ MAX_LENGTH = object()
+
+ def __init__(self, algorithm):
+ if not isinstance(algorithm, hashes.HashAlgorithm):
+ raise TypeError("Expected instance of hashes.HashAlgorithm.")
+
+ self._algorithm = algorithm
+
+
+def calculate_max_pss_salt_length(key, hash_algorithm):
+ if not isinstance(key, (rsa.RSAPrivateKey, rsa.RSAPublicKey)):
+ raise TypeError("key must be an RSA public or private key")
+ # bit length - 1 per RFC 3447
+ emlen = (key.key_size + 6) // 8
+ salt_length = emlen - hash_algorithm.digest_size - 2
+ assert salt_length >= 0
+ return salt_length
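A sketch of how these padding objects are typically assembled, and of the salt arithmetic in calculate_max_pss_salt_length: emLen = (key_size + 6) // 8, so a 2048-bit key with SHA-256 allows 256 - 32 - 2 = 222 salt bytes. Assumes the rsa module added below this file.

# Sketch: constructing PSS and OAEP padding objects and checking the PSS salt math.
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa

private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)

pss = padding.PSS(
    mgf=padding.MGF1(hashes.SHA256()),
    salt_length=padding.PSS.MAX_LENGTH,  # sentinel: use the maximum salt length
)
oaep = padding.OAEP(
    mgf=padding.MGF1(hashes.SHA256()),
    algorithm=hashes.SHA256(),
    label=None,
)

# emLen = (2048 + 6) // 8 = 256; max salt = 256 - 32 - 2 = 222 bytes.
assert padding.calculate_max_pss_salt_length(private_key, hashes.SHA256()) == 222

signature = private_key.sign(b"message", pss, hashes.SHA256())
ciphertext = private_key.public_key().encrypt(b"secret", oaep)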
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/rsa.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/rsa.py
new file mode 100644
index 0000000000..ea16bbf66e
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/rsa.py
@@ -0,0 +1,380 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+try:
+ # Only available in math in 3.5+
+ from math import gcd
+except ImportError:
+ from fractions import gcd
+
+import six
+
+from cryptography import utils
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.backends.interfaces import RSABackend
+
+
+@six.add_metaclass(abc.ABCMeta)
+class RSAPrivateKey(object):
+ @abc.abstractmethod
+ def signer(self, padding, algorithm):
+ """
+ Returns an AsymmetricSignatureContext used for signing data.
+ """
+
+ @abc.abstractmethod
+ def decrypt(self, ciphertext, padding):
+ """
+ Decrypts the provided ciphertext.
+ """
+
+ @abc.abstractproperty
+ def key_size(self):
+ """
+ The bit length of the public modulus.
+ """
+
+ @abc.abstractmethod
+ def public_key(self):
+ """
+ The RSAPublicKey associated with this private key.
+ """
+
+ @abc.abstractmethod
+ def sign(self, data, padding, algorithm):
+ """
+ Signs the data.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class RSAPrivateKeyWithSerialization(RSAPrivateKey):
+ @abc.abstractmethod
+ def private_numbers(self):
+ """
+ Returns an RSAPrivateNumbers.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ """
+ Returns the key serialized as bytes.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class RSAPublicKey(object):
+ @abc.abstractmethod
+ def verifier(self, signature, padding, algorithm):
+ """
+ Returns an AsymmetricVerificationContext used for verifying signatures.
+ """
+
+ @abc.abstractmethod
+ def encrypt(self, plaintext, padding):
+ """
+ Encrypts the given plaintext.
+ """
+
+ @abc.abstractproperty
+ def key_size(self):
+ """
+ The bit length of the public modulus.
+ """
+
+ @abc.abstractmethod
+ def public_numbers(self):
+ """
+ Returns an RSAPublicNumbers
+ """
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding, format):
+ """
+ Returns the key serialized as bytes.
+ """
+
+ @abc.abstractmethod
+ def verify(self, signature, data, padding, algorithm):
+ """
+ Verifies the signature of the data.
+ """
+
+ @abc.abstractmethod
+ def recover_data_from_signature(self, signature, padding, algorithm):
+ """
+ Recovers the original data from the signature.
+ """
+
+
+RSAPublicKeyWithSerialization = RSAPublicKey
+
+
+def generate_private_key(public_exponent, key_size, backend=None):
+ backend = _get_backend(backend)
+ if not isinstance(backend, RSABackend):
+ raise UnsupportedAlgorithm(
+ "Backend object does not implement RSABackend.",
+ _Reasons.BACKEND_MISSING_INTERFACE,
+ )
+
+ _verify_rsa_parameters(public_exponent, key_size)
+ return backend.generate_rsa_private_key(public_exponent, key_size)
+
+
+def _verify_rsa_parameters(public_exponent, key_size):
+ if public_exponent not in (3, 65537):
+ raise ValueError(
+ "public_exponent must be either 3 (for legacy compatibility) or "
+ "65537. Almost everyone should choose 65537 here!"
+ )
+
+ if key_size < 512:
+ raise ValueError("key_size must be at least 512-bits.")
+
+
+def _check_private_key_components(
+ p, q, private_exponent, dmp1, dmq1, iqmp, public_exponent, modulus
+):
+ if modulus < 3:
+ raise ValueError("modulus must be >= 3.")
+
+ if p >= modulus:
+ raise ValueError("p must be < modulus.")
+
+ if q >= modulus:
+ raise ValueError("q must be < modulus.")
+
+ if dmp1 >= modulus:
+ raise ValueError("dmp1 must be < modulus.")
+
+ if dmq1 >= modulus:
+ raise ValueError("dmq1 must be < modulus.")
+
+ if iqmp >= modulus:
+ raise ValueError("iqmp must be < modulus.")
+
+ if private_exponent >= modulus:
+ raise ValueError("private_exponent must be < modulus.")
+
+ if public_exponent < 3 or public_exponent >= modulus:
+ raise ValueError("public_exponent must be >= 3 and < modulus.")
+
+ if public_exponent & 1 == 0:
+ raise ValueError("public_exponent must be odd.")
+
+ if dmp1 & 1 == 0:
+ raise ValueError("dmp1 must be odd.")
+
+ if dmq1 & 1 == 0:
+ raise ValueError("dmq1 must be odd.")
+
+ if p * q != modulus:
+ raise ValueError("p*q must equal modulus.")
+
+
+def _check_public_key_components(e, n):
+ if n < 3:
+ raise ValueError("n must be >= 3.")
+
+ if e < 3 or e >= n:
+ raise ValueError("e must be >= 3 and < n.")
+
+ if e & 1 == 0:
+ raise ValueError("e must be odd.")
+
+
+def _modinv(e, m):
+ """
+ Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1
+ """
+ x1, x2 = 1, 0
+ a, b = e, m
+ while b > 0:
+ q, r = divmod(a, b)
+ xn = x1 - q * x2
+ a, b, x1, x2 = b, r, x2, xn
+ return x1 % m
+
+
+def rsa_crt_iqmp(p, q):
+ """
+ Compute the CRT (q ** -1) % p value from RSA primes p and q.
+ """
+ return _modinv(q, p)
+
+
+def rsa_crt_dmp1(private_exponent, p):
+ """
+ Compute the CRT private_exponent % (p - 1) value from the RSA
+ private_exponent (d) and p.
+ """
+ return private_exponent % (p - 1)
+
+
+def rsa_crt_dmq1(private_exponent, q):
+ """
+ Compute the CRT private_exponent % (q - 1) value from the RSA
+ private_exponent (d) and q.
+ """
+ return private_exponent % (q - 1)
+
+
+# Controls the number of iterations rsa_recover_prime_factors will perform
+# to obtain the prime factors. Each iteration increments by 2 so the actual
+# maximum attempts is half this number.
+_MAX_RECOVERY_ATTEMPTS = 1000
+
+
+def rsa_recover_prime_factors(n, e, d):
+ """
+ Compute factors p and q from the private exponent d. We assume that n has
+ no more than two factors. This function is adapted from code in PyCrypto.
+ """
+ # See 8.2.2(i) in Handbook of Applied Cryptography.
+ ktot = d * e - 1
+ # The quantity d*e-1 is a multiple of phi(n), even,
+ # and can be represented as t*2^s.
+ t = ktot
+ while t % 2 == 0:
+ t = t // 2
+ # Cycle through all multiplicative inverses in Zn.
+ # The algorithm is non-deterministic, but there is a 50% chance
+ # any candidate a leads to successful factoring.
+ # See "Digitalized Signatures and Public Key Functions as Intractable
+ # as Factorization", M. Rabin, 1979
+ spotted = False
+ a = 2
+ while not spotted and a < _MAX_RECOVERY_ATTEMPTS:
+ k = t
+ # Cycle through all values a^{t*2^i}=a^k
+ while k < ktot:
+ cand = pow(a, k, n)
+ # Check if a^k is a non-trivial root of unity (mod n)
+ if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1:
+ # We have found a number such that (cand-1)(cand+1)=0 (mod n).
+ # Either of the terms divides n.
+ p = gcd(cand + 1, n)
+ spotted = True
+ break
+ k *= 2
+ # This value was not any good... let's try another!
+ a += 2
+ if not spotted:
+ raise ValueError("Unable to compute factors p and q from exponent d.")
+ # Found !
+ q, r = divmod(n, p)
+ assert r == 0
+ p, q = sorted((p, q), reverse=True)
+ return (p, q)
+
+
+class RSAPrivateNumbers(object):
+ def __init__(self, p, q, d, dmp1, dmq1, iqmp, public_numbers):
+ if (
+ not isinstance(p, six.integer_types)
+ or not isinstance(q, six.integer_types)
+ or not isinstance(d, six.integer_types)
+ or not isinstance(dmp1, six.integer_types)
+ or not isinstance(dmq1, six.integer_types)
+ or not isinstance(iqmp, six.integer_types)
+ ):
+ raise TypeError(
+ "RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must"
+ " all be an integers."
+ )
+
+ if not isinstance(public_numbers, RSAPublicNumbers):
+ raise TypeError(
+ "RSAPrivateNumbers public_numbers must be an RSAPublicNumbers"
+ " instance."
+ )
+
+ self._p = p
+ self._q = q
+ self._d = d
+ self._dmp1 = dmp1
+ self._dmq1 = dmq1
+ self._iqmp = iqmp
+ self._public_numbers = public_numbers
+
+ p = utils.read_only_property("_p")
+ q = utils.read_only_property("_q")
+ d = utils.read_only_property("_d")
+ dmp1 = utils.read_only_property("_dmp1")
+ dmq1 = utils.read_only_property("_dmq1")
+ iqmp = utils.read_only_property("_iqmp")
+ public_numbers = utils.read_only_property("_public_numbers")
+
+ def private_key(self, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_rsa_private_numbers(self)
+
+ def __eq__(self, other):
+ if not isinstance(other, RSAPrivateNumbers):
+ return NotImplemented
+
+ return (
+ self.p == other.p
+ and self.q == other.q
+ and self.d == other.d
+ and self.dmp1 == other.dmp1
+ and self.dmq1 == other.dmq1
+ and self.iqmp == other.iqmp
+ and self.public_numbers == other.public_numbers
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(
+ (
+ self.p,
+ self.q,
+ self.d,
+ self.dmp1,
+ self.dmq1,
+ self.iqmp,
+ self.public_numbers,
+ )
+ )
+
+
+class RSAPublicNumbers(object):
+ def __init__(self, e, n):
+ if not isinstance(e, six.integer_types) or not isinstance(
+ n, six.integer_types
+ ):
+ raise TypeError("RSAPublicNumbers arguments must be integers.")
+
+ self._e = e
+ self._n = n
+
+ e = utils.read_only_property("_e")
+ n = utils.read_only_property("_n")
+
+ def public_key(self, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_rsa_public_numbers(self)
+
+ def __repr__(self):
+ return "<RSAPublicNumbers(e={0.e}, n={0.n})>".format(self)
+
+ def __eq__(self, other):
+ if not isinstance(other, RSAPublicNumbers):
+ return NotImplemented
+
+ return self.e == other.e and self.n == other.n
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash((self.e, self.n))
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/utils.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/utils.py
new file mode 100644
index 0000000000..5f9b677868
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/utils.py
@@ -0,0 +1,41 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+from cryptography.hazmat._der import (
+ DERReader,
+ INTEGER,
+ SEQUENCE,
+ encode_der,
+ encode_der_integer,
+)
+from cryptography.hazmat.primitives import hashes
+
+
+def decode_dss_signature(signature):
+ with DERReader(signature).read_single_element(SEQUENCE) as seq:
+ r = seq.read_element(INTEGER).as_integer()
+ s = seq.read_element(INTEGER).as_integer()
+ return r, s
+
+
+def encode_dss_signature(r, s):
+ return encode_der(
+ SEQUENCE,
+ encode_der(INTEGER, encode_der_integer(r)),
+ encode_der(INTEGER, encode_der_integer(s)),
+ )
+
+
+class Prehashed(object):
+ def __init__(self, algorithm):
+ if not isinstance(algorithm, hashes.HashAlgorithm):
+ raise TypeError("Expected instance of HashAlgorithm.")
+
+ self._algorithm = algorithm
+ self._digest_size = algorithm.digest_size
+
+ digest_size = utils.read_only_property("_digest_size")
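A round-trip sketch for the DER helpers above; r and s are small placeholder integers rather than a real ECDSA signature.

# Sketch: DER-encode an (r, s) pair and decode it back.
from cryptography.hazmat.primitives.asymmetric.utils import (
    decode_dss_signature,
    encode_dss_signature,
)

r, s = 1234567890, 987654321            # placeholder values for illustration
der = encode_dss_signature(r, s)        # SEQUENCE { INTEGER r, INTEGER s }
assert decode_dss_signature(der) == (r, s)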
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/x25519.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/x25519.py
new file mode 100644
index 0000000000..fc63691536
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/x25519.py
@@ -0,0 +1,76 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+
+
+@six.add_metaclass(abc.ABCMeta)
+class X25519PublicKey(object):
+ @classmethod
+ def from_public_bytes(cls, data):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x25519_supported():
+ raise UnsupportedAlgorithm(
+ "X25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+
+ return backend.x25519_load_public_bytes(data)
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding, format):
+ """
+ The serialized bytes of the public key.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class X25519PrivateKey(object):
+ @classmethod
+ def generate(cls):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x25519_supported():
+ raise UnsupportedAlgorithm(
+ "X25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+ return backend.x25519_generate_key()
+
+ @classmethod
+ def from_private_bytes(cls, data):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x25519_supported():
+ raise UnsupportedAlgorithm(
+ "X25519 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+
+ return backend.x25519_load_private_bytes(data)
+
+ @abc.abstractmethod
+ def public_key(self):
+ """
+        The X25519PublicKey derived from the private key.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ """
+ The serialized bytes of the private key.
+ """
+
+ @abc.abstractmethod
+ def exchange(self, peer_public_key):
+ """
+ Performs a key exchange operation using the provided peer's public key.
+ """
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/x448.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/x448.py
new file mode 100644
index 0000000000..3ac067bfd5
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/asymmetric/x448.py
@@ -0,0 +1,76 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+
+
+@six.add_metaclass(abc.ABCMeta)
+class X448PublicKey(object):
+ @classmethod
+ def from_public_bytes(cls, data):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x448_supported():
+ raise UnsupportedAlgorithm(
+ "X448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+
+ return backend.x448_load_public_bytes(data)
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding, format):
+ """
+ The serialized bytes of the public key.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class X448PrivateKey(object):
+ @classmethod
+ def generate(cls):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x448_supported():
+ raise UnsupportedAlgorithm(
+ "X448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+ return backend.x448_generate_key()
+
+ @classmethod
+ def from_private_bytes(cls, data):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.x448_supported():
+ raise UnsupportedAlgorithm(
+ "X448 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM,
+ )
+
+ return backend.x448_load_private_bytes(data)
+
+ @abc.abstractmethod
+ def public_key(self):
+ """
+        The X448PublicKey derived from the private key.
+ """
+
+ @abc.abstractmethod
+ def private_bytes(self, encoding, format, encryption_algorithm):
+ """
+ The serialized bytes of the private key.
+ """
+
+ @abc.abstractmethod
+ def exchange(self, peer_public_key):
+ """
+ Performs a key exchange operation using the provided peer's public key.
+ """
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/__init__.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/__init__.py
new file mode 100644
index 0000000000..4380f72b2e
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/__init__.py
@@ -0,0 +1,26 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography.hazmat.primitives.ciphers.base import (
+ AEADCipherContext,
+ AEADDecryptionContext,
+ AEADEncryptionContext,
+ BlockCipherAlgorithm,
+ Cipher,
+ CipherAlgorithm,
+ CipherContext,
+)
+
+
+__all__ = [
+ "Cipher",
+ "CipherAlgorithm",
+ "BlockCipherAlgorithm",
+ "CipherContext",
+ "AEADCipherContext",
+ "AEADDecryptionContext",
+ "AEADEncryptionContext",
+]
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/aead.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/aead.py
new file mode 100644
index 0000000000..c8c93955ce
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/aead.py
@@ -0,0 +1,174 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import os
+
+from cryptography import exceptions, utils
+from cryptography.hazmat.backends.openssl import aead
+from cryptography.hazmat.backends.openssl.backend import backend
+
+
+class ChaCha20Poly1305(object):
+ _MAX_SIZE = 2 ** 32
+
+ def __init__(self, key):
+ if not backend.aead_cipher_supported(self):
+ raise exceptions.UnsupportedAlgorithm(
+ "ChaCha20Poly1305 is not supported by this version of OpenSSL",
+ exceptions._Reasons.UNSUPPORTED_CIPHER,
+ )
+ utils._check_byteslike("key", key)
+
+ if len(key) != 32:
+ raise ValueError("ChaCha20Poly1305 key must be 32 bytes.")
+
+ self._key = key
+
+ @classmethod
+ def generate_key(cls):
+ return os.urandom(32)
+
+ def encrypt(self, nonce, data, associated_data):
+ if associated_data is None:
+ associated_data = b""
+
+ if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
+ # This is OverflowError to match what cffi would raise
+ raise OverflowError(
+ "Data or associated data too long. Max 2**32 bytes"
+ )
+
+ self._check_params(nonce, data, associated_data)
+ return aead._encrypt(backend, self, nonce, data, associated_data, 16)
+
+ def decrypt(self, nonce, data, associated_data):
+ if associated_data is None:
+ associated_data = b""
+
+ self._check_params(nonce, data, associated_data)
+ return aead._decrypt(backend, self, nonce, data, associated_data, 16)
+
+ def _check_params(self, nonce, data, associated_data):
+ utils._check_byteslike("nonce", nonce)
+ utils._check_bytes("data", data)
+ utils._check_bytes("associated_data", associated_data)
+ if len(nonce) != 12:
+ raise ValueError("Nonce must be 12 bytes")
+
+
+class AESCCM(object):
+ _MAX_SIZE = 2 ** 32
+
+ def __init__(self, key, tag_length=16):
+ utils._check_byteslike("key", key)
+ if len(key) not in (16, 24, 32):
+ raise ValueError("AESCCM key must be 128, 192, or 256 bits.")
+
+ self._key = key
+ if not isinstance(tag_length, int):
+ raise TypeError("tag_length must be an integer")
+
+ if tag_length not in (4, 6, 8, 10, 12, 14, 16):
+ raise ValueError("Invalid tag_length")
+
+ self._tag_length = tag_length
+
+ @classmethod
+ def generate_key(cls, bit_length):
+ if not isinstance(bit_length, int):
+ raise TypeError("bit_length must be an integer")
+
+ if bit_length not in (128, 192, 256):
+ raise ValueError("bit_length must be 128, 192, or 256")
+
+ return os.urandom(bit_length // 8)
+
+ def encrypt(self, nonce, data, associated_data):
+ if associated_data is None:
+ associated_data = b""
+
+ if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
+ # This is OverflowError to match what cffi would raise
+ raise OverflowError(
+ "Data or associated data too long. Max 2**32 bytes"
+ )
+
+ self._check_params(nonce, data, associated_data)
+ self._validate_lengths(nonce, len(data))
+ return aead._encrypt(
+ backend, self, nonce, data, associated_data, self._tag_length
+ )
+
+ def decrypt(self, nonce, data, associated_data):
+ if associated_data is None:
+ associated_data = b""
+
+ self._check_params(nonce, data, associated_data)
+ return aead._decrypt(
+ backend, self, nonce, data, associated_data, self._tag_length
+ )
+
+ def _validate_lengths(self, nonce, data_len):
+ # For information about computing this, see
+ # https://tools.ietf.org/html/rfc3610#section-2.1
+ l_val = 15 - len(nonce)
+ if 2 ** (8 * l_val) < data_len:
+ raise ValueError("Data too long for nonce")
+
+ def _check_params(self, nonce, data, associated_data):
+ utils._check_byteslike("nonce", nonce)
+ utils._check_bytes("data", data)
+ utils._check_bytes("associated_data", associated_data)
+ if not 7 <= len(nonce) <= 13:
+ raise ValueError("Nonce must be between 7 and 13 bytes")
+
+
+class AESGCM(object):
+ _MAX_SIZE = 2 ** 32
+
+ def __init__(self, key):
+ utils._check_byteslike("key", key)
+ if len(key) not in (16, 24, 32):
+ raise ValueError("AESGCM key must be 128, 192, or 256 bits.")
+
+ self._key = key
+
+ @classmethod
+ def generate_key(cls, bit_length):
+ if not isinstance(bit_length, int):
+ raise TypeError("bit_length must be an integer")
+
+ if bit_length not in (128, 192, 256):
+ raise ValueError("bit_length must be 128, 192, or 256")
+
+ return os.urandom(bit_length // 8)
+
+ def encrypt(self, nonce, data, associated_data):
+ if associated_data is None:
+ associated_data = b""
+
+ if len(data) > self._MAX_SIZE or len(associated_data) > self._MAX_SIZE:
+ # This is OverflowError to match what cffi would raise
+ raise OverflowError(
+ "Data or associated data too long. Max 2**32 bytes"
+ )
+
+ self._check_params(nonce, data, associated_data)
+ return aead._encrypt(backend, self, nonce, data, associated_data, 16)
+
+ def decrypt(self, nonce, data, associated_data):
+ if associated_data is None:
+ associated_data = b""
+
+ self._check_params(nonce, data, associated_data)
+ return aead._decrypt(backend, self, nonce, data, associated_data, 16)
+
+ def _check_params(self, nonce, data, associated_data):
+ utils._check_byteslike("nonce", nonce)
+ utils._check_bytes("data", data)
+ utils._check_bytes("associated_data", associated_data)
+ if len(nonce) < 8 or len(nonce) > 128:
+ raise ValueError("Nonce must be between 8 and 128 bytes")
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/algorithms.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/algorithms.py
new file mode 100644
index 0000000000..8072cedd17
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/algorithms.py
@@ -0,0 +1,170 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+from cryptography.hazmat.primitives.ciphers import (
+ BlockCipherAlgorithm,
+ CipherAlgorithm,
+)
+from cryptography.hazmat.primitives.ciphers.modes import ModeWithNonce
+
+
+def _verify_key_size(algorithm, key):
+ # Verify that the key is instance of bytes
+ utils._check_byteslike("key", key)
+
+ # Verify that the key size matches the expected key size
+ if len(key) * 8 not in algorithm.key_sizes:
+ raise ValueError(
+ "Invalid key size ({}) for {}.".format(
+ len(key) * 8, algorithm.name
+ )
+ )
+ return key
+
+
+@utils.register_interface(BlockCipherAlgorithm)
+@utils.register_interface(CipherAlgorithm)
+class AES(object):
+ name = "AES"
+ block_size = 128
+ # 512 added to support AES-256-XTS, which uses 512-bit keys
+ key_sizes = frozenset([128, 192, 256, 512])
+
+ def __init__(self, key):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self):
+ return len(self.key) * 8
+
+
+@utils.register_interface(BlockCipherAlgorithm)
+@utils.register_interface(CipherAlgorithm)
+class Camellia(object):
+ name = "camellia"
+ block_size = 128
+ key_sizes = frozenset([128, 192, 256])
+
+ def __init__(self, key):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self):
+ return len(self.key) * 8
+
+
+@utils.register_interface(BlockCipherAlgorithm)
+@utils.register_interface(CipherAlgorithm)
+class TripleDES(object):
+ name = "3DES"
+ block_size = 64
+ key_sizes = frozenset([64, 128, 192])
+
+ def __init__(self, key):
+ if len(key) == 8:
+ key += key + key
+ elif len(key) == 16:
+ key += key[:8]
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self):
+ return len(self.key) * 8
+
+
+@utils.register_interface(BlockCipherAlgorithm)
+@utils.register_interface(CipherAlgorithm)
+class Blowfish(object):
+ name = "Blowfish"
+ block_size = 64
+ key_sizes = frozenset(range(32, 449, 8))
+
+ def __init__(self, key):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self):
+ return len(self.key) * 8
+
+
+@utils.register_interface(BlockCipherAlgorithm)
+@utils.register_interface(CipherAlgorithm)
+class CAST5(object):
+ name = "CAST5"
+ block_size = 64
+ key_sizes = frozenset(range(40, 129, 8))
+
+ def __init__(self, key):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self):
+ return len(self.key) * 8
+
+
+@utils.register_interface(CipherAlgorithm)
+class ARC4(object):
+ name = "RC4"
+ key_sizes = frozenset([40, 56, 64, 80, 128, 160, 192, 256])
+
+ def __init__(self, key):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self):
+ return len(self.key) * 8
+
+
+@utils.register_interface(CipherAlgorithm)
+class IDEA(object):
+ name = "IDEA"
+ block_size = 64
+ key_sizes = frozenset([128])
+
+ def __init__(self, key):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self):
+ return len(self.key) * 8
+
+
+@utils.register_interface(BlockCipherAlgorithm)
+@utils.register_interface(CipherAlgorithm)
+class SEED(object):
+ name = "SEED"
+ block_size = 128
+ key_sizes = frozenset([128])
+
+ def __init__(self, key):
+ self.key = _verify_key_size(self, key)
+
+ @property
+ def key_size(self):
+ return len(self.key) * 8
+
+
+@utils.register_interface(CipherAlgorithm)
+@utils.register_interface(ModeWithNonce)
+class ChaCha20(object):
+ name = "ChaCha20"
+ key_sizes = frozenset([256])
+
+ def __init__(self, key, nonce):
+ self.key = _verify_key_size(self, key)
+ utils._check_byteslike("nonce", nonce)
+
+ if len(nonce) != 16:
+ raise ValueError("nonce must be 128-bits (16 bytes)")
+
+ self._nonce = nonce
+
+ nonce = utils.read_only_property("_nonce")
+
+ @property
+ def key_size(self):
+ return len(self.key) * 8
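A sketch of the key-size checks above: 512-bit AES keys exist only to serve AES-256-XTS, and ChaCha20 takes a 256-bit key plus a 16-byte nonce value.

# Sketch: instantiating algorithms with the key sizes enforced by _verify_key_size.
import os

from cryptography.hazmat.primitives.ciphers import algorithms

aes_xts_key = os.urandom(64)            # 512 bits: only meaningful for XTS mode
aes = algorithms.AES(aes_xts_key)
assert aes.key_size == 512

chacha = algorithms.ChaCha20(os.urandom(32), os.urandom(16))
assert chacha.key_size == 256

try:
    algorithms.AES(os.urandom(17))      # 136 bits: rejected by _verify_key_size
except ValueError:
    pass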
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/base.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/base.py
new file mode 100644
index 0000000000..dae425a299
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/base.py
@@ -0,0 +1,241 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ AlreadyUpdated,
+ NotYetFinalized,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.backends.interfaces import CipherBackend
+from cryptography.hazmat.primitives.ciphers import modes
+
+
+@six.add_metaclass(abc.ABCMeta)
+class CipherAlgorithm(object):
+ @abc.abstractproperty
+ def name(self):
+ """
+        A string naming this algorithm (e.g. "AES", "Camellia").
+ """
+
+ @abc.abstractproperty
+ def key_size(self):
+ """
+ The size of the key being used as an integer in bits (e.g. 128, 256).
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class BlockCipherAlgorithm(object):
+ @abc.abstractproperty
+ def block_size(self):
+ """
+ The size of a block as an integer in bits (e.g. 64, 128).
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class CipherContext(object):
+ @abc.abstractmethod
+ def update(self, data):
+ """
+ Processes the provided bytes through the cipher and returns the results
+ as bytes.
+ """
+
+ @abc.abstractmethod
+ def update_into(self, data, buf):
+ """
+ Processes the provided bytes and writes the resulting data into the
+ provided buffer. Returns the number of bytes written.
+ """
+
+ @abc.abstractmethod
+ def finalize(self):
+ """
+ Returns the results of processing the final block as bytes.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class AEADCipherContext(object):
+ @abc.abstractmethod
+ def authenticate_additional_data(self, data):
+ """
+ Authenticates the provided bytes.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class AEADDecryptionContext(object):
+ @abc.abstractmethod
+ def finalize_with_tag(self, tag):
+ """
+ Returns the results of processing the final block as bytes and allows
+ delayed passing of the authentication tag.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class AEADEncryptionContext(object):
+ @abc.abstractproperty
+ def tag(self):
+ """
+ Returns tag bytes. This is only available after encryption is
+ finalized.
+ """
+
+
+class Cipher(object):
+ def __init__(self, algorithm, mode, backend=None):
+ backend = _get_backend(backend)
+ if not isinstance(backend, CipherBackend):
+ raise UnsupportedAlgorithm(
+ "Backend object does not implement CipherBackend.",
+ _Reasons.BACKEND_MISSING_INTERFACE,
+ )
+
+ if not isinstance(algorithm, CipherAlgorithm):
+ raise TypeError("Expected interface of CipherAlgorithm.")
+
+ if mode is not None:
+ mode.validate_for_algorithm(algorithm)
+
+ self.algorithm = algorithm
+ self.mode = mode
+ self._backend = backend
+
+ def encryptor(self):
+ if isinstance(self.mode, modes.ModeWithAuthenticationTag):
+ if self.mode.tag is not None:
+ raise ValueError(
+ "Authentication tag must be None when encrypting."
+ )
+ ctx = self._backend.create_symmetric_encryption_ctx(
+ self.algorithm, self.mode
+ )
+ return self._wrap_ctx(ctx, encrypt=True)
+
+ def decryptor(self):
+ ctx = self._backend.create_symmetric_decryption_ctx(
+ self.algorithm, self.mode
+ )
+ return self._wrap_ctx(ctx, encrypt=False)
+
+ def _wrap_ctx(self, ctx, encrypt):
+ if isinstance(self.mode, modes.ModeWithAuthenticationTag):
+ if encrypt:
+ return _AEADEncryptionContext(ctx)
+ else:
+ return _AEADCipherContext(ctx)
+ else:
+ return _CipherContext(ctx)
+
+
+@utils.register_interface(CipherContext)
+class _CipherContext(object):
+ def __init__(self, ctx):
+ self._ctx = ctx
+
+ def update(self, data):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ return self._ctx.update(data)
+
+ def update_into(self, data, buf):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ return self._ctx.update_into(data, buf)
+
+ def finalize(self):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ data = self._ctx.finalize()
+ self._ctx = None
+ return data
+
+
+@utils.register_interface(AEADCipherContext)
+@utils.register_interface(CipherContext)
+@utils.register_interface(AEADDecryptionContext)
+class _AEADCipherContext(object):
+ def __init__(self, ctx):
+ self._ctx = ctx
+ self._bytes_processed = 0
+ self._aad_bytes_processed = 0
+ self._tag = None
+ self._updated = False
+
+ def _check_limit(self, data_size):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ self._updated = True
+ self._bytes_processed += data_size
+ if self._bytes_processed > self._ctx._mode._MAX_ENCRYPTED_BYTES:
+ raise ValueError(
+ "{} has a maximum encrypted byte limit of {}".format(
+ self._ctx._mode.name, self._ctx._mode._MAX_ENCRYPTED_BYTES
+ )
+ )
+
+ def update(self, data):
+ self._check_limit(len(data))
+ return self._ctx.update(data)
+
+ def update_into(self, data, buf):
+ self._check_limit(len(data))
+ return self._ctx.update_into(data, buf)
+
+ def finalize(self):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ data = self._ctx.finalize()
+ self._tag = self._ctx.tag
+ self._ctx = None
+ return data
+
+ def finalize_with_tag(self, tag):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ data = self._ctx.finalize_with_tag(tag)
+ self._tag = self._ctx.tag
+ self._ctx = None
+ return data
+
+ def authenticate_additional_data(self, data):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ if self._updated:
+ raise AlreadyUpdated("Update has been called on this context.")
+
+ self._aad_bytes_processed += len(data)
+ if self._aad_bytes_processed > self._ctx._mode._MAX_AAD_BYTES:
+ raise ValueError(
+ "{} has a maximum AAD byte limit of {}".format(
+ self._ctx._mode.name, self._ctx._mode._MAX_AAD_BYTES
+ )
+ )
+
+ self._ctx.authenticate_additional_data(data)
+
+
+@utils.register_interface(AEADEncryptionContext)
+class _AEADEncryptionContext(_AEADCipherContext):
+ @property
+ def tag(self):
+ if self._ctx is not None:
+ raise NotYetFinalized(
+ "You must finalize encryption before " "getting the tag."
+ )
+ return self._tag
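A sketch of the Cipher/context flow above using AES-GCM, so that both the tag property on the encryption context and tag verification on the decryption side are exercised. Assumes the default OpenSSL backend.

# Sketch: AES-GCM via Cipher, exercising the AEAD context wrappers above.
import os

from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key, iv = os.urandom(32), os.urandom(12)

encryptor = Cipher(algorithms.AES(key), modes.GCM(iv)).encryptor()
encryptor.authenticate_additional_data(b"header")    # must precede update()
ciphertext = encryptor.update(b"payload") + encryptor.finalize()
tag = encryptor.tag                                  # only available after finalize

decryptor = Cipher(algorithms.AES(key), modes.GCM(iv, tag)).decryptor()
decryptor.authenticate_additional_data(b"header")
assert decryptor.update(ciphertext) + decryptor.finalize() == b"payload"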
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/modes.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/modes.py
new file mode 100644
index 0000000000..0ba0f2b5a1
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/ciphers/modes.py
@@ -0,0 +1,225 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+from cryptography import utils
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Mode(object):
+ @abc.abstractproperty
+ def name(self):
+ """
+ A string naming this mode (e.g. "ECB", "CBC").
+ """
+
+ @abc.abstractmethod
+ def validate_for_algorithm(self, algorithm):
+ """
+ Checks that all the necessary invariants of this (mode, algorithm)
+ combination are met.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class ModeWithInitializationVector(object):
+ @abc.abstractproperty
+ def initialization_vector(self):
+ """
+ The value of the initialization vector for this mode as bytes.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class ModeWithTweak(object):
+ @abc.abstractproperty
+ def tweak(self):
+ """
+ The value of the tweak for this mode as bytes.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class ModeWithNonce(object):
+ @abc.abstractproperty
+ def nonce(self):
+ """
+ The value of the nonce for this mode as bytes.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class ModeWithAuthenticationTag(object):
+ @abc.abstractproperty
+ def tag(self):
+ """
+ The value of the tag supplied to the constructor of this mode.
+ """
+
+
+def _check_aes_key_length(self, algorithm):
+ if algorithm.key_size > 256 and algorithm.name == "AES":
+ raise ValueError(
+ "Only 128, 192, and 256 bit keys are allowed for this AES mode"
+ )
+
+
+def _check_iv_length(self, algorithm):
+ if len(self.initialization_vector) * 8 != algorithm.block_size:
+ raise ValueError(
+ "Invalid IV size ({}) for {}.".format(
+ len(self.initialization_vector), self.name
+ )
+ )
+
+
+def _check_iv_and_key_length(self, algorithm):
+ _check_aes_key_length(self, algorithm)
+ _check_iv_length(self, algorithm)
+
+
+@utils.register_interface(Mode)
+@utils.register_interface(ModeWithInitializationVector)
+class CBC(object):
+ name = "CBC"
+
+ def __init__(self, initialization_vector):
+ utils._check_byteslike("initialization_vector", initialization_vector)
+ self._initialization_vector = initialization_vector
+
+ initialization_vector = utils.read_only_property("_initialization_vector")
+ validate_for_algorithm = _check_iv_and_key_length
+
+
+@utils.register_interface(Mode)
+@utils.register_interface(ModeWithTweak)
+class XTS(object):
+ name = "XTS"
+
+ def __init__(self, tweak):
+ utils._check_byteslike("tweak", tweak)
+
+ if len(tweak) != 16:
+ raise ValueError("tweak must be 128-bits (16 bytes)")
+
+ self._tweak = tweak
+
+ tweak = utils.read_only_property("_tweak")
+
+ def validate_for_algorithm(self, algorithm):
+ if algorithm.key_size not in (256, 512):
+ raise ValueError(
+ "The XTS specification requires a 256-bit key for AES-128-XTS"
+ " and 512-bit key for AES-256-XTS"
+ )
+
+
+@utils.register_interface(Mode)
+class ECB(object):
+ name = "ECB"
+
+ validate_for_algorithm = _check_aes_key_length
+
+
+@utils.register_interface(Mode)
+@utils.register_interface(ModeWithInitializationVector)
+class OFB(object):
+ name = "OFB"
+
+ def __init__(self, initialization_vector):
+ utils._check_byteslike("initialization_vector", initialization_vector)
+ self._initialization_vector = initialization_vector
+
+ initialization_vector = utils.read_only_property("_initialization_vector")
+ validate_for_algorithm = _check_iv_and_key_length
+
+
+@utils.register_interface(Mode)
+@utils.register_interface(ModeWithInitializationVector)
+class CFB(object):
+ name = "CFB"
+
+ def __init__(self, initialization_vector):
+ utils._check_byteslike("initialization_vector", initialization_vector)
+ self._initialization_vector = initialization_vector
+
+ initialization_vector = utils.read_only_property("_initialization_vector")
+ validate_for_algorithm = _check_iv_and_key_length
+
+
+@utils.register_interface(Mode)
+@utils.register_interface(ModeWithInitializationVector)
+class CFB8(object):
+ name = "CFB8"
+
+ def __init__(self, initialization_vector):
+ utils._check_byteslike("initialization_vector", initialization_vector)
+ self._initialization_vector = initialization_vector
+
+ initialization_vector = utils.read_only_property("_initialization_vector")
+ validate_for_algorithm = _check_iv_and_key_length
+
+
+@utils.register_interface(Mode)
+@utils.register_interface(ModeWithNonce)
+class CTR(object):
+ name = "CTR"
+
+ def __init__(self, nonce):
+ utils._check_byteslike("nonce", nonce)
+ self._nonce = nonce
+
+ nonce = utils.read_only_property("_nonce")
+
+ def validate_for_algorithm(self, algorithm):
+ _check_aes_key_length(self, algorithm)
+ if len(self.nonce) * 8 != algorithm.block_size:
+ raise ValueError(
+ "Invalid nonce size ({}) for {}.".format(
+ len(self.nonce), self.name
+ )
+ )
+
+
+@utils.register_interface(Mode)
+@utils.register_interface(ModeWithInitializationVector)
+@utils.register_interface(ModeWithAuthenticationTag)
+class GCM(object):
+ name = "GCM"
+ _MAX_ENCRYPTED_BYTES = (2 ** 39 - 256) // 8
+ _MAX_AAD_BYTES = (2 ** 64) // 8
+
+ def __init__(self, initialization_vector, tag=None, min_tag_length=16):
+ # OpenSSL 3.0.0 constrains GCM IVs to [64, 1024] bits inclusive
+ # This is a sane limit anyway so we'll enforce it here.
+ utils._check_byteslike("initialization_vector", initialization_vector)
+ if len(initialization_vector) < 8 or len(initialization_vector) > 128:
+ raise ValueError(
+ "initialization_vector must be between 8 and 128 bytes (64 "
+ "and 1024 bits)."
+ )
+ self._initialization_vector = initialization_vector
+ if tag is not None:
+ utils._check_bytes("tag", tag)
+ if min_tag_length < 4:
+ raise ValueError("min_tag_length must be >= 4")
+ if len(tag) < min_tag_length:
+ raise ValueError(
+ "Authentication tag must be {} bytes or longer.".format(
+ min_tag_length
+ )
+ )
+ self._tag = tag
+ self._min_tag_length = min_tag_length
+
+ tag = utils.read_only_property("_tag")
+ initialization_vector = utils.read_only_property("_initialization_vector")
+
+ def validate_for_algorithm(self, algorithm):
+ _check_aes_key_length(self, algorithm)
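The validators above tie IV and nonce lengths to the algorithm's block size; a short sketch of what they accept and reject.

# Sketch: mode construction and the length checks performed above.
import os

from cryptography.hazmat.primitives.ciphers import algorithms, modes

cbc = modes.CBC(os.urandom(16))         # AES block size is 128 bits
cbc.validate_for_algorithm(algorithms.AES(os.urandom(32)))

try:
    modes.CBC(os.urandom(8)).validate_for_algorithm(algorithms.AES(os.urandom(32)))
except ValueError:
    pass  # 64-bit IV rejected for a 128-bit block cipher

try:
    modes.GCM(os.urandom(4))            # below the 8-byte minimum
except ValueError:
    pass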
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/cmac.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/cmac.py
new file mode 100644
index 0000000000..bf962c9069
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/cmac.py
@@ -0,0 +1,64 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.backends.interfaces import CMACBackend
+from cryptography.hazmat.primitives import ciphers
+
+
+class CMAC(object):
+ def __init__(self, algorithm, backend=None, ctx=None):
+ backend = _get_backend(backend)
+ if not isinstance(backend, CMACBackend):
+ raise UnsupportedAlgorithm(
+ "Backend object does not implement CMACBackend.",
+ _Reasons.BACKEND_MISSING_INTERFACE,
+ )
+
+ if not isinstance(algorithm, ciphers.BlockCipherAlgorithm):
+ raise TypeError("Expected instance of BlockCipherAlgorithm.")
+ self._algorithm = algorithm
+
+ self._backend = backend
+ if ctx is None:
+ self._ctx = self._backend.create_cmac_ctx(self._algorithm)
+ else:
+ self._ctx = ctx
+
+ def update(self, data):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ utils._check_bytes("data", data)
+ self._ctx.update(data)
+
+ def finalize(self):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ digest = self._ctx.finalize()
+ self._ctx = None
+ return digest
+
+ def verify(self, signature):
+ utils._check_bytes("signature", signature)
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ ctx, self._ctx = self._ctx, None
+ ctx.verify(signature)
+
+ def copy(self):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ return CMAC(
+ self._algorithm, backend=self._backend, ctx=self._ctx.copy()
+ )
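A sketch of the CMAC flow above with AES; verify() consumes the context, so a copy() is taken first when both the raw tag and a later verification are needed.

# Sketch: AES-CMAC generation and verification using the class above.
import os

from cryptography.hazmat.primitives import cmac
from cryptography.hazmat.primitives.ciphers import algorithms

key = os.urandom(32)

c = cmac.CMAC(algorithms.AES(key))
c.update(b"message to authenticate")
tag = c.copy().finalize()               # keep the original context for verify()
c.verify(tag)                           # raises InvalidSignature on mismatch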
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/constant_time.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/constant_time.py
new file mode 100644
index 0000000000..7f41b9efa5
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/constant_time.py
@@ -0,0 +1,14 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import hmac
+
+
+def bytes_eq(a, b):
+ if not isinstance(a, bytes) or not isinstance(b, bytes):
+ raise TypeError("a and b must be bytes.")
+
+ return hmac.compare_digest(a, b)
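bytes_eq above is the comparison to reach for when checking MACs or other secrets; a minimal sketch:

# Sketch: timing-safe comparison of two MAC values.
from cryptography.hazmat.primitives import constant_time

assert constant_time.bytes_eq(b"abc", b"abc")
assert not constant_time.bytes_eq(b"abc", b"abd")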
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/hashes.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/hashes.py
new file mode 100644
index 0000000000..18e2bab363
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/hashes.py
@@ -0,0 +1,259 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.backends.interfaces import HashBackend
+
+
+@six.add_metaclass(abc.ABCMeta)
+class HashAlgorithm(object):
+ @abc.abstractproperty
+ def name(self):
+ """
+ A string naming this algorithm (e.g. "sha256", "md5").
+ """
+
+ @abc.abstractproperty
+ def digest_size(self):
+ """
+ The size of the resulting digest in bytes.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class HashContext(object):
+ @abc.abstractproperty
+ def algorithm(self):
+ """
+ A HashAlgorithm that will be used by this context.
+ """
+
+ @abc.abstractmethod
+ def update(self, data):
+ """
+ Processes the provided bytes through the hash.
+ """
+
+ @abc.abstractmethod
+ def finalize(self):
+ """
+ Finalizes the hash context and returns the hash digest as bytes.
+ """
+
+ @abc.abstractmethod
+ def copy(self):
+ """
+ Return a HashContext that is a copy of the current context.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class ExtendableOutputFunction(object):
+ """
+ An interface for extendable output functions.
+ """
+
+
+@utils.register_interface(HashContext)
+class Hash(object):
+ def __init__(self, algorithm, backend=None, ctx=None):
+ backend = _get_backend(backend)
+ if not isinstance(backend, HashBackend):
+ raise UnsupportedAlgorithm(
+ "Backend object does not implement HashBackend.",
+ _Reasons.BACKEND_MISSING_INTERFACE,
+ )
+
+ if not isinstance(algorithm, HashAlgorithm):
+ raise TypeError("Expected instance of hashes.HashAlgorithm.")
+ self._algorithm = algorithm
+
+ self._backend = backend
+
+ if ctx is None:
+ self._ctx = self._backend.create_hash_ctx(self.algorithm)
+ else:
+ self._ctx = ctx
+
+ algorithm = utils.read_only_property("_algorithm")
+
+ def update(self, data):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ utils._check_byteslike("data", data)
+ self._ctx.update(data)
+
+ def copy(self):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ return Hash(
+ self.algorithm, backend=self._backend, ctx=self._ctx.copy()
+ )
+
+ def finalize(self):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ digest = self._ctx.finalize()
+ self._ctx = None
+ return digest
+
+
+@utils.register_interface(HashAlgorithm)
+class SHA1(object):
+ name = "sha1"
+ digest_size = 20
+ block_size = 64
+
+
+@utils.register_interface(HashAlgorithm)
+class SHA512_224(object): # noqa: N801
+ name = "sha512-224"
+ digest_size = 28
+ block_size = 128
+
+
+@utils.register_interface(HashAlgorithm)
+class SHA512_256(object): # noqa: N801
+ name = "sha512-256"
+ digest_size = 32
+ block_size = 128
+
+
+@utils.register_interface(HashAlgorithm)
+class SHA224(object):
+ name = "sha224"
+ digest_size = 28
+ block_size = 64
+
+
+@utils.register_interface(HashAlgorithm)
+class SHA256(object):
+ name = "sha256"
+ digest_size = 32
+ block_size = 64
+
+
+@utils.register_interface(HashAlgorithm)
+class SHA384(object):
+ name = "sha384"
+ digest_size = 48
+ block_size = 128
+
+
+@utils.register_interface(HashAlgorithm)
+class SHA512(object):
+ name = "sha512"
+ digest_size = 64
+ block_size = 128
+
+
+@utils.register_interface(HashAlgorithm)
+class SHA3_224(object): # noqa: N801
+ name = "sha3-224"
+ digest_size = 28
+
+
+@utils.register_interface(HashAlgorithm)
+class SHA3_256(object): # noqa: N801
+ name = "sha3-256"
+ digest_size = 32
+
+
+@utils.register_interface(HashAlgorithm)
+class SHA3_384(object): # noqa: N801
+ name = "sha3-384"
+ digest_size = 48
+
+
+@utils.register_interface(HashAlgorithm)
+class SHA3_512(object): # noqa: N801
+ name = "sha3-512"
+ digest_size = 64
+
+
+@utils.register_interface(HashAlgorithm)
+@utils.register_interface(ExtendableOutputFunction)
+class SHAKE128(object):
+ name = "shake128"
+
+ def __init__(self, digest_size):
+ if not isinstance(digest_size, six.integer_types):
+ raise TypeError("digest_size must be an integer")
+
+ if digest_size < 1:
+ raise ValueError("digest_size must be a positive integer")
+
+ self._digest_size = digest_size
+
+ digest_size = utils.read_only_property("_digest_size")
+
+
+@utils.register_interface(HashAlgorithm)
+@utils.register_interface(ExtendableOutputFunction)
+class SHAKE256(object):
+ name = "shake256"
+
+ def __init__(self, digest_size):
+ if not isinstance(digest_size, six.integer_types):
+ raise TypeError("digest_size must be an integer")
+
+ if digest_size < 1:
+ raise ValueError("digest_size must be a positive integer")
+
+ self._digest_size = digest_size
+
+ digest_size = utils.read_only_property("_digest_size")
+
+
+@utils.register_interface(HashAlgorithm)
+class MD5(object):
+ name = "md5"
+ digest_size = 16
+ block_size = 64
+
+
+@utils.register_interface(HashAlgorithm)
+class BLAKE2b(object):
+ name = "blake2b"
+ _max_digest_size = 64
+ _min_digest_size = 1
+ block_size = 128
+
+ def __init__(self, digest_size):
+
+ if digest_size != 64:
+ raise ValueError("Digest size must be 64")
+
+ self._digest_size = digest_size
+
+ digest_size = utils.read_only_property("_digest_size")
+
+
+@utils.register_interface(HashAlgorithm)
+class BLAKE2s(object):
+ name = "blake2s"
+ block_size = 64
+ _max_digest_size = 32
+ _min_digest_size = 1
+
+ def __init__(self, digest_size):
+
+ if digest_size != 32:
+ raise ValueError("Digest size must be 32")
+
+ self._digest_size = digest_size
+
+ digest_size = utils.read_only_property("_digest_size")
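
A minimal usage sketch for the Hash context and algorithm classes above, assuming the vendored package is importable as cryptography with a working OpenSSL backend; the messages are illustrative only.

import binascii

from cryptography.hazmat.primitives import hashes

# Feed data incrementally, then finalize exactly once; the context is then dead.
digest = hashes.Hash(hashes.SHA256())
digest.update(b"hello ")
digest.update(b"world")
print(binascii.hexlify(digest.finalize()))  # 32-byte SHA-256 digest

# copy() forks the internal state, so a shared prefix is only hashed once.
prefix = hashes.Hash(hashes.SHA256())
prefix.update(b"common header|")
fork = prefix.copy()
fork.update(b"variant A")
print(binascii.hexlify(fork.finalize()))
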
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/hmac.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/hmac.py
new file mode 100644
index 0000000000..8c421dc68d
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/hmac.py
@@ -0,0 +1,70 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.backends.interfaces import HMACBackend
+from cryptography.hazmat.primitives import hashes
+
+
+@utils.register_interface(hashes.HashContext)
+class HMAC(object):
+ def __init__(self, key, algorithm, backend=None, ctx=None):
+ backend = _get_backend(backend)
+ if not isinstance(backend, HMACBackend):
+ raise UnsupportedAlgorithm(
+ "Backend object does not implement HMACBackend.",
+ _Reasons.BACKEND_MISSING_INTERFACE,
+ )
+
+ if not isinstance(algorithm, hashes.HashAlgorithm):
+ raise TypeError("Expected instance of hashes.HashAlgorithm.")
+ self._algorithm = algorithm
+
+ self._backend = backend
+ self._key = key
+ if ctx is None:
+ self._ctx = self._backend.create_hmac_ctx(key, self.algorithm)
+ else:
+ self._ctx = ctx
+
+ algorithm = utils.read_only_property("_algorithm")
+
+ def update(self, data):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ utils._check_byteslike("data", data)
+ self._ctx.update(data)
+
+ def copy(self):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ return HMAC(
+ self._key,
+ self.algorithm,
+ backend=self._backend,
+ ctx=self._ctx.copy(),
+ )
+
+ def finalize(self):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ digest = self._ctx.finalize()
+ self._ctx = None
+ return digest
+
+ def verify(self, signature):
+ utils._check_bytes("signature", signature)
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ ctx, self._ctx = self._ctx, None
+ ctx.verify(signature)
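
A short, hedged example of the HMAC context above: generate a tag over a message and check it with the constant-time verify(); the key and message are illustrative placeholders.

import os

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives import hashes, hmac

key = os.urandom(32)  # illustrative key; real keys need proper key management
h = hmac.HMAC(key, hashes.SHA256())
h.update(b"message to authenticate")
tag = h.finalize()

# verify() consumes a fresh context and raises InvalidSignature on mismatch.
verifier = hmac.HMAC(key, hashes.SHA256())
verifier.update(b"message to authenticate")
try:
    verifier.verify(tag)
except InvalidSignature:
    raise SystemExit("tag mismatch")
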
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/__init__.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/__init__.py
new file mode 100644
index 0000000000..2d0724e5da
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/__init__.py
@@ -0,0 +1,26 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+
+@six.add_metaclass(abc.ABCMeta)
+class KeyDerivationFunction(object):
+ @abc.abstractmethod
+ def derive(self, key_material):
+ """
+ Deterministically generates and returns a new key based on the existing
+ key material.
+ """
+
+ @abc.abstractmethod
+ def verify(self, key_material, expected_key):
+ """
+ Checks whether the key generated by the key material matches the
+ expected derived key. Raises an exception if they do not match.
+ """
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/concatkdf.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/concatkdf.py
new file mode 100644
index 0000000000..7cc0324fc4
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/concatkdf.py
@@ -0,0 +1,131 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import struct
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.backends.interfaces import HMACBackend
+from cryptography.hazmat.backends.interfaces import HashBackend
+from cryptography.hazmat.primitives import constant_time, hashes, hmac
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+def _int_to_u32be(n):
+ return struct.pack(">I", n)
+
+
+def _common_args_checks(algorithm, length, otherinfo):
+ max_length = algorithm.digest_size * (2 ** 32 - 1)
+ if length > max_length:
+ raise ValueError(
+ "Can not derive keys larger than {} bits.".format(max_length)
+ )
+ if otherinfo is not None:
+ utils._check_bytes("otherinfo", otherinfo)
+
+
+def _concatkdf_derive(key_material, length, auxfn, otherinfo):
+ utils._check_byteslike("key_material", key_material)
+ output = [b""]
+ outlen = 0
+ counter = 1
+
+ while length > outlen:
+ h = auxfn()
+ h.update(_int_to_u32be(counter))
+ h.update(key_material)
+ h.update(otherinfo)
+ output.append(h.finalize())
+ outlen += len(output[-1])
+ counter += 1
+
+ return b"".join(output)[:length]
+
+
+@utils.register_interface(KeyDerivationFunction)
+class ConcatKDFHash(object):
+ def __init__(self, algorithm, length, otherinfo, backend=None):
+ backend = _get_backend(backend)
+
+ _common_args_checks(algorithm, length, otherinfo)
+ self._algorithm = algorithm
+ self._length = length
+ self._otherinfo = otherinfo
+ if self._otherinfo is None:
+ self._otherinfo = b""
+
+ if not isinstance(backend, HashBackend):
+ raise UnsupportedAlgorithm(
+ "Backend object does not implement HashBackend.",
+ _Reasons.BACKEND_MISSING_INTERFACE,
+ )
+ self._backend = backend
+ self._used = False
+
+ def _hash(self):
+ return hashes.Hash(self._algorithm, self._backend)
+
+ def derive(self, key_material):
+ if self._used:
+ raise AlreadyFinalized
+ self._used = True
+ return _concatkdf_derive(
+ key_material, self._length, self._hash, self._otherinfo
+ )
+
+ def verify(self, key_material, expected_key):
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
+
+
+@utils.register_interface(KeyDerivationFunction)
+class ConcatKDFHMAC(object):
+ def __init__(self, algorithm, length, salt, otherinfo, backend=None):
+ backend = _get_backend(backend)
+
+ _common_args_checks(algorithm, length, otherinfo)
+ self._algorithm = algorithm
+ self._length = length
+ self._otherinfo = otherinfo
+ if self._otherinfo is None:
+ self._otherinfo = b""
+
+ if salt is None:
+ salt = b"\x00" * algorithm.block_size
+ else:
+ utils._check_bytes("salt", salt)
+
+ self._salt = salt
+
+ if not isinstance(backend, HMACBackend):
+ raise UnsupportedAlgorithm(
+ "Backend object does not implement HMACBackend.",
+ _Reasons.BACKEND_MISSING_INTERFACE,
+ )
+ self._backend = backend
+ self._used = False
+
+ def _hmac(self):
+ return hmac.HMAC(self._salt, self._algorithm, self._backend)
+
+ def derive(self, key_material):
+ if self._used:
+ raise AlreadyFinalized
+ self._used = True
+ return _concatkdf_derive(
+ key_material, self._length, self._hmac, self._otherinfo
+ )
+
+ def verify(self, key_material, expected_key):
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
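
A sketch of the ConcatKDF API above, assuming an OpenSSL-backed install; the shared secret and otherinfo values are placeholders.

import os

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.concatkdf import ConcatKDFHash

shared_secret = os.urandom(32)          # e.g. the output of an ECDH exchange
otherinfo = b"example concatkdf context"

ckdf = ConcatKDFHash(algorithm=hashes.SHA256(), length=32, otherinfo=otherinfo)
key = ckdf.derive(shared_secret)

# Instances are single-use, so verification needs a fresh object.
ConcatKDFHash(algorithm=hashes.SHA256(), length=32,
              otherinfo=otherinfo).verify(shared_secret, key)
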
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/hkdf.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/hkdf.py
new file mode 100644
index 0000000000..9bb6bc2132
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/hkdf.py
@@ -0,0 +1,115 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import six
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.backends.interfaces import HMACBackend
+from cryptography.hazmat.primitives import constant_time, hmac
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+@utils.register_interface(KeyDerivationFunction)
+class HKDF(object):
+ def __init__(self, algorithm, length, salt, info, backend=None):
+ backend = _get_backend(backend)
+ if not isinstance(backend, HMACBackend):
+ raise UnsupportedAlgorithm(
+ "Backend object does not implement HMACBackend.",
+ _Reasons.BACKEND_MISSING_INTERFACE,
+ )
+
+ self._algorithm = algorithm
+
+ if salt is None:
+ salt = b"\x00" * self._algorithm.digest_size
+ else:
+ utils._check_bytes("salt", salt)
+
+ self._salt = salt
+
+ self._backend = backend
+
+ self._hkdf_expand = HKDFExpand(self._algorithm, length, info, backend)
+
+ def _extract(self, key_material):
+ h = hmac.HMAC(self._salt, self._algorithm, backend=self._backend)
+ h.update(key_material)
+ return h.finalize()
+
+ def derive(self, key_material):
+ utils._check_byteslike("key_material", key_material)
+ return self._hkdf_expand.derive(self._extract(key_material))
+
+ def verify(self, key_material, expected_key):
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
+
+
+@utils.register_interface(KeyDerivationFunction)
+class HKDFExpand(object):
+ def __init__(self, algorithm, length, info, backend=None):
+ backend = _get_backend(backend)
+ if not isinstance(backend, HMACBackend):
+ raise UnsupportedAlgorithm(
+ "Backend object does not implement HMACBackend.",
+ _Reasons.BACKEND_MISSING_INTERFACE,
+ )
+
+ self._algorithm = algorithm
+
+ self._backend = backend
+
+ max_length = 255 * algorithm.digest_size
+
+ if length > max_length:
+ raise ValueError(
+ "Can not derive keys larger than {} octets.".format(max_length)
+ )
+
+ self._length = length
+
+ if info is None:
+ info = b""
+ else:
+ utils._check_bytes("info", info)
+
+ self._info = info
+
+ self._used = False
+
+ def _expand(self, key_material):
+ output = [b""]
+ counter = 1
+
+ while self._algorithm.digest_size * (len(output) - 1) < self._length:
+ h = hmac.HMAC(key_material, self._algorithm, backend=self._backend)
+ h.update(output[-1])
+ h.update(self._info)
+ h.update(six.int2byte(counter))
+ output.append(h.finalize())
+ counter += 1
+
+ return b"".join(output)[: self._length]
+
+ def derive(self, key_material):
+ utils._check_byteslike("key_material", key_material)
+ if self._used:
+ raise AlreadyFinalized
+
+ self._used = True
+ return self._expand(key_material)
+
+ def verify(self, key_material, expected_key):
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
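
A minimal HKDF extract-and-expand sketch against the class above; the input key material, salt, and info strings are illustrative.

import os

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDF

input_key_material = os.urandom(32)

hkdf = HKDF(
    algorithm=hashes.SHA256(),
    length=64,                  # at most 255 * digest_size octets
    salt=None,                  # None becomes digest_size zero bytes
    info=b"application: handshake keys",
)
okm = hkdf.derive(input_key_material)
assert len(okm) == 64
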
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/kbkdf.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/kbkdf.py
new file mode 100644
index 0000000000..864337001c
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/kbkdf.py
@@ -0,0 +1,162 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from enum import Enum
+
+from six.moves import range
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.backends.interfaces import HMACBackend
+from cryptography.hazmat.primitives import constant_time, hashes, hmac
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+class Mode(Enum):
+ CounterMode = "ctr"
+
+
+class CounterLocation(Enum):
+ BeforeFixed = "before_fixed"
+ AfterFixed = "after_fixed"
+
+
+@utils.register_interface(KeyDerivationFunction)
+class KBKDFHMAC(object):
+ def __init__(
+ self,
+ algorithm,
+ mode,
+ length,
+ rlen,
+ llen,
+ location,
+ label,
+ context,
+ fixed,
+ backend=None,
+ ):
+ backend = _get_backend(backend)
+ if not isinstance(backend, HMACBackend):
+ raise UnsupportedAlgorithm(
+ "Backend object does not implement HMACBackend.",
+ _Reasons.BACKEND_MISSING_INTERFACE,
+ )
+
+ if not isinstance(algorithm, hashes.HashAlgorithm):
+ raise UnsupportedAlgorithm(
+ "Algorithm supplied is not a supported hash algorithm.",
+ _Reasons.UNSUPPORTED_HASH,
+ )
+
+ if not backend.hmac_supported(algorithm):
+ raise UnsupportedAlgorithm(
+ "Algorithm supplied is not a supported hmac algorithm.",
+ _Reasons.UNSUPPORTED_HASH,
+ )
+
+ if not isinstance(mode, Mode):
+ raise TypeError("mode must be of type Mode")
+
+ if not isinstance(location, CounterLocation):
+ raise TypeError("location must be of type CounterLocation")
+
+ if (label or context) and fixed:
+ raise ValueError(
+ "When supplying fixed data, " "label and context are ignored."
+ )
+
+ if rlen is None or not self._valid_byte_length(rlen):
+ raise ValueError("rlen must be between 1 and 4")
+
+ if llen is None and fixed is None:
+ raise ValueError("Please specify an llen")
+
+ if llen is not None and not isinstance(llen, int):
+ raise TypeError("llen must be an integer")
+
+ if label is None:
+ label = b""
+
+ if context is None:
+ context = b""
+
+ utils._check_bytes("label", label)
+ utils._check_bytes("context", context)
+ self._algorithm = algorithm
+ self._mode = mode
+ self._length = length
+ self._rlen = rlen
+ self._llen = llen
+ self._location = location
+ self._label = label
+ self._context = context
+ self._backend = backend
+ self._used = False
+ self._fixed_data = fixed
+
+ def _valid_byte_length(self, value):
+ if not isinstance(value, int):
+ raise TypeError("value must be of type int")
+
+ value_bin = utils.int_to_bytes(1, value)
+ if not 1 <= len(value_bin) <= 4:
+ return False
+ return True
+
+ def derive(self, key_material):
+ if self._used:
+ raise AlreadyFinalized
+
+ utils._check_byteslike("key_material", key_material)
+ self._used = True
+
+ # inverse floor division (equivalent to ceiling)
+ rounds = -(-self._length // self._algorithm.digest_size)
+
+ output = [b""]
+
+ # For counter mode, the number of iterations shall not be
+ # larger than 2^r-1, where r <= 32 is the binary length of the counter
+ # This ensures that the counter values used as an input to the
+ # PRF will not repeat during a particular call to the KDF function.
+ r_bin = utils.int_to_bytes(1, self._rlen)
+ if rounds > pow(2, len(r_bin) * 8) - 1:
+ raise ValueError("There are too many iterations.")
+
+ for i in range(1, rounds + 1):
+ h = hmac.HMAC(key_material, self._algorithm, backend=self._backend)
+
+ counter = utils.int_to_bytes(i, self._rlen)
+ if self._location == CounterLocation.BeforeFixed:
+ h.update(counter)
+
+ h.update(self._generate_fixed_input())
+
+ if self._location == CounterLocation.AfterFixed:
+ h.update(counter)
+
+ output.append(h.finalize())
+
+ return b"".join(output)[: self._length]
+
+ def _generate_fixed_input(self):
+ if self._fixed_data and isinstance(self._fixed_data, bytes):
+ return self._fixed_data
+
+ l_val = utils.int_to_bytes(self._length * 8, self._llen)
+
+ return b"".join([self._label, b"\x00", self._context, l_val])
+
+ def verify(self, key_material, expected_key):
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
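
A counter-mode KBKDF sketch using the class above; the label, context, and field widths are placeholder choices, not prescribed values.

import os

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.kbkdf import (
    CounterLocation,
    KBKDFHMAC,
    Mode,
)

master_key = os.urandom(32)

kdf = KBKDFHMAC(
    algorithm=hashes.SHA256(),
    mode=Mode.CounterMode,
    length=32,
    rlen=4,                       # 4-byte counter i
    llen=4,                       # 4-byte length field L
    location=CounterLocation.BeforeFixed,
    label=b"example label",
    context=b"example context",
    fixed=None,                   # fixed input is built from label, context and L
)
derived = kdf.derive(master_key)
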
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/pbkdf2.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/pbkdf2.py
new file mode 100644
index 0000000000..5b67d48bba
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/pbkdf2.py
@@ -0,0 +1,62 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.backends.interfaces import PBKDF2HMACBackend
+from cryptography.hazmat.primitives import constant_time
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+@utils.register_interface(KeyDerivationFunction)
+class PBKDF2HMAC(object):
+ def __init__(self, algorithm, length, salt, iterations, backend=None):
+ backend = _get_backend(backend)
+ if not isinstance(backend, PBKDF2HMACBackend):
+ raise UnsupportedAlgorithm(
+ "Backend object does not implement PBKDF2HMACBackend.",
+ _Reasons.BACKEND_MISSING_INTERFACE,
+ )
+
+ if not backend.pbkdf2_hmac_supported(algorithm):
+ raise UnsupportedAlgorithm(
+ "{} is not supported for PBKDF2 by this backend.".format(
+ algorithm.name
+ ),
+ _Reasons.UNSUPPORTED_HASH,
+ )
+ self._used = False
+ self._algorithm = algorithm
+ self._length = length
+ utils._check_bytes("salt", salt)
+ self._salt = salt
+ self._iterations = iterations
+ self._backend = backend
+
+ def derive(self, key_material):
+ if self._used:
+ raise AlreadyFinalized("PBKDF2 instances can only be used once.")
+ self._used = True
+
+ utils._check_byteslike("key_material", key_material)
+ return self._backend.derive_pbkdf2_hmac(
+ self._algorithm,
+ self._length,
+ self._salt,
+ self._iterations,
+ key_material,
+ )
+
+ def verify(self, key_material, expected_key):
+ derived_key = self.derive(key_material)
+ if not constant_time.bytes_eq(derived_key, expected_key):
+ raise InvalidKey("Keys do not match.")
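
A password-hashing sketch for the PBKDF2HMAC class above; the iteration count is only an illustrative work factor and should be tuned for the target hardware.

import os

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

salt = os.urandom(16)
kdf = PBKDF2HMAC(
    algorithm=hashes.SHA256(),
    length=32,
    salt=salt,
    iterations=100000,
)
key = kdf.derive(b"correct horse battery staple")

# verify() re-derives internally, so it also needs a fresh single-use instance.
PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=salt,
           iterations=100000).verify(b"correct horse battery staple", key)
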
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/scrypt.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/scrypt.py
new file mode 100644
index 0000000000..f028646aa0
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/scrypt.py
@@ -0,0 +1,68 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import sys
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.backends.interfaces import ScryptBackend
+from cryptography.hazmat.primitives import constant_time
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+# This is used by the scrypt tests to skip tests that require more memory
+# than the MEM_LIMIT
+_MEM_LIMIT = sys.maxsize // 2
+
+
+@utils.register_interface(KeyDerivationFunction)
+class Scrypt(object):
+ def __init__(self, salt, length, n, r, p, backend=None):
+ backend = _get_backend(backend)
+ if not isinstance(backend, ScryptBackend):
+ raise UnsupportedAlgorithm(
+ "Backend object does not implement ScryptBackend.",
+ _Reasons.BACKEND_MISSING_INTERFACE,
+ )
+
+ self._length = length
+ utils._check_bytes("salt", salt)
+ if n < 2 or (n & (n - 1)) != 0:
+ raise ValueError("n must be greater than 1 and be a power of 2.")
+
+ if r < 1:
+ raise ValueError("r must be greater than or equal to 1.")
+
+ if p < 1:
+ raise ValueError("p must be greater than or equal to 1.")
+
+ self._used = False
+ self._salt = salt
+ self._n = n
+ self._r = r
+ self._p = p
+ self._backend = backend
+
+ def derive(self, key_material):
+ if self._used:
+ raise AlreadyFinalized("Scrypt instances can only be used once.")
+ self._used = True
+
+ utils._check_byteslike("key_material", key_material)
+ return self._backend.derive_scrypt(
+ key_material, self._salt, self._length, self._n, self._r, self._p
+ )
+
+ def verify(self, key_material, expected_key):
+ derived_key = self.derive(key_material)
+ if not constant_time.bytes_eq(derived_key, expected_key):
+ raise InvalidKey("Keys do not match.")
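
A brief Scrypt sketch; it assumes the underlying OpenSSL build exposes scrypt, and the cost parameters are illustrative rather than recommended values.

import os

from cryptography.hazmat.primitives.kdf.scrypt import Scrypt

salt = os.urandom(16)
kdf = Scrypt(salt=salt, length=32, n=2 ** 14, r=8, p=1)  # n must be a power of 2
key = kdf.derive(b"my password")

# Re-deriving with the wrong password makes verify() raise InvalidKey.
Scrypt(salt=salt, length=32, n=2 ** 14, r=8, p=1).verify(b"my password", key)
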
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/x963kdf.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/x963kdf.py
new file mode 100644
index 0000000000..1898d526a4
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/kdf/x963kdf.py
@@ -0,0 +1,74 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import struct
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ InvalidKey,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.backends.interfaces import HashBackend
+from cryptography.hazmat.primitives import constant_time, hashes
+from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
+
+
+def _int_to_u32be(n):
+ return struct.pack(">I", n)
+
+
+@utils.register_interface(KeyDerivationFunction)
+class X963KDF(object):
+ def __init__(self, algorithm, length, sharedinfo, backend=None):
+ backend = _get_backend(backend)
+
+ max_len = algorithm.digest_size * (2 ** 32 - 1)
+ if length > max_len:
+ raise ValueError(
+ "Can not derive keys larger than {} bits.".format(max_len)
+ )
+ if sharedinfo is not None:
+ utils._check_bytes("sharedinfo", sharedinfo)
+
+ self._algorithm = algorithm
+ self._length = length
+ self._sharedinfo = sharedinfo
+
+ if not isinstance(backend, HashBackend):
+ raise UnsupportedAlgorithm(
+ "Backend object does not implement HashBackend.",
+ _Reasons.BACKEND_MISSING_INTERFACE,
+ )
+ self._backend = backend
+ self._used = False
+
+ def derive(self, key_material):
+ if self._used:
+ raise AlreadyFinalized
+ self._used = True
+ utils._check_byteslike("key_material", key_material)
+ output = [b""]
+ outlen = 0
+ counter = 1
+
+ while self._length > outlen:
+ h = hashes.Hash(self._algorithm, self._backend)
+ h.update(key_material)
+ h.update(_int_to_u32be(counter))
+ if self._sharedinfo is not None:
+ h.update(self._sharedinfo)
+ output.append(h.finalize())
+ outlen += len(output[-1])
+ counter += 1
+
+ return b"".join(output)[: self._length]
+
+ def verify(self, key_material, expected_key):
+ if not constant_time.bytes_eq(self.derive(key_material), expected_key):
+ raise InvalidKey
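
An ANSI X9.63 KDF sketch mirroring the class above; the shared secret and sharedinfo bytes are placeholders.

import os

from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.x963kdf import X963KDF

shared_secret = os.urandom(32)
xkdf = X963KDF(
    algorithm=hashes.SHA256(),
    length=32,
    sharedinfo=b"example shared info",
)
key = xkdf.derive(shared_secret)
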
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/keywrap.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/keywrap.py
new file mode 100644
index 0000000000..2439cafe6d
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/keywrap.py
@@ -0,0 +1,161 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import struct
+
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.primitives.ciphers import Cipher
+from cryptography.hazmat.primitives.ciphers.algorithms import AES
+from cryptography.hazmat.primitives.ciphers.modes import ECB
+from cryptography.hazmat.primitives.constant_time import bytes_eq
+
+
+def _wrap_core(wrapping_key, a, r, backend):
+ # RFC 3394 Key Wrap - 2.2.1 (index method)
+ encryptor = Cipher(AES(wrapping_key), ECB(), backend).encryptor()
+ n = len(r)
+ for j in range(6):
+ for i in range(n):
+ # every encryption operation is a discrete 16 byte chunk (because
+ # AES has a 128-bit block size) and since we're using ECB it is
+ # safe to reuse the encryptor for the entire operation
+ b = encryptor.update(a + r[i])
+ # pack/unpack are safe as these are always 64-bit chunks
+ a = struct.pack(
+ ">Q", struct.unpack(">Q", b[:8])[0] ^ ((n * j) + i + 1)
+ )
+ r[i] = b[-8:]
+
+ assert encryptor.finalize() == b""
+
+ return a + b"".join(r)
+
+
+def aes_key_wrap(wrapping_key, key_to_wrap, backend=None):
+ backend = _get_backend(backend)
+ if len(wrapping_key) not in [16, 24, 32]:
+ raise ValueError("The wrapping key must be a valid AES key length")
+
+ if len(key_to_wrap) < 16:
+ raise ValueError("The key to wrap must be at least 16 bytes")
+
+ if len(key_to_wrap) % 8 != 0:
+ raise ValueError("The key to wrap must be a multiple of 8 bytes")
+
+ a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6"
+ r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)]
+ return _wrap_core(wrapping_key, a, r, backend)
+
+
+def _unwrap_core(wrapping_key, a, r, backend):
+ # Implement RFC 3394 Key Unwrap - 2.2.2 (index method)
+ decryptor = Cipher(AES(wrapping_key), ECB(), backend).decryptor()
+ n = len(r)
+ for j in reversed(range(6)):
+ for i in reversed(range(n)):
+ # pack/unpack are safe as these are always 64-bit chunks
+ atr = (
+ struct.pack(
+ ">Q", struct.unpack(">Q", a)[0] ^ ((n * j) + i + 1)
+ )
+ + r[i]
+ )
+ # every decryption operation is a discrete 16 byte chunk so
+ # it is safe to reuse the decryptor for the entire operation
+ b = decryptor.update(atr)
+ a = b[:8]
+ r[i] = b[-8:]
+
+ assert decryptor.finalize() == b""
+ return a, r
+
+
+def aes_key_wrap_with_padding(wrapping_key, key_to_wrap, backend=None):
+ backend = _get_backend(backend)
+ if len(wrapping_key) not in [16, 24, 32]:
+ raise ValueError("The wrapping key must be a valid AES key length")
+
+ aiv = b"\xA6\x59\x59\xA6" + struct.pack(">i", len(key_to_wrap))
+ # pad the key to wrap if necessary
+ pad = (8 - (len(key_to_wrap) % 8)) % 8
+ key_to_wrap = key_to_wrap + b"\x00" * pad
+ if len(key_to_wrap) == 8:
+ # RFC 5649 - 4.1 - exactly 8 octets after padding
+ encryptor = Cipher(AES(wrapping_key), ECB(), backend).encryptor()
+ b = encryptor.update(aiv + key_to_wrap)
+ assert encryptor.finalize() == b""
+ return b
+ else:
+ r = [key_to_wrap[i : i + 8] for i in range(0, len(key_to_wrap), 8)]
+ return _wrap_core(wrapping_key, aiv, r, backend)
+
+
+def aes_key_unwrap_with_padding(wrapping_key, wrapped_key, backend=None):
+ backend = _get_backend(backend)
+ if len(wrapped_key) < 16:
+ raise InvalidUnwrap("Must be at least 16 bytes")
+
+ if len(wrapping_key) not in [16, 24, 32]:
+ raise ValueError("The wrapping key must be a valid AES key length")
+
+ if len(wrapped_key) == 16:
+ # RFC 5649 - 4.2 - exactly two 64-bit blocks
+ decryptor = Cipher(AES(wrapping_key), ECB(), backend).decryptor()
+ b = decryptor.update(wrapped_key)
+ assert decryptor.finalize() == b""
+ a = b[:8]
+ data = b[8:]
+ n = 1
+ else:
+ r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)]
+ encrypted_aiv = r.pop(0)
+ n = len(r)
+ a, r = _unwrap_core(wrapping_key, encrypted_aiv, r, backend)
+ data = b"".join(r)
+
+ # 1) Check that MSB(32,A) = A65959A6.
+ # 2) Check that 8*(n-1) < LSB(32,A) <= 8*n. If so, let
+ # MLI = LSB(32,A).
+ # 3) Let b = (8*n)-MLI, and then check that the rightmost b octets of
+ # the output data are zero.
+ (mli,) = struct.unpack(">I", a[4:])
+ b = (8 * n) - mli
+ if (
+ not bytes_eq(a[:4], b"\xa6\x59\x59\xa6")
+ or not 8 * (n - 1) < mli <= 8 * n
+ or (b != 0 and not bytes_eq(data[-b:], b"\x00" * b))
+ ):
+ raise InvalidUnwrap()
+
+ if b == 0:
+ return data
+ else:
+ return data[:-b]
+
+
+def aes_key_unwrap(wrapping_key, wrapped_key, backend=None):
+ backend = _get_backend(backend)
+ if len(wrapped_key) < 24:
+ raise InvalidUnwrap("Must be at least 24 bytes")
+
+ if len(wrapped_key) % 8 != 0:
+ raise InvalidUnwrap("The wrapped key must be a multiple of 8 bytes")
+
+ if len(wrapping_key) not in [16, 24, 32]:
+ raise ValueError("The wrapping key must be a valid AES key length")
+
+ aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6"
+ r = [wrapped_key[i : i + 8] for i in range(0, len(wrapped_key), 8)]
+ a = r.pop(0)
+ a, r = _unwrap_core(wrapping_key, a, r, backend)
+ if not bytes_eq(a, aiv):
+ raise InvalidUnwrap()
+
+ return b"".join(r)
+
+
+class InvalidUnwrap(Exception):
+ pass
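
An RFC 3394 key-wrap round trip using the helpers above; both keys are random placeholders. For key material that is not a multiple of 8 bytes, the _with_padding variants apply RFC 5649 padding instead.

import os

from cryptography.hazmat.primitives.keywrap import aes_key_unwrap, aes_key_wrap

kek = os.urandom(32)              # key-encryption key: 16, 24 or 32 bytes
cek = os.urandom(32)              # key to protect: at least 16, multiple of 8

wrapped = aes_key_wrap(kek, cek)  # output is 8 bytes longer than the input
assert aes_key_unwrap(kek, wrapped) == cek
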
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/padding.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/padding.py
new file mode 100644
index 0000000000..98abffbc08
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/padding.py
@@ -0,0 +1,214 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+
+import six
+
+from cryptography import utils
+from cryptography.exceptions import AlreadyFinalized
+from cryptography.hazmat.bindings._padding import lib
+
+
+@six.add_metaclass(abc.ABCMeta)
+class PaddingContext(object):
+ @abc.abstractmethod
+ def update(self, data):
+ """
+ Pads the provided bytes and returns any available data as bytes.
+ """
+
+ @abc.abstractmethod
+ def finalize(self):
+ """
+ Finalize the padding, returns bytes.
+ """
+
+
+def _byte_padding_check(block_size):
+ if not (0 <= block_size <= 2040):
+ raise ValueError("block_size must be in range(0, 2041).")
+
+ if block_size % 8 != 0:
+ raise ValueError("block_size must be a multiple of 8.")
+
+
+def _byte_padding_update(buffer_, data, block_size):
+ if buffer_ is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ utils._check_byteslike("data", data)
+
+ # six.PY2: Only coerce non-bytes objects to avoid triggering bad behavior
+ # of future's newbytes type. Unconditionally call bytes() after Python 2
+ # support is gone.
+ buffer_ += data if isinstance(data, bytes) else bytes(data)
+
+ finished_blocks = len(buffer_) // (block_size // 8)
+
+ result = buffer_[: finished_blocks * (block_size // 8)]
+ buffer_ = buffer_[finished_blocks * (block_size // 8) :]
+
+ return buffer_, result
+
+
+def _byte_padding_pad(buffer_, block_size, paddingfn):
+ if buffer_ is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ pad_size = block_size // 8 - len(buffer_)
+ return buffer_ + paddingfn(pad_size)
+
+
+def _byte_unpadding_update(buffer_, data, block_size):
+ if buffer_ is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ utils._check_byteslike("data", data)
+
+ # six.PY2: Only coerce non-bytes objects to avoid triggering bad behavior
+ # of future's newbytes type. Unconditionally call bytes() after Python 2
+ # support is gone.
+ buffer_ += data if isinstance(data, bytes) else bytes(data)
+
+ finished_blocks = max(len(buffer_) // (block_size // 8) - 1, 0)
+
+ result = buffer_[: finished_blocks * (block_size // 8)]
+ buffer_ = buffer_[finished_blocks * (block_size // 8) :]
+
+ return buffer_, result
+
+
+def _byte_unpadding_check(buffer_, block_size, checkfn):
+ if buffer_ is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ if len(buffer_) != block_size // 8:
+ raise ValueError("Invalid padding bytes.")
+
+ valid = checkfn(buffer_, block_size // 8)
+
+ if not valid:
+ raise ValueError("Invalid padding bytes.")
+
+ pad_size = six.indexbytes(buffer_, -1)
+ return buffer_[:-pad_size]
+
+
+class PKCS7(object):
+ def __init__(self, block_size):
+ _byte_padding_check(block_size)
+ self.block_size = block_size
+
+ def padder(self):
+ return _PKCS7PaddingContext(self.block_size)
+
+ def unpadder(self):
+ return _PKCS7UnpaddingContext(self.block_size)
+
+
+@utils.register_interface(PaddingContext)
+class _PKCS7PaddingContext(object):
+ def __init__(self, block_size):
+ self.block_size = block_size
+ # TODO: more copies than necessary, we should use zero-buffer (#193)
+ self._buffer = b""
+
+ def update(self, data):
+ self._buffer, result = _byte_padding_update(
+ self._buffer, data, self.block_size
+ )
+ return result
+
+ def _padding(self, size):
+ return six.int2byte(size) * size
+
+ def finalize(self):
+ result = _byte_padding_pad(
+ self._buffer, self.block_size, self._padding
+ )
+ self._buffer = None
+ return result
+
+
+@utils.register_interface(PaddingContext)
+class _PKCS7UnpaddingContext(object):
+ def __init__(self, block_size):
+ self.block_size = block_size
+ # TODO: more copies than necessary, we should use zero-buffer (#193)
+ self._buffer = b""
+
+ def update(self, data):
+ self._buffer, result = _byte_unpadding_update(
+ self._buffer, data, self.block_size
+ )
+ return result
+
+ def finalize(self):
+ result = _byte_unpadding_check(
+ self._buffer, self.block_size, lib.Cryptography_check_pkcs7_padding
+ )
+ self._buffer = None
+ return result
+
+
+class ANSIX923(object):
+ def __init__(self, block_size):
+ _byte_padding_check(block_size)
+ self.block_size = block_size
+
+ def padder(self):
+ return _ANSIX923PaddingContext(self.block_size)
+
+ def unpadder(self):
+ return _ANSIX923UnpaddingContext(self.block_size)
+
+
+@utils.register_interface(PaddingContext)
+class _ANSIX923PaddingContext(object):
+ def __init__(self, block_size):
+ self.block_size = block_size
+ # TODO: more copies than necessary, we should use zero-buffer (#193)
+ self._buffer = b""
+
+ def update(self, data):
+ self._buffer, result = _byte_padding_update(
+ self._buffer, data, self.block_size
+ )
+ return result
+
+ def _padding(self, size):
+ return six.int2byte(0) * (size - 1) + six.int2byte(size)
+
+ def finalize(self):
+ result = _byte_padding_pad(
+ self._buffer, self.block_size, self._padding
+ )
+ self._buffer = None
+ return result
+
+
+@utils.register_interface(PaddingContext)
+class _ANSIX923UnpaddingContext(object):
+ def __init__(self, block_size):
+ self.block_size = block_size
+ # TODO: more copies than necessary, we should use zero-buffer (#193)
+ self._buffer = b""
+
+ def update(self, data):
+ self._buffer, result = _byte_unpadding_update(
+ self._buffer, data, self.block_size
+ )
+ return result
+
+ def finalize(self):
+ result = _byte_unpadding_check(
+ self._buffer,
+ self.block_size,
+ lib.Cryptography_check_ansix923_padding,
+ )
+ self._buffer = None
+ return result
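
A PKCS#7 pad/unpad round trip against the contexts above; note that block_size is given in bits, so 128 pads to the 16-byte AES block.

from cryptography.hazmat.primitives import padding

padder = padding.PKCS7(128).padder()
padded = padder.update(b"hello world") + padder.finalize()
assert len(padded) % 16 == 0

unpadder = padding.PKCS7(128).unpadder()
message = unpadder.update(padded) + unpadder.finalize()
assert message == b"hello world"
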
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/poly1305.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/poly1305.py
new file mode 100644
index 0000000000..6439686202
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/poly1305.py
@@ -0,0 +1,58 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+
+from cryptography import utils
+from cryptography.exceptions import (
+ AlreadyFinalized,
+ UnsupportedAlgorithm,
+ _Reasons,
+)
+
+
+class Poly1305(object):
+ def __init__(self, key):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not backend.poly1305_supported():
+ raise UnsupportedAlgorithm(
+ "poly1305 is not supported by this version of OpenSSL.",
+ _Reasons.UNSUPPORTED_MAC,
+ )
+ self._ctx = backend.create_poly1305_ctx(key)
+
+ def update(self, data):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ utils._check_byteslike("data", data)
+ self._ctx.update(data)
+
+ def finalize(self):
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+ mac = self._ctx.finalize()
+ self._ctx = None
+ return mac
+
+ def verify(self, tag):
+ utils._check_bytes("tag", tag)
+ if self._ctx is None:
+ raise AlreadyFinalized("Context was already finalized.")
+
+ ctx, self._ctx = self._ctx, None
+ ctx.verify(tag)
+
+ @classmethod
+ def generate_tag(cls, key, data):
+ p = Poly1305(key)
+ p.update(data)
+ return p.finalize()
+
+ @classmethod
+ def verify_tag(cls, key, data, tag):
+ p = Poly1305(key)
+ p.update(data)
+ p.verify(tag)
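
A one-shot Poly1305 sketch using the convenience classmethods above; it assumes an OpenSSL build with Poly1305 support, and the 32-byte key must never be reused across messages.

import os

from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.primitives.poly1305 import Poly1305

key = os.urandom(32)
tag = Poly1305.generate_tag(key, b"message")

try:
    Poly1305.verify_tag(key, b"message", tag)
except InvalidSignature:
    raise SystemExit("tag mismatch")
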
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/__init__.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/__init__.py
new file mode 100644
index 0000000000..c2f9b014a6
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/__init__.py
@@ -0,0 +1,44 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography.hazmat.primitives.serialization.base import (
+ BestAvailableEncryption,
+ Encoding,
+ KeySerializationEncryption,
+ NoEncryption,
+ ParameterFormat,
+ PrivateFormat,
+ PublicFormat,
+ load_der_parameters,
+ load_der_private_key,
+ load_der_public_key,
+ load_pem_parameters,
+ load_pem_private_key,
+ load_pem_public_key,
+)
+from cryptography.hazmat.primitives.serialization.ssh import (
+ load_ssh_private_key,
+ load_ssh_public_key,
+)
+
+
+__all__ = [
+ "load_der_parameters",
+ "load_der_private_key",
+ "load_der_public_key",
+ "load_pem_parameters",
+ "load_pem_private_key",
+ "load_pem_public_key",
+ "load_ssh_private_key",
+ "load_ssh_public_key",
+ "Encoding",
+ "PrivateFormat",
+ "PublicFormat",
+ "ParameterFormat",
+ "KeySerializationEncryption",
+ "BestAvailableEncryption",
+ "NoEncryption",
+]
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/base.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/base.py
new file mode 100644
index 0000000000..fc27235c5c
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/base.py
@@ -0,0 +1,91 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+from enum import Enum
+
+import six
+
+from cryptography import utils
+from cryptography.hazmat.backends import _get_backend
+
+
+def load_pem_private_key(data, password, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_pem_private_key(data, password)
+
+
+def load_pem_public_key(data, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_pem_public_key(data)
+
+
+def load_pem_parameters(data, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_pem_parameters(data)
+
+
+def load_der_private_key(data, password, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_der_private_key(data, password)
+
+
+def load_der_public_key(data, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_der_public_key(data)
+
+
+def load_der_parameters(data, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_der_parameters(data)
+
+
+class Encoding(Enum):
+ PEM = "PEM"
+ DER = "DER"
+ OpenSSH = "OpenSSH"
+ Raw = "Raw"
+ X962 = "ANSI X9.62"
+ SMIME = "S/MIME"
+
+
+class PrivateFormat(Enum):
+ PKCS8 = "PKCS8"
+ TraditionalOpenSSL = "TraditionalOpenSSL"
+ Raw = "Raw"
+ OpenSSH = "OpenSSH"
+
+
+class PublicFormat(Enum):
+ SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1"
+ PKCS1 = "Raw PKCS#1"
+ OpenSSH = "OpenSSH"
+ Raw = "Raw"
+ CompressedPoint = "X9.62 Compressed Point"
+ UncompressedPoint = "X9.62 Uncompressed Point"
+
+
+class ParameterFormat(Enum):
+ PKCS3 = "PKCS3"
+
+
+@six.add_metaclass(abc.ABCMeta)
+class KeySerializationEncryption(object):
+ pass
+
+
+@utils.register_interface(KeySerializationEncryption)
+class BestAvailableEncryption(object):
+ def __init__(self, password):
+ if not isinstance(password, bytes) or len(password) == 0:
+ raise ValueError("Password must be 1 or more bytes.")
+
+ self.password = password
+
+
+@utils.register_interface(KeySerializationEncryption)
+class NoEncryption(object):
+ pass
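
A round-trip sketch for the loaders and enums above: serialize a freshly generated RSA key to unencrypted PKCS#8 PEM and load it back; the key size and format are illustrative choices.

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
pem = private_key.private_bytes(
    encoding=serialization.Encoding.PEM,
    format=serialization.PrivateFormat.PKCS8,
    encryption_algorithm=serialization.NoEncryption(),
)
loaded = serialization.load_pem_private_key(pem, password=None)
assert loaded.key_size == 2048
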
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/pkcs12.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/pkcs12.py
new file mode 100644
index 0000000000..201f32941c
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/pkcs12.py
@@ -0,0 +1,50 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography import x509
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.primitives import serialization
+from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
+
+
+def load_key_and_certificates(data, password, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_key_and_certificates_from_pkcs12(data, password)
+
+
+def serialize_key_and_certificates(name, key, cert, cas, encryption_algorithm):
+ if key is not None and not isinstance(
+ key,
+ (
+ rsa.RSAPrivateKeyWithSerialization,
+ dsa.DSAPrivateKeyWithSerialization,
+ ec.EllipticCurvePrivateKeyWithSerialization,
+ ),
+ ):
+ raise TypeError("Key must be RSA, DSA, or EllipticCurve private key.")
+ if cert is not None and not isinstance(cert, x509.Certificate):
+ raise TypeError("cert must be a certificate")
+
+ if cas is not None:
+ cas = list(cas)
+ if not all(isinstance(val, x509.Certificate) for val in cas):
+ raise TypeError("all values in cas must be certificates")
+
+ if not isinstance(
+ encryption_algorithm, serialization.KeySerializationEncryption
+ ):
+ raise TypeError(
+ "Key encryption algorithm must be a "
+ "KeySerializationEncryption instance"
+ )
+
+ if key is None and cert is None and not cas:
+ raise ValueError("You must supply at least one of key, cert, or cas")
+
+ backend = _get_backend(None)
+ return backend.serialize_key_and_certificates_to_pkcs12(
+ name, key, cert, cas, encryption_algorithm
+ )
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/pkcs7.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/pkcs7.py
new file mode 100644
index 0000000000..1e11e28ef5
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/pkcs7.py
@@ -0,0 +1,132 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from enum import Enum
+
+from cryptography import x509
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.primitives import hashes, serialization
+from cryptography.hazmat.primitives.asymmetric import ec, rsa
+from cryptography.utils import _check_byteslike
+
+
+def load_pem_pkcs7_certificates(data):
+ backend = _get_backend(None)
+ return backend.load_pem_pkcs7_certificates(data)
+
+
+def load_der_pkcs7_certificates(data):
+ backend = _get_backend(None)
+ return backend.load_der_pkcs7_certificates(data)
+
+
+class PKCS7SignatureBuilder(object):
+ def __init__(self, data=None, signers=[], additional_certs=[]):
+ self._data = data
+ self._signers = signers
+ self._additional_certs = additional_certs
+
+ def set_data(self, data):
+ _check_byteslike("data", data)
+ if self._data is not None:
+ raise ValueError("data may only be set once")
+
+ return PKCS7SignatureBuilder(data, self._signers)
+
+ def add_signer(self, certificate, private_key, hash_algorithm):
+ if not isinstance(
+ hash_algorithm,
+ (
+ hashes.SHA1,
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+ ),
+ ):
+ raise TypeError(
+ "hash_algorithm must be one of hashes.SHA1, SHA224, "
+ "SHA256, SHA384, or SHA512"
+ )
+ if not isinstance(certificate, x509.Certificate):
+ raise TypeError("certificate must be a x509.Certificate")
+
+ if not isinstance(
+ private_key, (rsa.RSAPrivateKey, ec.EllipticCurvePrivateKey)
+ ):
+ raise TypeError("Only RSA & EC keys are supported at this time.")
+
+ return PKCS7SignatureBuilder(
+ self._data,
+ self._signers + [(certificate, private_key, hash_algorithm)],
+ )
+
+ def add_certificate(self, certificate):
+ if not isinstance(certificate, x509.Certificate):
+ raise TypeError("certificate must be a x509.Certificate")
+
+ return PKCS7SignatureBuilder(
+ self._data, self._signers, self._additional_certs + [certificate]
+ )
+
+ def sign(self, encoding, options, backend=None):
+ if len(self._signers) == 0:
+ raise ValueError("Must have at least one signer")
+ if self._data is None:
+ raise ValueError("You must add data to sign")
+ options = list(options)
+ if not all(isinstance(x, PKCS7Options) for x in options):
+ raise ValueError("options must be from the PKCS7Options enum")
+ if encoding not in (
+ serialization.Encoding.PEM,
+ serialization.Encoding.DER,
+ serialization.Encoding.SMIME,
+ ):
+ raise ValueError(
+ "Must be PEM, DER, or SMIME from the Encoding enum"
+ )
+
+ # Text is a meaningless option unless it is accompanied by
+ # DetachedSignature
+ if (
+ PKCS7Options.Text in options
+ and PKCS7Options.DetachedSignature not in options
+ ):
+ raise ValueError(
+ "When passing the Text option you must also pass "
+ "DetachedSignature"
+ )
+
+ if PKCS7Options.Text in options and encoding in (
+ serialization.Encoding.DER,
+ serialization.Encoding.PEM,
+ ):
+ raise ValueError(
+ "The Text option is only available for SMIME serialization"
+ )
+
+ # No attributes implies no capabilities so we'll error if you try to
+ # pass both.
+ if (
+ PKCS7Options.NoAttributes in options
+ and PKCS7Options.NoCapabilities in options
+ ):
+ raise ValueError(
+ "NoAttributes is a superset of NoCapabilities. Do not pass "
+ "both values."
+ )
+
+ backend = _get_backend(backend)
+ return backend.pkcs7_sign(self, encoding, options)
+
+
+class PKCS7Options(Enum):
+ Text = "Add text/plain MIME type"
+ Binary = "Don't translate input data into canonical MIME format"
+ DetachedSignature = "Don't embed data in the PKCS7 structure"
+ NoCapabilities = "Don't embed SMIME capabilities"
+ NoAttributes = "Don't embed authenticatedAttributes"
+ NoCerts = "Don't embed signer certificate"
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/ssh.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/ssh.py
new file mode 100644
index 0000000000..5ecae59f8a
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/serialization/ssh.py
@@ -0,0 +1,683 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import binascii
+import os
+import re
+import struct
+
+import six
+
+from cryptography import utils
+from cryptography.exceptions import UnsupportedAlgorithm
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.primitives.asymmetric import dsa, ec, ed25519, rsa
+from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
+from cryptography.hazmat.primitives.serialization import (
+ Encoding,
+ NoEncryption,
+ PrivateFormat,
+ PublicFormat,
+)
+
+try:
+ from bcrypt import kdf as _bcrypt_kdf
+
+ _bcrypt_supported = True
+except ImportError:
+ _bcrypt_supported = False
+
+ def _bcrypt_kdf(*args, **kwargs):
+ raise UnsupportedAlgorithm("Need bcrypt module")
+
+
+try:
+ from base64 import encodebytes as _base64_encode
+except ImportError:
+ from base64 import encodestring as _base64_encode
+
+_SSH_ED25519 = b"ssh-ed25519"
+_SSH_RSA = b"ssh-rsa"
+_SSH_DSA = b"ssh-dss"
+_ECDSA_NISTP256 = b"ecdsa-sha2-nistp256"
+_ECDSA_NISTP384 = b"ecdsa-sha2-nistp384"
+_ECDSA_NISTP521 = b"ecdsa-sha2-nistp521"
+_CERT_SUFFIX = b"-cert-v01@openssh.com"
+
+_SSH_PUBKEY_RC = re.compile(br"\A(\S+)[ \t]+(\S+)")
+_SK_MAGIC = b"openssh-key-v1\0"
+_SK_START = b"-----BEGIN OPENSSH PRIVATE KEY-----"
+_SK_END = b"-----END OPENSSH PRIVATE KEY-----"
+_BCRYPT = b"bcrypt"
+_NONE = b"none"
+_DEFAULT_CIPHER = b"aes256-ctr"
+_DEFAULT_ROUNDS = 16
+_MAX_PASSWORD = 72
+
+# re is the only way to work on bytes-like data
+_PEM_RC = re.compile(_SK_START + b"(.*?)" + _SK_END, re.DOTALL)
+
+# padding for max blocksize
+_PADDING = memoryview(bytearray(range(1, 1 + 16)))
+
+# ciphers that are actually used in key wrapping
+_SSH_CIPHERS = {
+ b"aes256-ctr": (algorithms.AES, 32, modes.CTR, 16),
+ b"aes256-cbc": (algorithms.AES, 32, modes.CBC, 16),
+}
+
+# map local curve name to key type
+_ECDSA_KEY_TYPE = {
+ "secp256r1": _ECDSA_NISTP256,
+ "secp384r1": _ECDSA_NISTP384,
+ "secp521r1": _ECDSA_NISTP521,
+}
+
+_U32 = struct.Struct(b">I")
+_U64 = struct.Struct(b">Q")
+
+
+def _ecdsa_key_type(public_key):
+ """Return SSH key_type and curve_name for private key."""
+ curve = public_key.curve
+ if curve.name not in _ECDSA_KEY_TYPE:
+ raise ValueError(
+ "Unsupported curve for ssh private key: %r" % curve.name
+ )
+ return _ECDSA_KEY_TYPE[curve.name]
+
+
+def _ssh_pem_encode(data, prefix=_SK_START + b"\n", suffix=_SK_END + b"\n"):
+ return b"".join([prefix, _base64_encode(data), suffix])
+
+
+def _check_block_size(data, block_len):
+ """Require data to be full blocks"""
+ if not data or len(data) % block_len != 0:
+ raise ValueError("Corrupt data: missing padding")
+
+
+def _check_empty(data):
+ """All data should have been parsed."""
+ if data:
+ raise ValueError("Corrupt data: unparsed data")
+
+
+def _init_cipher(ciphername, password, salt, rounds, backend):
+ """Generate key + iv and return cipher."""
+ if not password:
+ raise ValueError("Key is password-protected.")
+
+ algo, key_len, mode, iv_len = _SSH_CIPHERS[ciphername]
+ seed = _bcrypt_kdf(password, salt, key_len + iv_len, rounds, True)
+ return Cipher(algo(seed[:key_len]), mode(seed[key_len:]), backend)
+
+
+def _get_u32(data):
+ """Uint32"""
+ if len(data) < 4:
+ raise ValueError("Invalid data")
+ return _U32.unpack(data[:4])[0], data[4:]
+
+
+def _get_u64(data):
+ """Uint64"""
+ if len(data) < 8:
+ raise ValueError("Invalid data")
+ return _U64.unpack(data[:8])[0], data[8:]
+
+
+def _get_sshstr(data):
+ """Bytes with u32 length prefix"""
+ n, data = _get_u32(data)
+ if n > len(data):
+ raise ValueError("Invalid data")
+ return data[:n], data[n:]
+
+
+def _get_mpint(data):
+ """Big integer."""
+ val, data = _get_sshstr(data)
+ if val and six.indexbytes(val, 0) > 0x7F:
+ raise ValueError("Invalid data")
+ return utils.int_from_bytes(val, "big"), data
+
+
+def _to_mpint(val):
+ """Storage format for signed bigint."""
+ if val < 0:
+ raise ValueError("negative mpint not allowed")
+ if not val:
+ return b""
+ nbytes = (val.bit_length() + 8) // 8
+ return utils.int_to_bytes(val, nbytes)
+
+
+class _FragList(object):
+ """Build recursive structure without data copy."""
+
+ def __init__(self, init=None):
+ self.flist = []
+ if init:
+ self.flist.extend(init)
+
+ def put_raw(self, val):
+ """Add plain bytes"""
+ self.flist.append(val)
+
+ def put_u32(self, val):
+ """Big-endian uint32"""
+ self.flist.append(_U32.pack(val))
+
+ def put_sshstr(self, val):
+ """Bytes prefixed with u32 length"""
+ if isinstance(val, (bytes, memoryview, bytearray)):
+ self.put_u32(len(val))
+ self.flist.append(val)
+ else:
+ self.put_u32(val.size())
+ self.flist.extend(val.flist)
+
+ def put_mpint(self, val):
+ """Big-endian bigint prefixed with u32 length"""
+ self.put_sshstr(_to_mpint(val))
+
+ def size(self):
+ """Current number of bytes"""
+ return sum(map(len, self.flist))
+
+ def render(self, dstbuf, pos=0):
+ """Write into bytearray"""
+ for frag in self.flist:
+ flen = len(frag)
+ start, pos = pos, pos + flen
+ dstbuf[start:pos] = frag
+ return pos
+
+ def tobytes(self):
+ """Return as bytes"""
+ buf = memoryview(bytearray(self.size()))
+ self.render(buf)
+ return buf.tobytes()
+
+
+class _SSHFormatRSA(object):
+ """Format for RSA keys.
+
+ Public:
+ mpint e, n
+ Private:
+ mpint n, e, d, iqmp, p, q
+ """
+
+ def get_public(self, data):
+ """RSA public fields"""
+ e, data = _get_mpint(data)
+ n, data = _get_mpint(data)
+ return (e, n), data
+
+ def load_public(self, key_type, data, backend):
+ """Make RSA public key from data."""
+ (e, n), data = self.get_public(data)
+ public_numbers = rsa.RSAPublicNumbers(e, n)
+ public_key = public_numbers.public_key(backend)
+ return public_key, data
+
+ def load_private(self, data, pubfields, backend):
+ """Make RSA private key from data."""
+ n, data = _get_mpint(data)
+ e, data = _get_mpint(data)
+ d, data = _get_mpint(data)
+ iqmp, data = _get_mpint(data)
+ p, data = _get_mpint(data)
+ q, data = _get_mpint(data)
+
+ if (e, n) != pubfields:
+ raise ValueError("Corrupt data: rsa field mismatch")
+ dmp1 = rsa.rsa_crt_dmp1(d, p)
+ dmq1 = rsa.rsa_crt_dmq1(d, q)
+ public_numbers = rsa.RSAPublicNumbers(e, n)
+ private_numbers = rsa.RSAPrivateNumbers(
+ p, q, d, dmp1, dmq1, iqmp, public_numbers
+ )
+ private_key = private_numbers.private_key(backend)
+ return private_key, data
+
+ def encode_public(self, public_key, f_pub):
+ """Write RSA public key"""
+ pubn = public_key.public_numbers()
+ f_pub.put_mpint(pubn.e)
+ f_pub.put_mpint(pubn.n)
+
+ def encode_private(self, private_key, f_priv):
+ """Write RSA private key"""
+ private_numbers = private_key.private_numbers()
+ public_numbers = private_numbers.public_numbers
+
+ f_priv.put_mpint(public_numbers.n)
+ f_priv.put_mpint(public_numbers.e)
+
+ f_priv.put_mpint(private_numbers.d)
+ f_priv.put_mpint(private_numbers.iqmp)
+ f_priv.put_mpint(private_numbers.p)
+ f_priv.put_mpint(private_numbers.q)
+
+
+class _SSHFormatDSA(object):
+ """Format for DSA keys.
+
+ Public:
+ mpint p, q, g, y
+ Private:
+ mpint p, q, g, y, x
+ """
+
+ def get_public(self, data):
+ """DSA public fields"""
+ p, data = _get_mpint(data)
+ q, data = _get_mpint(data)
+ g, data = _get_mpint(data)
+ y, data = _get_mpint(data)
+ return (p, q, g, y), data
+
+ def load_public(self, key_type, data, backend):
+ """Make DSA public key from data."""
+ (p, q, g, y), data = self.get_public(data)
+ parameter_numbers = dsa.DSAParameterNumbers(p, q, g)
+ public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers)
+ self._validate(public_numbers)
+ public_key = public_numbers.public_key(backend)
+ return public_key, data
+
+ def load_private(self, data, pubfields, backend):
+ """Make DSA private key from data."""
+ (p, q, g, y), data = self.get_public(data)
+ x, data = _get_mpint(data)
+
+ if (p, q, g, y) != pubfields:
+ raise ValueError("Corrupt data: dsa field mismatch")
+ parameter_numbers = dsa.DSAParameterNumbers(p, q, g)
+ public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers)
+ self._validate(public_numbers)
+ private_numbers = dsa.DSAPrivateNumbers(x, public_numbers)
+ private_key = private_numbers.private_key(backend)
+ return private_key, data
+
+ def encode_public(self, public_key, f_pub):
+ """Write DSA public key"""
+ public_numbers = public_key.public_numbers()
+ parameter_numbers = public_numbers.parameter_numbers
+ self._validate(public_numbers)
+
+ f_pub.put_mpint(parameter_numbers.p)
+ f_pub.put_mpint(parameter_numbers.q)
+ f_pub.put_mpint(parameter_numbers.g)
+ f_pub.put_mpint(public_numbers.y)
+
+ def encode_private(self, private_key, f_priv):
+ """Write DSA private key"""
+ self.encode_public(private_key.public_key(), f_priv)
+ f_priv.put_mpint(private_key.private_numbers().x)
+
+ def _validate(self, public_numbers):
+ parameter_numbers = public_numbers.parameter_numbers
+ if parameter_numbers.p.bit_length() != 1024:
+ raise ValueError("SSH supports only 1024 bit DSA keys")
+
+
+class _SSHFormatECDSA(object):
+ """Format for ECDSA keys.
+
+ Public:
+ str curve
+ bytes point
+ Private:
+ str curve
+ bytes point
+ mpint secret
+ """
+
+ def __init__(self, ssh_curve_name, curve):
+ self.ssh_curve_name = ssh_curve_name
+ self.curve = curve
+
+ def get_public(self, data):
+ """ECDSA public fields"""
+ curve, data = _get_sshstr(data)
+ point, data = _get_sshstr(data)
+ if curve != self.ssh_curve_name:
+ raise ValueError("Curve name mismatch")
+ if six.indexbytes(point, 0) != 4:
+ raise NotImplementedError("Need uncompressed point")
+ return (curve, point), data
+
+ def load_public(self, key_type, data, backend):
+ """Make ECDSA public key from data."""
+ (curve_name, point), data = self.get_public(data)
+ public_key = ec.EllipticCurvePublicKey.from_encoded_point(
+ self.curve, point.tobytes()
+ )
+ return public_key, data
+
+ def load_private(self, data, pubfields, backend):
+ """Make ECDSA private key from data."""
+ (curve_name, point), data = self.get_public(data)
+ secret, data = _get_mpint(data)
+
+ if (curve_name, point) != pubfields:
+ raise ValueError("Corrupt data: ecdsa field mismatch")
+ private_key = ec.derive_private_key(secret, self.curve, backend)
+ return private_key, data
+
+ def encode_public(self, public_key, f_pub):
+ """Write ECDSA public key"""
+ point = public_key.public_bytes(
+ Encoding.X962, PublicFormat.UncompressedPoint
+ )
+ f_pub.put_sshstr(self.ssh_curve_name)
+ f_pub.put_sshstr(point)
+
+ def encode_private(self, private_key, f_priv):
+ """Write ECDSA private key"""
+ public_key = private_key.public_key()
+ private_numbers = private_key.private_numbers()
+
+ self.encode_public(public_key, f_priv)
+ f_priv.put_mpint(private_numbers.private_value)
+
+
+class _SSHFormatEd25519(object):
+ """Format for Ed25519 keys.
+
+ Public:
+ bytes point
+ Private:
+ bytes point
+ bytes secret_and_point
+ """
+
+ def get_public(self, data):
+ """Ed25519 public fields"""
+ point, data = _get_sshstr(data)
+ return (point,), data
+
+ def load_public(self, key_type, data, backend):
+ """Make Ed25519 public key from data."""
+ (point,), data = self.get_public(data)
+ public_key = ed25519.Ed25519PublicKey.from_public_bytes(
+ point.tobytes()
+ )
+ return public_key, data
+
+ def load_private(self, data, pubfields, backend):
+ """Make Ed25519 private key from data."""
+ (point,), data = self.get_public(data)
+ keypair, data = _get_sshstr(data)
+
+ secret = keypair[:32]
+ point2 = keypair[32:]
+ if point != point2 or (point,) != pubfields:
+ raise ValueError("Corrupt data: ed25519 field mismatch")
+ private_key = ed25519.Ed25519PrivateKey.from_private_bytes(secret)
+ return private_key, data
+
+ def encode_public(self, public_key, f_pub):
+ """Write Ed25519 public key"""
+ raw_public_key = public_key.public_bytes(
+ Encoding.Raw, PublicFormat.Raw
+ )
+ f_pub.put_sshstr(raw_public_key)
+
+ def encode_private(self, private_key, f_priv):
+ """Write Ed25519 private key"""
+ public_key = private_key.public_key()
+ raw_private_key = private_key.private_bytes(
+ Encoding.Raw, PrivateFormat.Raw, NoEncryption()
+ )
+ raw_public_key = public_key.public_bytes(
+ Encoding.Raw, PublicFormat.Raw
+ )
+ f_keypair = _FragList([raw_private_key, raw_public_key])
+
+ self.encode_public(public_key, f_priv)
+ f_priv.put_sshstr(f_keypair)
+
+
+_KEY_FORMATS = {
+ _SSH_RSA: _SSHFormatRSA(),
+ _SSH_DSA: _SSHFormatDSA(),
+ _SSH_ED25519: _SSHFormatEd25519(),
+ _ECDSA_NISTP256: _SSHFormatECDSA(b"nistp256", ec.SECP256R1()),
+ _ECDSA_NISTP384: _SSHFormatECDSA(b"nistp384", ec.SECP384R1()),
+ _ECDSA_NISTP521: _SSHFormatECDSA(b"nistp521", ec.SECP521R1()),
+}
+
+
+def _lookup_kformat(key_type):
+ """Return valid format or throw error"""
+ if not isinstance(key_type, bytes):
+ key_type = memoryview(key_type).tobytes()
+ if key_type in _KEY_FORMATS:
+ return _KEY_FORMATS[key_type]
+ raise UnsupportedAlgorithm("Unsupported key type: %r" % key_type)
+
+
+def load_ssh_private_key(data, password, backend=None):
+ """Load private key from OpenSSH custom encoding."""
+ utils._check_byteslike("data", data)
+ backend = _get_backend(backend)
+ if password is not None:
+ utils._check_bytes("password", password)
+
+ m = _PEM_RC.search(data)
+ if not m:
+ raise ValueError("Not OpenSSH private key format")
+ p1 = m.start(1)
+ p2 = m.end(1)
+ data = binascii.a2b_base64(memoryview(data)[p1:p2])
+ if not data.startswith(_SK_MAGIC):
+ raise ValueError("Not OpenSSH private key format")
+ data = memoryview(data)[len(_SK_MAGIC) :]
+
+ # parse header
+ ciphername, data = _get_sshstr(data)
+ kdfname, data = _get_sshstr(data)
+ kdfoptions, data = _get_sshstr(data)
+ nkeys, data = _get_u32(data)
+ if nkeys != 1:
+ raise ValueError("Only one key supported")
+
+ # load public key data
+ pubdata, data = _get_sshstr(data)
+ pub_key_type, pubdata = _get_sshstr(pubdata)
+ kformat = _lookup_kformat(pub_key_type)
+ pubfields, pubdata = kformat.get_public(pubdata)
+ _check_empty(pubdata)
+
+ # load secret data
+ edata, data = _get_sshstr(data)
+ _check_empty(data)
+
+ if (ciphername, kdfname) != (_NONE, _NONE):
+ ciphername = ciphername.tobytes()
+ if ciphername not in _SSH_CIPHERS:
+ raise UnsupportedAlgorithm("Unsupported cipher: %r" % ciphername)
+ if kdfname != _BCRYPT:
+ raise UnsupportedAlgorithm("Unsupported KDF: %r" % kdfname)
+ blklen = _SSH_CIPHERS[ciphername][3]
+ _check_block_size(edata, blklen)
+ salt, kbuf = _get_sshstr(kdfoptions)
+ rounds, kbuf = _get_u32(kbuf)
+ _check_empty(kbuf)
+ ciph = _init_cipher(
+ ciphername, password, salt.tobytes(), rounds, backend
+ )
+ edata = memoryview(ciph.decryptor().update(edata))
+ else:
+ blklen = 8
+ _check_block_size(edata, blklen)
+ ck1, edata = _get_u32(edata)
+ ck2, edata = _get_u32(edata)
+ if ck1 != ck2:
+ raise ValueError("Corrupt data: broken checksum")
+
+ # load per-key struct
+ key_type, edata = _get_sshstr(edata)
+ if key_type != pub_key_type:
+ raise ValueError("Corrupt data: key type mismatch")
+ private_key, edata = kformat.load_private(edata, pubfields, backend)
+ comment, edata = _get_sshstr(edata)
+
+    # Note: OpenSSH verifies the padding only *after* all other parsing is
+    # done; we follow suit because it also writes zero-byte padding.
+ if edata != _PADDING[: len(edata)]:
+ raise ValueError("Corrupt data: invalid padding")
+
+ return private_key
+
+
+def serialize_ssh_private_key(private_key, password=None):
+ """Serialize private key with OpenSSH custom encoding."""
+ if password is not None:
+ utils._check_bytes("password", password)
+ if password and len(password) > _MAX_PASSWORD:
+ raise ValueError(
+ "Passwords longer than 72 bytes are not supported by "
+ "OpenSSH private key format"
+ )
+
+ if isinstance(private_key, ec.EllipticCurvePrivateKey):
+ key_type = _ecdsa_key_type(private_key.public_key())
+ elif isinstance(private_key, rsa.RSAPrivateKey):
+ key_type = _SSH_RSA
+ elif isinstance(private_key, dsa.DSAPrivateKey):
+ key_type = _SSH_DSA
+ elif isinstance(private_key, ed25519.Ed25519PrivateKey):
+ key_type = _SSH_ED25519
+ else:
+ raise ValueError("Unsupported key type")
+ kformat = _lookup_kformat(key_type)
+
+ # setup parameters
+ f_kdfoptions = _FragList()
+ if password:
+ ciphername = _DEFAULT_CIPHER
+ blklen = _SSH_CIPHERS[ciphername][3]
+ kdfname = _BCRYPT
+ rounds = _DEFAULT_ROUNDS
+ salt = os.urandom(16)
+ f_kdfoptions.put_sshstr(salt)
+ f_kdfoptions.put_u32(rounds)
+ backend = _get_backend(None)
+ ciph = _init_cipher(ciphername, password, salt, rounds, backend)
+ else:
+ ciphername = kdfname = _NONE
+ blklen = 8
+ ciph = None
+ nkeys = 1
+ checkval = os.urandom(4)
+ comment = b""
+
+ # encode public and private parts together
+ f_public_key = _FragList()
+ f_public_key.put_sshstr(key_type)
+ kformat.encode_public(private_key.public_key(), f_public_key)
+
+ f_secrets = _FragList([checkval, checkval])
+ f_secrets.put_sshstr(key_type)
+ kformat.encode_private(private_key, f_secrets)
+ f_secrets.put_sshstr(comment)
+ f_secrets.put_raw(_PADDING[: blklen - (f_secrets.size() % blklen)])
+
+ # top-level structure
+ f_main = _FragList()
+ f_main.put_raw(_SK_MAGIC)
+ f_main.put_sshstr(ciphername)
+ f_main.put_sshstr(kdfname)
+ f_main.put_sshstr(f_kdfoptions)
+ f_main.put_u32(nkeys)
+ f_main.put_sshstr(f_public_key)
+ f_main.put_sshstr(f_secrets)
+
+    # copy result into bytearray
+ slen = f_secrets.size()
+ mlen = f_main.size()
+ buf = memoryview(bytearray(mlen + blklen))
+ f_main.render(buf)
+ ofs = mlen - slen
+
+ # encrypt in-place
+ if ciph is not None:
+ ciph.encryptor().update_into(buf[ofs:mlen], buf[ofs:])
+
+ txt = _ssh_pem_encode(buf[:mlen])
+ buf[ofs:mlen] = bytearray(slen)
+ return txt
+
+
+def load_ssh_public_key(data, backend=None):
+ """Load public key from OpenSSH one-line format."""
+ backend = _get_backend(backend)
+ utils._check_byteslike("data", data)
+
+ m = _SSH_PUBKEY_RC.match(data)
+ if not m:
+ raise ValueError("Invalid line format")
+ key_type = orig_key_type = m.group(1)
+ key_body = m.group(2)
+ with_cert = False
+ if _CERT_SUFFIX == key_type[-len(_CERT_SUFFIX) :]:
+ with_cert = True
+ key_type = key_type[: -len(_CERT_SUFFIX)]
+ kformat = _lookup_kformat(key_type)
+
+ try:
+ data = memoryview(binascii.a2b_base64(key_body))
+ except (TypeError, binascii.Error):
+ raise ValueError("Invalid key format")
+
+ inner_key_type, data = _get_sshstr(data)
+ if inner_key_type != orig_key_type:
+ raise ValueError("Invalid key format")
+ if with_cert:
+ nonce, data = _get_sshstr(data)
+ public_key, data = kformat.load_public(key_type, data, backend)
+ if with_cert:
+ serial, data = _get_u64(data)
+ cctype, data = _get_u32(data)
+ key_id, data = _get_sshstr(data)
+ principals, data = _get_sshstr(data)
+ valid_after, data = _get_u64(data)
+ valid_before, data = _get_u64(data)
+ crit_options, data = _get_sshstr(data)
+ extensions, data = _get_sshstr(data)
+ reserved, data = _get_sshstr(data)
+ sig_key, data = _get_sshstr(data)
+ signature, data = _get_sshstr(data)
+ _check_empty(data)
+ return public_key
+
+
+def serialize_ssh_public_key(public_key):
+ """One-line public key format for OpenSSH"""
+ if isinstance(public_key, ec.EllipticCurvePublicKey):
+ key_type = _ecdsa_key_type(public_key)
+ elif isinstance(public_key, rsa.RSAPublicKey):
+ key_type = _SSH_RSA
+ elif isinstance(public_key, dsa.DSAPublicKey):
+ key_type = _SSH_DSA
+ elif isinstance(public_key, ed25519.Ed25519PublicKey):
+ key_type = _SSH_ED25519
+ else:
+ raise ValueError("Unsupported key type")
+ kformat = _lookup_kformat(key_type)
+
+ f_pub = _FragList()
+ f_pub.put_sshstr(key_type)
+ kformat.encode_public(public_key, f_pub)
+
+ pub = binascii.b2a_base64(f_pub.tobytes()).strip()
+ return b"".join([key_type, b" ", pub])
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/twofactor/__init__.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/twofactor/__init__.py
new file mode 100644
index 0000000000..e71f9e67a3
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/twofactor/__init__.py
@@ -0,0 +1,9 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+
+class InvalidToken(Exception):
+ pass
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/twofactor/hotp.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/twofactor/hotp.py
new file mode 100644
index 0000000000..c00eec0e54
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/twofactor/hotp.py
@@ -0,0 +1,69 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import struct
+
+import six
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.backends.interfaces import HMACBackend
+from cryptography.hazmat.primitives import constant_time, hmac
+from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512
+from cryptography.hazmat.primitives.twofactor import InvalidToken
+from cryptography.hazmat.primitives.twofactor.utils import _generate_uri
+
+
+class HOTP(object):
+ def __init__(
+ self, key, length, algorithm, backend=None, enforce_key_length=True
+ ):
+ backend = _get_backend(backend)
+ if not isinstance(backend, HMACBackend):
+ raise UnsupportedAlgorithm(
+ "Backend object does not implement HMACBackend.",
+ _Reasons.BACKEND_MISSING_INTERFACE,
+ )
+
+ if len(key) < 16 and enforce_key_length is True:
+ raise ValueError("Key length has to be at least 128 bits.")
+
+ if not isinstance(length, six.integer_types):
+ raise TypeError("Length parameter must be an integer type.")
+
+ if length < 6 or length > 8:
+ raise ValueError("Length of HOTP has to be between 6 to 8.")
+
+ if not isinstance(algorithm, (SHA1, SHA256, SHA512)):
+ raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.")
+
+ self._key = key
+ self._length = length
+ self._algorithm = algorithm
+ self._backend = backend
+
+ def generate(self, counter):
+ truncated_value = self._dynamic_truncate(counter)
+ hotp = truncated_value % (10 ** self._length)
+ return "{0:0{1}}".format(hotp, self._length).encode()
+
+ def verify(self, hotp, counter):
+ if not constant_time.bytes_eq(self.generate(counter), hotp):
+ raise InvalidToken("Supplied HOTP value does not match.")
+
+ def _dynamic_truncate(self, counter):
+ ctx = hmac.HMAC(self._key, self._algorithm, self._backend)
+ ctx.update(struct.pack(">Q", counter))
+ hmac_value = ctx.finalize()
+
+ offset = six.indexbytes(hmac_value, len(hmac_value) - 1) & 0b1111
+ p = hmac_value[offset : offset + 4]
+ return struct.unpack(">I", p)[0] & 0x7FFFFFFF
+
+ def get_provisioning_uri(self, account_name, counter, issuer):
+ return _generate_uri(
+ self, "hotp", account_name, issuer, [("counter", int(counter))]
+ )
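HOTP above follows RFC 4226: HMAC over the big-endian 64-bit counter, dynamic truncation of the digest, then reduction modulo 10**length. A short usage sketch with an illustrative random secret:

    import os

    from cryptography.hazmat.primitives.hashes import SHA1
    from cryptography.hazmat.primitives.twofactor.hotp import HOTP

    key = os.urandom(20)             # shared secret; at least 128 bits is enforced above
    hotp = HOTP(key, 6, SHA1())      # 6-digit codes over HMAC-SHA1

    token = hotp.generate(counter=0)  # zero-padded ASCII bytes, e.g. b"042013" (value illustrative)
    hotp.verify(token, 0)             # constant-time check; raises twofactor.InvalidToken on mismatch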
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/twofactor/totp.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/twofactor/totp.py
new file mode 100644
index 0000000000..d59539b3f9
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/twofactor/totp.py
@@ -0,0 +1,51 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.backends.interfaces import HMACBackend
+from cryptography.hazmat.primitives import constant_time
+from cryptography.hazmat.primitives.twofactor import InvalidToken
+from cryptography.hazmat.primitives.twofactor.hotp import HOTP
+from cryptography.hazmat.primitives.twofactor.utils import _generate_uri
+
+
+class TOTP(object):
+ def __init__(
+ self,
+ key,
+ length,
+ algorithm,
+ time_step,
+ backend=None,
+ enforce_key_length=True,
+ ):
+ backend = _get_backend(backend)
+ if not isinstance(backend, HMACBackend):
+ raise UnsupportedAlgorithm(
+ "Backend object does not implement HMACBackend.",
+ _Reasons.BACKEND_MISSING_INTERFACE,
+ )
+
+ self._time_step = time_step
+ self._hotp = HOTP(key, length, algorithm, backend, enforce_key_length)
+
+ def generate(self, time):
+ counter = int(time / self._time_step)
+ return self._hotp.generate(counter)
+
+ def verify(self, totp, time):
+ if not constant_time.bytes_eq(self.generate(time), totp):
+ raise InvalidToken("Supplied TOTP value does not match.")
+
+ def get_provisioning_uri(self, account_name, issuer):
+ return _generate_uri(
+ self._hotp,
+ "totp",
+ account_name,
+ issuer,
+ [("period", int(self._time_step))],
+ )
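TOTP simply maps wall-clock time onto an HOTP counter (counter = int(time / time_step)), so any verification tolerance is whatever window the caller checks. A minimal sketch with illustrative parameters:

    import time

    from cryptography.hazmat.primitives.hashes import SHA1
    from cryptography.hazmat.primitives.twofactor.totp import TOTP

    key = b"\x00" * 20                # illustrative; use a random 160-bit secret in practice
    totp = TOTP(key, 6, SHA1(), 30)   # 6 digits, 30-second time step

    now = time.time()
    code = totp.generate(now)         # same as HOTP over counter int(now / 30)
    totp.verify(code, now)            # raises twofactor.InvalidToken on mismatch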
diff --git a/contrib/python/cryptography/py2/cryptography/hazmat/primitives/twofactor/utils.py b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/twofactor/utils.py
new file mode 100644
index 0000000000..0afa1ccc04
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/hazmat/primitives/twofactor/utils.py
@@ -0,0 +1,33 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import base64
+
+from six.moves.urllib.parse import quote, urlencode
+
+
+def _generate_uri(hotp, type_name, account_name, issuer, extra_parameters):
+ parameters = [
+ ("digits", hotp._length),
+ ("secret", base64.b32encode(hotp._key)),
+ ("algorithm", hotp._algorithm.name.upper()),
+ ]
+
+ if issuer is not None:
+ parameters.append(("issuer", issuer))
+
+ parameters.extend(extra_parameters)
+
+ uriparts = {
+ "type": type_name,
+ "label": (
+ "%s:%s" % (quote(issuer), quote(account_name))
+ if issuer
+ else quote(account_name)
+ ),
+ "parameters": urlencode(parameters),
+ }
+ return "otpauth://{type}/{label}?{parameters}".format(**uriparts)
diff --git a/contrib/python/cryptography/py2/cryptography/utils.py b/contrib/python/cryptography/py2/cryptography/utils.py
new file mode 100644
index 0000000000..bdb3dbf477
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/utils.py
@@ -0,0 +1,171 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+import binascii
+import inspect
+import sys
+import warnings
+
+
+# We use a UserWarning subclass, instead of DeprecationWarning, because CPython
+# decided deprecation warnings should be invisible by default.
+class CryptographyDeprecationWarning(UserWarning):
+ pass
+
+
+# Several APIs were deprecated with no specific end-of-life date because of the
+# ubiquity of their use. They should not be removed until we agree on when that
+# cycle ends.
+PersistentlyDeprecated2017 = CryptographyDeprecationWarning
+PersistentlyDeprecated2019 = CryptographyDeprecationWarning
+
+
+def _check_bytes(name, value):
+ if not isinstance(value, bytes):
+ raise TypeError("{} must be bytes".format(name))
+
+
+def _check_byteslike(name, value):
+ try:
+ memoryview(value)
+ except TypeError:
+ raise TypeError("{} must be bytes-like".format(name))
+
+
+def read_only_property(name):
+ return property(lambda self: getattr(self, name))
+
+
+def register_interface(iface):
+ def register_decorator(klass):
+ verify_interface(iface, klass)
+ iface.register(klass)
+ return klass
+
+ return register_decorator
+
+
+def register_interface_if(predicate, iface):
+ def register_decorator(klass):
+ if predicate:
+ verify_interface(iface, klass)
+ iface.register(klass)
+ return klass
+
+ return register_decorator
+
+
+if hasattr(int, "from_bytes"):
+ int_from_bytes = int.from_bytes
+else:
+
+ def int_from_bytes(data, byteorder, signed=False):
+ assert byteorder == "big"
+ assert not signed
+
+ return int(binascii.hexlify(data), 16)
+
+
+if hasattr(int, "to_bytes"):
+
+ def int_to_bytes(integer, length=None):
+ return integer.to_bytes(
+ length or (integer.bit_length() + 7) // 8 or 1, "big"
+ )
+
+
+else:
+
+ def int_to_bytes(integer, length=None):
+ hex_string = "%x" % integer
+ if length is None:
+ n = len(hex_string)
+ else:
+ n = length * 2
+ return binascii.unhexlify(hex_string.zfill(n + (n & 1)))
+
+
+class InterfaceNotImplemented(Exception):
+ pass
+
+
+if hasattr(inspect, "signature"):
+ signature = inspect.signature
+else:
+ signature = inspect.getargspec
+
+
+def verify_interface(iface, klass):
+ for method in iface.__abstractmethods__:
+ if not hasattr(klass, method):
+ raise InterfaceNotImplemented(
+ "{} is missing a {!r} method".format(klass, method)
+ )
+ if isinstance(getattr(iface, method), abc.abstractproperty):
+ # Can't properly verify these yet.
+ continue
+ sig = signature(getattr(iface, method))
+ actual = signature(getattr(klass, method))
+ if sig != actual:
+ raise InterfaceNotImplemented(
+ "{}.{}'s signature differs from the expected. Expected: "
+ "{!r}. Received: {!r}".format(klass, method, sig, actual)
+ )
+
+
+class _DeprecatedValue(object):
+ def __init__(self, value, message, warning_class):
+ self.value = value
+ self.message = message
+ self.warning_class = warning_class
+
+
+class _ModuleWithDeprecations(object):
+ def __init__(self, module):
+ self.__dict__["_module"] = module
+
+ def __getattr__(self, attr):
+ obj = getattr(self._module, attr)
+ if isinstance(obj, _DeprecatedValue):
+ warnings.warn(obj.message, obj.warning_class, stacklevel=2)
+ obj = obj.value
+ return obj
+
+ def __setattr__(self, attr, value):
+ setattr(self._module, attr, value)
+
+ def __delattr__(self, attr):
+ obj = getattr(self._module, attr)
+ if isinstance(obj, _DeprecatedValue):
+ warnings.warn(obj.message, obj.warning_class, stacklevel=2)
+
+ delattr(self._module, attr)
+
+ def __dir__(self):
+ return ["_module"] + dir(self._module)
+
+
+def deprecated(value, module_name, message, warning_class):
+ module = sys.modules[module_name]
+ if not isinstance(module, _ModuleWithDeprecations):
+ sys.modules[module_name] = _ModuleWithDeprecations(module)
+ return _DeprecatedValue(value, message, warning_class)
+
+
+def cached_property(func):
+ cached_name = "_cached_{}".format(func)
+ sentinel = object()
+
+ def inner(instance):
+ cache = getattr(instance, cached_name, sentinel)
+ if cache is not sentinel:
+ return cache
+ result = func(instance)
+ setattr(instance, cached_name, result)
+ return result
+
+ return property(inner)
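Besides the deprecation machinery, this module provides the integer/bytes helpers and cached_property used throughout the package. A small self-contained sketch of their behaviour:

    from cryptography import utils

    assert utils.int_to_bytes(65537) == b"\x01\x00\x01"            # minimal big-endian encoding
    assert utils.int_to_bytes(65537, length=4) == b"\x00\x01\x00\x01"
    assert utils.int_from_bytes(b"\x01\x00\x01", "big") == 65537

    class Thing(object):
        calls = 0

        @utils.cached_property
        def expensive(self):
            Thing.calls += 1
            return 42

    t = Thing()
    assert t.expensive == 42 and t.expensive == 42
    assert Thing.calls == 1    # second access is served from the per-instance cache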
diff --git a/contrib/python/cryptography/py2/cryptography/x509/__init__.py b/contrib/python/cryptography/py2/cryptography/x509/__init__.py
new file mode 100644
index 0000000000..69630e4cba
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/x509/__init__.py
@@ -0,0 +1,248 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography.x509 import certificate_transparency
+from cryptography.x509.base import (
+ AttributeNotFound,
+ Certificate,
+ CertificateBuilder,
+ CertificateRevocationList,
+ CertificateRevocationListBuilder,
+ CertificateSigningRequest,
+ CertificateSigningRequestBuilder,
+ InvalidVersion,
+ RevokedCertificate,
+ RevokedCertificateBuilder,
+ Version,
+ load_der_x509_certificate,
+ load_der_x509_crl,
+ load_der_x509_csr,
+ load_pem_x509_certificate,
+ load_pem_x509_crl,
+ load_pem_x509_csr,
+ random_serial_number,
+)
+from cryptography.x509.extensions import (
+ AccessDescription,
+ AuthorityInformationAccess,
+ AuthorityKeyIdentifier,
+ BasicConstraints,
+ CRLDistributionPoints,
+ CRLNumber,
+ CRLReason,
+ CertificateIssuer,
+ CertificatePolicies,
+ DeltaCRLIndicator,
+ DistributionPoint,
+ DuplicateExtension,
+ ExtendedKeyUsage,
+ Extension,
+ ExtensionNotFound,
+ ExtensionType,
+ Extensions,
+ FreshestCRL,
+ GeneralNames,
+ InhibitAnyPolicy,
+ InvalidityDate,
+ IssuerAlternativeName,
+ IssuingDistributionPoint,
+ KeyUsage,
+ NameConstraints,
+ NoticeReference,
+ OCSPNoCheck,
+ OCSPNonce,
+ PolicyConstraints,
+ PolicyInformation,
+ PrecertPoison,
+ PrecertificateSignedCertificateTimestamps,
+ ReasonFlags,
+ SignedCertificateTimestamps,
+ SubjectAlternativeName,
+ SubjectInformationAccess,
+ SubjectKeyIdentifier,
+ TLSFeature,
+ TLSFeatureType,
+ UnrecognizedExtension,
+ UserNotice,
+)
+from cryptography.x509.general_name import (
+ DNSName,
+ DirectoryName,
+ GeneralName,
+ IPAddress,
+ OtherName,
+ RFC822Name,
+ RegisteredID,
+ UniformResourceIdentifier,
+ UnsupportedGeneralNameType,
+ _GENERAL_NAMES,
+)
+from cryptography.x509.name import (
+ Name,
+ NameAttribute,
+ RelativeDistinguishedName,
+)
+from cryptography.x509.oid import (
+ AuthorityInformationAccessOID,
+ CRLEntryExtensionOID,
+ CertificatePoliciesOID,
+ ExtendedKeyUsageOID,
+ ExtensionOID,
+ NameOID,
+ ObjectIdentifier,
+ SignatureAlgorithmOID,
+ _SIG_OIDS_TO_HASH,
+)
+
+
+OID_AUTHORITY_INFORMATION_ACCESS = ExtensionOID.AUTHORITY_INFORMATION_ACCESS
+OID_AUTHORITY_KEY_IDENTIFIER = ExtensionOID.AUTHORITY_KEY_IDENTIFIER
+OID_BASIC_CONSTRAINTS = ExtensionOID.BASIC_CONSTRAINTS
+OID_CERTIFICATE_POLICIES = ExtensionOID.CERTIFICATE_POLICIES
+OID_CRL_DISTRIBUTION_POINTS = ExtensionOID.CRL_DISTRIBUTION_POINTS
+OID_EXTENDED_KEY_USAGE = ExtensionOID.EXTENDED_KEY_USAGE
+OID_FRESHEST_CRL = ExtensionOID.FRESHEST_CRL
+OID_INHIBIT_ANY_POLICY = ExtensionOID.INHIBIT_ANY_POLICY
+OID_ISSUER_ALTERNATIVE_NAME = ExtensionOID.ISSUER_ALTERNATIVE_NAME
+OID_KEY_USAGE = ExtensionOID.KEY_USAGE
+OID_NAME_CONSTRAINTS = ExtensionOID.NAME_CONSTRAINTS
+OID_OCSP_NO_CHECK = ExtensionOID.OCSP_NO_CHECK
+OID_POLICY_CONSTRAINTS = ExtensionOID.POLICY_CONSTRAINTS
+OID_POLICY_MAPPINGS = ExtensionOID.POLICY_MAPPINGS
+OID_SUBJECT_ALTERNATIVE_NAME = ExtensionOID.SUBJECT_ALTERNATIVE_NAME
+OID_SUBJECT_DIRECTORY_ATTRIBUTES = ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES
+OID_SUBJECT_INFORMATION_ACCESS = ExtensionOID.SUBJECT_INFORMATION_ACCESS
+OID_SUBJECT_KEY_IDENTIFIER = ExtensionOID.SUBJECT_KEY_IDENTIFIER
+
+OID_DSA_WITH_SHA1 = SignatureAlgorithmOID.DSA_WITH_SHA1
+OID_DSA_WITH_SHA224 = SignatureAlgorithmOID.DSA_WITH_SHA224
+OID_DSA_WITH_SHA256 = SignatureAlgorithmOID.DSA_WITH_SHA256
+OID_ECDSA_WITH_SHA1 = SignatureAlgorithmOID.ECDSA_WITH_SHA1
+OID_ECDSA_WITH_SHA224 = SignatureAlgorithmOID.ECDSA_WITH_SHA224
+OID_ECDSA_WITH_SHA256 = SignatureAlgorithmOID.ECDSA_WITH_SHA256
+OID_ECDSA_WITH_SHA384 = SignatureAlgorithmOID.ECDSA_WITH_SHA384
+OID_ECDSA_WITH_SHA512 = SignatureAlgorithmOID.ECDSA_WITH_SHA512
+OID_RSA_WITH_MD5 = SignatureAlgorithmOID.RSA_WITH_MD5
+OID_RSA_WITH_SHA1 = SignatureAlgorithmOID.RSA_WITH_SHA1
+OID_RSA_WITH_SHA224 = SignatureAlgorithmOID.RSA_WITH_SHA224
+OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256
+OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384
+OID_RSA_WITH_SHA512 = SignatureAlgorithmOID.RSA_WITH_SHA512
+OID_RSASSA_PSS = SignatureAlgorithmOID.RSASSA_PSS
+
+OID_COMMON_NAME = NameOID.COMMON_NAME
+OID_COUNTRY_NAME = NameOID.COUNTRY_NAME
+OID_DOMAIN_COMPONENT = NameOID.DOMAIN_COMPONENT
+OID_DN_QUALIFIER = NameOID.DN_QUALIFIER
+OID_EMAIL_ADDRESS = NameOID.EMAIL_ADDRESS
+OID_GENERATION_QUALIFIER = NameOID.GENERATION_QUALIFIER
+OID_GIVEN_NAME = NameOID.GIVEN_NAME
+OID_LOCALITY_NAME = NameOID.LOCALITY_NAME
+OID_ORGANIZATIONAL_UNIT_NAME = NameOID.ORGANIZATIONAL_UNIT_NAME
+OID_ORGANIZATION_NAME = NameOID.ORGANIZATION_NAME
+OID_PSEUDONYM = NameOID.PSEUDONYM
+OID_SERIAL_NUMBER = NameOID.SERIAL_NUMBER
+OID_STATE_OR_PROVINCE_NAME = NameOID.STATE_OR_PROVINCE_NAME
+OID_SURNAME = NameOID.SURNAME
+OID_TITLE = NameOID.TITLE
+
+OID_CLIENT_AUTH = ExtendedKeyUsageOID.CLIENT_AUTH
+OID_CODE_SIGNING = ExtendedKeyUsageOID.CODE_SIGNING
+OID_EMAIL_PROTECTION = ExtendedKeyUsageOID.EMAIL_PROTECTION
+OID_OCSP_SIGNING = ExtendedKeyUsageOID.OCSP_SIGNING
+OID_SERVER_AUTH = ExtendedKeyUsageOID.SERVER_AUTH
+OID_TIME_STAMPING = ExtendedKeyUsageOID.TIME_STAMPING
+
+OID_ANY_POLICY = CertificatePoliciesOID.ANY_POLICY
+OID_CPS_QUALIFIER = CertificatePoliciesOID.CPS_QUALIFIER
+OID_CPS_USER_NOTICE = CertificatePoliciesOID.CPS_USER_NOTICE
+
+OID_CERTIFICATE_ISSUER = CRLEntryExtensionOID.CERTIFICATE_ISSUER
+OID_CRL_REASON = CRLEntryExtensionOID.CRL_REASON
+OID_INVALIDITY_DATE = CRLEntryExtensionOID.INVALIDITY_DATE
+
+OID_CA_ISSUERS = AuthorityInformationAccessOID.CA_ISSUERS
+OID_OCSP = AuthorityInformationAccessOID.OCSP
+
+__all__ = [
+ "certificate_transparency",
+ "load_pem_x509_certificate",
+ "load_der_x509_certificate",
+ "load_pem_x509_csr",
+ "load_der_x509_csr",
+ "load_pem_x509_crl",
+ "load_der_x509_crl",
+ "random_serial_number",
+ "AttributeNotFound",
+ "InvalidVersion",
+ "DeltaCRLIndicator",
+ "DuplicateExtension",
+ "ExtensionNotFound",
+ "UnsupportedGeneralNameType",
+ "NameAttribute",
+ "Name",
+ "RelativeDistinguishedName",
+ "ObjectIdentifier",
+ "ExtensionType",
+ "Extensions",
+ "Extension",
+ "ExtendedKeyUsage",
+ "FreshestCRL",
+ "IssuingDistributionPoint",
+ "TLSFeature",
+ "TLSFeatureType",
+ "OCSPNoCheck",
+ "BasicConstraints",
+ "CRLNumber",
+ "KeyUsage",
+ "AuthorityInformationAccess",
+ "SubjectInformationAccess",
+ "AccessDescription",
+ "CertificatePolicies",
+ "PolicyInformation",
+ "UserNotice",
+ "NoticeReference",
+ "SubjectKeyIdentifier",
+ "NameConstraints",
+ "CRLDistributionPoints",
+ "DistributionPoint",
+ "ReasonFlags",
+ "InhibitAnyPolicy",
+ "SubjectAlternativeName",
+ "IssuerAlternativeName",
+ "AuthorityKeyIdentifier",
+ "GeneralNames",
+ "GeneralName",
+ "RFC822Name",
+ "DNSName",
+ "UniformResourceIdentifier",
+ "RegisteredID",
+ "DirectoryName",
+ "IPAddress",
+ "OtherName",
+ "Certificate",
+ "CertificateRevocationList",
+ "CertificateRevocationListBuilder",
+ "CertificateSigningRequest",
+ "RevokedCertificate",
+ "RevokedCertificateBuilder",
+ "CertificateSigningRequestBuilder",
+ "CertificateBuilder",
+ "Version",
+ "_SIG_OIDS_TO_HASH",
+ "OID_CA_ISSUERS",
+ "OID_OCSP",
+ "_GENERAL_NAMES",
+ "CertificateIssuer",
+ "CRLReason",
+ "InvalidityDate",
+ "UnrecognizedExtension",
+ "PolicyConstraints",
+ "PrecertificateSignedCertificateTimestamps",
+ "PrecertPoison",
+ "OCSPNonce",
+ "SignedCertificateTimestamps",
+]
diff --git a/contrib/python/cryptography/py2/cryptography/x509/base.py b/contrib/python/cryptography/py2/cryptography/x509/base.py
new file mode 100644
index 0000000000..f3bc872b94
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/x509/base.py
@@ -0,0 +1,892 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+import datetime
+import os
+from enum import Enum
+
+import six
+
+from cryptography import utils
+from cryptography.hazmat.backends import _get_backend
+from cryptography.hazmat.primitives.asymmetric import (
+ dsa,
+ ec,
+ ed25519,
+ ed448,
+ rsa,
+)
+from cryptography.x509.extensions import Extension, ExtensionType
+from cryptography.x509.name import Name
+from cryptography.x509.oid import ObjectIdentifier
+
+
+_EARLIEST_UTC_TIME = datetime.datetime(1950, 1, 1)
+
+
+class AttributeNotFound(Exception):
+ def __init__(self, msg, oid):
+ super(AttributeNotFound, self).__init__(msg)
+ self.oid = oid
+
+
+def _reject_duplicate_extension(extension, extensions):
+ # This is quadratic in the number of extensions
+ for e in extensions:
+ if e.oid == extension.oid:
+ raise ValueError("This extension has already been set.")
+
+
+def _reject_duplicate_attribute(oid, attributes):
+ # This is quadratic in the number of attributes
+ for attr_oid, _ in attributes:
+ if attr_oid == oid:
+ raise ValueError("This attribute has already been set.")
+
+
+def _convert_to_naive_utc_time(time):
+ """Normalizes a datetime to a naive datetime in UTC.
+
+ time -- datetime to normalize. Assumed to be in UTC if not timezone
+ aware.
+ """
+ if time.tzinfo is not None:
+ offset = time.utcoffset()
+ offset = offset if offset else datetime.timedelta()
+ return time.replace(tzinfo=None) - offset
+ else:
+ return time
+
+
+class Version(Enum):
+ v1 = 0
+ v3 = 2
+
+
+def load_pem_x509_certificate(data, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_pem_x509_certificate(data)
+
+
+def load_der_x509_certificate(data, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_der_x509_certificate(data)
+
+
+def load_pem_x509_csr(data, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_pem_x509_csr(data)
+
+
+def load_der_x509_csr(data, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_der_x509_csr(data)
+
+
+def load_pem_x509_crl(data, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_pem_x509_crl(data)
+
+
+def load_der_x509_crl(data, backend=None):
+ backend = _get_backend(backend)
+ return backend.load_der_x509_crl(data)
+
+
+class InvalidVersion(Exception):
+ def __init__(self, msg, parsed_version):
+ super(InvalidVersion, self).__init__(msg)
+ self.parsed_version = parsed_version
+
+
+@six.add_metaclass(abc.ABCMeta)
+class Certificate(object):
+ @abc.abstractmethod
+ def fingerprint(self, algorithm):
+ """
+ Returns bytes using digest passed.
+ """
+
+ @abc.abstractproperty
+ def serial_number(self):
+ """
+ Returns certificate serial number
+ """
+
+ @abc.abstractproperty
+ def version(self):
+ """
+ Returns the certificate version
+ """
+
+ @abc.abstractmethod
+ def public_key(self):
+ """
+ Returns the public key
+ """
+
+ @abc.abstractproperty
+ def not_valid_before(self):
+ """
+ Not before time (represented as UTC datetime)
+ """
+
+ @abc.abstractproperty
+ def not_valid_after(self):
+ """
+ Not after time (represented as UTC datetime)
+ """
+
+ @abc.abstractproperty
+ def issuer(self):
+ """
+ Returns the issuer name object.
+ """
+
+ @abc.abstractproperty
+ def subject(self):
+ """
+ Returns the subject name object.
+ """
+
+ @abc.abstractproperty
+ def signature_hash_algorithm(self):
+ """
+ Returns a HashAlgorithm corresponding to the type of the digest signed
+ in the certificate.
+ """
+
+ @abc.abstractproperty
+ def signature_algorithm_oid(self):
+ """
+ Returns the ObjectIdentifier of the signature algorithm.
+ """
+
+ @abc.abstractproperty
+ def extensions(self):
+ """
+ Returns an Extensions object.
+ """
+
+ @abc.abstractproperty
+ def signature(self):
+ """
+ Returns the signature bytes.
+ """
+
+ @abc.abstractproperty
+ def tbs_certificate_bytes(self):
+ """
+ Returns the tbsCertificate payload bytes as defined in RFC 5280.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other):
+ """
+ Checks equality.
+ """
+
+ @abc.abstractmethod
+ def __ne__(self, other):
+ """
+ Checks not equal.
+ """
+
+ @abc.abstractmethod
+ def __hash__(self):
+ """
+ Computes a hash.
+ """
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding):
+ """
+ Serializes the certificate to PEM or DER format.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class CertificateRevocationList(object):
+ @abc.abstractmethod
+ def public_bytes(self, encoding):
+ """
+ Serializes the CRL to PEM or DER format.
+ """
+
+ @abc.abstractmethod
+ def fingerprint(self, algorithm):
+ """
+ Returns bytes using digest passed.
+ """
+
+ @abc.abstractmethod
+ def get_revoked_certificate_by_serial_number(self, serial_number):
+ """
+ Returns an instance of RevokedCertificate or None if the serial_number
+ is not in the CRL.
+ """
+
+ @abc.abstractproperty
+ def signature_hash_algorithm(self):
+ """
+ Returns a HashAlgorithm corresponding to the type of the digest signed
+ in the certificate.
+ """
+
+ @abc.abstractproperty
+ def signature_algorithm_oid(self):
+ """
+ Returns the ObjectIdentifier of the signature algorithm.
+ """
+
+ @abc.abstractproperty
+ def issuer(self):
+ """
+ Returns the X509Name with the issuer of this CRL.
+ """
+
+ @abc.abstractproperty
+ def next_update(self):
+ """
+ Returns the date of next update for this CRL.
+ """
+
+ @abc.abstractproperty
+ def last_update(self):
+ """
+ Returns the date of last update for this CRL.
+ """
+
+ @abc.abstractproperty
+ def extensions(self):
+ """
+ Returns an Extensions object containing a list of CRL extensions.
+ """
+
+ @abc.abstractproperty
+ def signature(self):
+ """
+ Returns the signature bytes.
+ """
+
+ @abc.abstractproperty
+ def tbs_certlist_bytes(self):
+ """
+ Returns the tbsCertList payload bytes as defined in RFC 5280.
+ """
+
+ @abc.abstractmethod
+ def __eq__(self, other):
+ """
+ Checks equality.
+ """
+
+ @abc.abstractmethod
+ def __ne__(self, other):
+ """
+ Checks not equal.
+ """
+
+ @abc.abstractmethod
+ def __len__(self):
+ """
+ Number of revoked certificates in the CRL.
+ """
+
+ @abc.abstractmethod
+ def __getitem__(self, idx):
+ """
+ Returns a revoked certificate (or slice of revoked certificates).
+ """
+
+ @abc.abstractmethod
+ def __iter__(self):
+ """
+ Iterator over the revoked certificates
+ """
+
+ @abc.abstractmethod
+ def is_signature_valid(self, public_key):
+ """
+ Verifies signature of revocation list against given public key.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class CertificateSigningRequest(object):
+ @abc.abstractmethod
+ def __eq__(self, other):
+ """
+ Checks equality.
+ """
+
+ @abc.abstractmethod
+ def __ne__(self, other):
+ """
+ Checks not equal.
+ """
+
+ @abc.abstractmethod
+ def __hash__(self):
+ """
+ Computes a hash.
+ """
+
+ @abc.abstractmethod
+ def public_key(self):
+ """
+ Returns the public key
+ """
+
+ @abc.abstractproperty
+ def subject(self):
+ """
+ Returns the subject name object.
+ """
+
+ @abc.abstractproperty
+ def signature_hash_algorithm(self):
+ """
+ Returns a HashAlgorithm corresponding to the type of the digest signed
+ in the certificate.
+ """
+
+ @abc.abstractproperty
+ def signature_algorithm_oid(self):
+ """
+ Returns the ObjectIdentifier of the signature algorithm.
+ """
+
+ @abc.abstractproperty
+ def extensions(self):
+ """
+ Returns the extensions in the signing request.
+ """
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding):
+ """
+ Encodes the request to PEM or DER format.
+ """
+
+ @abc.abstractproperty
+ def signature(self):
+ """
+ Returns the signature bytes.
+ """
+
+ @abc.abstractproperty
+ def tbs_certrequest_bytes(self):
+ """
+ Returns the PKCS#10 CertificationRequestInfo bytes as defined in RFC
+ 2986.
+ """
+
+ @abc.abstractproperty
+ def is_signature_valid(self):
+ """
+ Verifies signature of signing request.
+ """
+
+ @abc.abstractproperty
+ def get_attribute_for_oid(self):
+ """
+ Get the attribute value for a given OID.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class RevokedCertificate(object):
+ @abc.abstractproperty
+ def serial_number(self):
+ """
+ Returns the serial number of the revoked certificate.
+ """
+
+ @abc.abstractproperty
+ def revocation_date(self):
+ """
+ Returns the date of when this certificate was revoked.
+ """
+
+ @abc.abstractproperty
+ def extensions(self):
+ """
+ Returns an Extensions object containing a list of Revoked extensions.
+ """
+
+
+class CertificateSigningRequestBuilder(object):
+ def __init__(self, subject_name=None, extensions=[], attributes=[]):
+ """
+ Creates an empty X.509 certificate request (v1).
+ """
+ self._subject_name = subject_name
+ self._extensions = extensions
+ self._attributes = attributes
+
+ def subject_name(self, name):
+ """
+ Sets the certificate requestor's distinguished name.
+ """
+ if not isinstance(name, Name):
+ raise TypeError("Expecting x509.Name object.")
+ if self._subject_name is not None:
+ raise ValueError("The subject name may only be set once.")
+ return CertificateSigningRequestBuilder(
+ name, self._extensions, self._attributes
+ )
+
+ def add_extension(self, extension, critical):
+ """
+ Adds an X.509 extension to the certificate request.
+ """
+ if not isinstance(extension, ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = Extension(extension.oid, critical, extension)
+ _reject_duplicate_extension(extension, self._extensions)
+
+ return CertificateSigningRequestBuilder(
+ self._subject_name,
+ self._extensions + [extension],
+ self._attributes,
+ )
+
+ def add_attribute(self, oid, value):
+ """
+ Adds an X.509 attribute with an OID and associated value.
+ """
+ if not isinstance(oid, ObjectIdentifier):
+ raise TypeError("oid must be an ObjectIdentifier")
+
+ if not isinstance(value, bytes):
+ raise TypeError("value must be bytes")
+
+ _reject_duplicate_attribute(oid, self._attributes)
+
+ return CertificateSigningRequestBuilder(
+ self._subject_name,
+ self._extensions,
+ self._attributes + [(oid, value)],
+ )
+
+ def sign(self, private_key, algorithm, backend=None):
+ """
+ Signs the request using the requestor's private key.
+ """
+ backend = _get_backend(backend)
+ if self._subject_name is None:
+ raise ValueError("A CertificateSigningRequest must have a subject")
+ return backend.create_x509_csr(self, private_key, algorithm)
+
+
+class CertificateBuilder(object):
+ def __init__(
+ self,
+ issuer_name=None,
+ subject_name=None,
+ public_key=None,
+ serial_number=None,
+ not_valid_before=None,
+ not_valid_after=None,
+ extensions=[],
+ ):
+ self._version = Version.v3
+ self._issuer_name = issuer_name
+ self._subject_name = subject_name
+ self._public_key = public_key
+ self._serial_number = serial_number
+ self._not_valid_before = not_valid_before
+ self._not_valid_after = not_valid_after
+ self._extensions = extensions
+
+ def issuer_name(self, name):
+ """
+ Sets the CA's distinguished name.
+ """
+ if not isinstance(name, Name):
+ raise TypeError("Expecting x509.Name object.")
+ if self._issuer_name is not None:
+ raise ValueError("The issuer name may only be set once.")
+ return CertificateBuilder(
+ name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions,
+ )
+
+ def subject_name(self, name):
+ """
+ Sets the requestor's distinguished name.
+ """
+ if not isinstance(name, Name):
+ raise TypeError("Expecting x509.Name object.")
+ if self._subject_name is not None:
+ raise ValueError("The subject name may only be set once.")
+ return CertificateBuilder(
+ self._issuer_name,
+ name,
+ self._public_key,
+ self._serial_number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions,
+ )
+
+ def public_key(self, key):
+ """
+ Sets the requestor's public key (as found in the signing request).
+ """
+ if not isinstance(
+ key,
+ (
+ dsa.DSAPublicKey,
+ rsa.RSAPublicKey,
+ ec.EllipticCurvePublicKey,
+ ed25519.Ed25519PublicKey,
+ ed448.Ed448PublicKey,
+ ),
+ ):
+ raise TypeError(
+ "Expecting one of DSAPublicKey, RSAPublicKey,"
+ " EllipticCurvePublicKey, Ed25519PublicKey or"
+ " Ed448PublicKey."
+ )
+ if self._public_key is not None:
+ raise ValueError("The public key may only be set once.")
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ key,
+ self._serial_number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions,
+ )
+
+ def serial_number(self, number):
+ """
+ Sets the certificate serial number.
+ """
+ if not isinstance(number, six.integer_types):
+ raise TypeError("Serial number must be of integral type.")
+ if self._serial_number is not None:
+ raise ValueError("The serial number may only be set once.")
+ if number <= 0:
+ raise ValueError("The serial number should be positive.")
+
+ # ASN.1 integers are always signed, so most significant bit must be
+ # zero.
+ if number.bit_length() >= 160: # As defined in RFC 5280
+ raise ValueError(
+ "The serial number should not be more than 159 " "bits."
+ )
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions,
+ )
+
+ def not_valid_before(self, time):
+ """
+ Sets the certificate activation time.
+ """
+ if not isinstance(time, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._not_valid_before is not None:
+ raise ValueError("The not valid before may only be set once.")
+ time = _convert_to_naive_utc_time(time)
+ if time < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The not valid before date must be on or after"
+ " 1950 January 1)."
+ )
+ if self._not_valid_after is not None and time > self._not_valid_after:
+ raise ValueError(
+ "The not valid before date must be before the not valid after "
+ "date."
+ )
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ time,
+ self._not_valid_after,
+ self._extensions,
+ )
+
+ def not_valid_after(self, time):
+ """
+ Sets the certificate expiration time.
+ """
+ if not isinstance(time, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._not_valid_after is not None:
+ raise ValueError("The not valid after may only be set once.")
+ time = _convert_to_naive_utc_time(time)
+ if time < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The not valid after date must be on or after"
+ " 1950 January 1."
+ )
+ if (
+ self._not_valid_before is not None
+ and time < self._not_valid_before
+ ):
+ raise ValueError(
+ "The not valid after date must be after the not valid before "
+ "date."
+ )
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ self._not_valid_before,
+ time,
+ self._extensions,
+ )
+
+ def add_extension(self, extension, critical):
+ """
+ Adds an X.509 extension to the certificate.
+ """
+ if not isinstance(extension, ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = Extension(extension.oid, critical, extension)
+ _reject_duplicate_extension(extension, self._extensions)
+
+ return CertificateBuilder(
+ self._issuer_name,
+ self._subject_name,
+ self._public_key,
+ self._serial_number,
+ self._not_valid_before,
+ self._not_valid_after,
+ self._extensions + [extension],
+ )
+
+ def sign(self, private_key, algorithm, backend=None):
+ """
+ Signs the certificate using the CA's private key.
+ """
+ backend = _get_backend(backend)
+ if self._subject_name is None:
+ raise ValueError("A certificate must have a subject name")
+
+ if self._issuer_name is None:
+ raise ValueError("A certificate must have an issuer name")
+
+ if self._serial_number is None:
+ raise ValueError("A certificate must have a serial number")
+
+ if self._not_valid_before is None:
+ raise ValueError("A certificate must have a not valid before time")
+
+ if self._not_valid_after is None:
+ raise ValueError("A certificate must have a not valid after time")
+
+ if self._public_key is None:
+ raise ValueError("A certificate must have a public key")
+
+ return backend.create_x509_certificate(self, private_key, algorithm)
+
+
+class CertificateRevocationListBuilder(object):
+ def __init__(
+ self,
+ issuer_name=None,
+ last_update=None,
+ next_update=None,
+ extensions=[],
+ revoked_certificates=[],
+ ):
+ self._issuer_name = issuer_name
+ self._last_update = last_update
+ self._next_update = next_update
+ self._extensions = extensions
+ self._revoked_certificates = revoked_certificates
+
+ def issuer_name(self, issuer_name):
+ if not isinstance(issuer_name, Name):
+ raise TypeError("Expecting x509.Name object.")
+ if self._issuer_name is not None:
+ raise ValueError("The issuer name may only be set once.")
+ return CertificateRevocationListBuilder(
+ issuer_name,
+ self._last_update,
+ self._next_update,
+ self._extensions,
+ self._revoked_certificates,
+ )
+
+ def last_update(self, last_update):
+ if not isinstance(last_update, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._last_update is not None:
+ raise ValueError("Last update may only be set once.")
+ last_update = _convert_to_naive_utc_time(last_update)
+ if last_update < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The last update date must be on or after" " 1950 January 1."
+ )
+ if self._next_update is not None and last_update > self._next_update:
+ raise ValueError(
+ "The last update date must be before the next update date."
+ )
+ return CertificateRevocationListBuilder(
+ self._issuer_name,
+ last_update,
+ self._next_update,
+ self._extensions,
+ self._revoked_certificates,
+ )
+
+ def next_update(self, next_update):
+ if not isinstance(next_update, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._next_update is not None:
+ raise ValueError("Last update may only be set once.")
+ next_update = _convert_to_naive_utc_time(next_update)
+ if next_update < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The last update date must be on or after" " 1950 January 1."
+ )
+ if self._last_update is not None and next_update < self._last_update:
+ raise ValueError(
+ "The next update date must be after the last update date."
+ )
+ return CertificateRevocationListBuilder(
+ self._issuer_name,
+ self._last_update,
+ next_update,
+ self._extensions,
+ self._revoked_certificates,
+ )
+
+ def add_extension(self, extension, critical):
+ """
+ Adds an X.509 extension to the certificate revocation list.
+ """
+ if not isinstance(extension, ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = Extension(extension.oid, critical, extension)
+ _reject_duplicate_extension(extension, self._extensions)
+ return CertificateRevocationListBuilder(
+ self._issuer_name,
+ self._last_update,
+ self._next_update,
+ self._extensions + [extension],
+ self._revoked_certificates,
+ )
+
+ def add_revoked_certificate(self, revoked_certificate):
+ """
+ Adds a revoked certificate to the CRL.
+ """
+ if not isinstance(revoked_certificate, RevokedCertificate):
+ raise TypeError("Must be an instance of RevokedCertificate")
+
+ return CertificateRevocationListBuilder(
+ self._issuer_name,
+ self._last_update,
+ self._next_update,
+ self._extensions,
+ self._revoked_certificates + [revoked_certificate],
+ )
+
+ def sign(self, private_key, algorithm, backend=None):
+ backend = _get_backend(backend)
+ if self._issuer_name is None:
+ raise ValueError("A CRL must have an issuer name")
+
+ if self._last_update is None:
+ raise ValueError("A CRL must have a last update time")
+
+ if self._next_update is None:
+ raise ValueError("A CRL must have a next update time")
+
+ return backend.create_x509_crl(self, private_key, algorithm)
+
+
+class RevokedCertificateBuilder(object):
+ def __init__(
+ self, serial_number=None, revocation_date=None, extensions=[]
+ ):
+ self._serial_number = serial_number
+ self._revocation_date = revocation_date
+ self._extensions = extensions
+
+ def serial_number(self, number):
+ if not isinstance(number, six.integer_types):
+ raise TypeError("Serial number must be of integral type.")
+ if self._serial_number is not None:
+ raise ValueError("The serial number may only be set once.")
+ if number <= 0:
+ raise ValueError("The serial number should be positive")
+
+ # ASN.1 integers are always signed, so most significant bit must be
+ # zero.
+ if number.bit_length() >= 160: # As defined in RFC 5280
+ raise ValueError(
+ "The serial number should not be more than 159 " "bits."
+ )
+ return RevokedCertificateBuilder(
+ number, self._revocation_date, self._extensions
+ )
+
+ def revocation_date(self, time):
+ if not isinstance(time, datetime.datetime):
+ raise TypeError("Expecting datetime object.")
+ if self._revocation_date is not None:
+ raise ValueError("The revocation date may only be set once.")
+ time = _convert_to_naive_utc_time(time)
+ if time < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The revocation date must be on or after" " 1950 January 1."
+ )
+ return RevokedCertificateBuilder(
+ self._serial_number, time, self._extensions
+ )
+
+ def add_extension(self, extension, critical):
+ if not isinstance(extension, ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = Extension(extension.oid, critical, extension)
+ _reject_duplicate_extension(extension, self._extensions)
+ return RevokedCertificateBuilder(
+ self._serial_number,
+ self._revocation_date,
+ self._extensions + [extension],
+ )
+
+ def build(self, backend=None):
+ backend = _get_backend(backend)
+ if self._serial_number is None:
+ raise ValueError("A revoked certificate must have a serial number")
+ if self._revocation_date is None:
+ raise ValueError(
+ "A revoked certificate must have a revocation date"
+ )
+
+ return backend.create_x509_revoked_certificate(self)
+
+
+def random_serial_number():
+ return utils.int_from_bytes(os.urandom(20), "big") >> 1
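The builder classes above are immutable: each setter returns a new builder, so a certificate is usually assembled as one chained expression ending in sign(). A minimal self-signed certificate sketch; the name, RSA key type and 30-day validity window are illustrative choices, and the default backend is assumed to be available:

    import datetime

    from cryptography import x509
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import rsa
    from cryptography.x509.oid import NameOID

    key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
    name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.test")])

    cert = (
        x509.CertificateBuilder()
        .subject_name(name)
        .issuer_name(name)                     # self-signed: issuer == subject
        .public_key(key.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(datetime.datetime.utcnow())
        .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=30))
        .add_extension(x509.BasicConstraints(ca=False, path_length=None), critical=True)
        .sign(key, hashes.SHA256())
    )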
diff --git a/contrib/python/cryptography/py2/cryptography/x509/certificate_transparency.py b/contrib/python/cryptography/py2/cryptography/x509/certificate_transparency.py
new file mode 100644
index 0000000000..d00fe81269
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/x509/certificate_transparency.py
@@ -0,0 +1,46 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+from enum import Enum
+
+import six
+
+
+class LogEntryType(Enum):
+ X509_CERTIFICATE = 0
+ PRE_CERTIFICATE = 1
+
+
+class Version(Enum):
+ v1 = 0
+
+
+@six.add_metaclass(abc.ABCMeta)
+class SignedCertificateTimestamp(object):
+ @abc.abstractproperty
+ def version(self):
+ """
+ Returns the SCT version.
+ """
+
+ @abc.abstractproperty
+ def log_id(self):
+ """
+ Returns an identifier indicating which log this SCT is for.
+ """
+
+ @abc.abstractproperty
+ def timestamp(self):
+ """
+ Returns the timestamp for this SCT.
+ """
+
+ @abc.abstractproperty
+ def entry_type(self):
+ """
+ Returns whether this is an SCT for a certificate or pre-certificate.
+ """
diff --git a/contrib/python/cryptography/py2/cryptography/x509/extensions.py b/contrib/python/cryptography/py2/cryptography/x509/extensions.py
new file mode 100644
index 0000000000..130ba69b87
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/x509/extensions.py
@@ -0,0 +1,1702 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+import datetime
+import hashlib
+import ipaddress
+from enum import Enum
+
+import six
+
+from cryptography import utils
+from cryptography.hazmat._der import (
+ BIT_STRING,
+ DERReader,
+ OBJECT_IDENTIFIER,
+ SEQUENCE,
+)
+from cryptography.hazmat.primitives import constant_time, serialization
+from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePublicKey
+from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicKey
+from cryptography.x509.certificate_transparency import (
+ SignedCertificateTimestamp,
+)
+from cryptography.x509.general_name import GeneralName, IPAddress, OtherName
+from cryptography.x509.name import RelativeDistinguishedName
+from cryptography.x509.oid import (
+ CRLEntryExtensionOID,
+ ExtensionOID,
+ OCSPExtensionOID,
+ ObjectIdentifier,
+)
+
+
+def _key_identifier_from_public_key(public_key):
+ if isinstance(public_key, RSAPublicKey):
+ data = public_key.public_bytes(
+ serialization.Encoding.DER,
+ serialization.PublicFormat.PKCS1,
+ )
+ elif isinstance(public_key, EllipticCurvePublicKey):
+ data = public_key.public_bytes(
+ serialization.Encoding.X962,
+ serialization.PublicFormat.UncompressedPoint,
+ )
+ else:
+ # This is a very slow way to do this.
+ serialized = public_key.public_bytes(
+ serialization.Encoding.DER,
+ serialization.PublicFormat.SubjectPublicKeyInfo,
+ )
+
+ reader = DERReader(serialized)
+ with reader.read_single_element(SEQUENCE) as public_key_info:
+ algorithm = public_key_info.read_element(SEQUENCE)
+ public_key = public_key_info.read_element(BIT_STRING)
+
+ # Double-check the algorithm structure.
+ with algorithm:
+ algorithm.read_element(OBJECT_IDENTIFIER)
+ if not algorithm.is_empty():
+ # Skip the optional parameters field.
+ algorithm.read_any_element()
+
+ # BIT STRING contents begin with the number of padding bytes added. It
+ # must be zero for SubjectPublicKeyInfo structures.
+ if public_key.read_byte() != 0:
+ raise ValueError("Invalid public key encoding")
+
+ data = public_key.data
+
+ return hashlib.sha1(data).digest()
+
+
+def _make_sequence_methods(field_name):
+ def len_method(self):
+ return len(getattr(self, field_name))
+
+ def iter_method(self):
+ return iter(getattr(self, field_name))
+
+ def getitem_method(self, idx):
+ return getattr(self, field_name)[idx]
+
+ return len_method, iter_method, getitem_method
+
+
+class DuplicateExtension(Exception):
+ def __init__(self, msg, oid):
+ super(DuplicateExtension, self).__init__(msg)
+ self.oid = oid
+
+
+class ExtensionNotFound(Exception):
+ def __init__(self, msg, oid):
+ super(ExtensionNotFound, self).__init__(msg)
+ self.oid = oid
+
+
+@six.add_metaclass(abc.ABCMeta)
+class ExtensionType(object):
+ @abc.abstractproperty
+ def oid(self):
+ """
+ Returns the oid associated with the given extension type.
+ """
+
+
+class Extensions(object):
+ def __init__(self, extensions):
+ self._extensions = extensions
+
+ def get_extension_for_oid(self, oid):
+ for ext in self:
+ if ext.oid == oid:
+ return ext
+
+ raise ExtensionNotFound("No {} extension was found".format(oid), oid)
+
+ def get_extension_for_class(self, extclass):
+ if extclass is UnrecognizedExtension:
+ raise TypeError(
+ "UnrecognizedExtension can't be used with "
+ "get_extension_for_class because more than one instance of the"
+ " class may be present."
+ )
+
+ for ext in self:
+ if isinstance(ext.value, extclass):
+ return ext
+
+ raise ExtensionNotFound(
+ "No {} extension was found".format(extclass), extclass.oid
+ )
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_extensions")
+
+ def __repr__(self):
+ return "<Extensions({})>".format(self._extensions)
+
+
+@utils.register_interface(ExtensionType)
+class CRLNumber(object):
+ oid = ExtensionOID.CRL_NUMBER
+
+ def __init__(self, crl_number):
+ if not isinstance(crl_number, six.integer_types):
+ raise TypeError("crl_number must be an integer")
+
+ self._crl_number = crl_number
+
+ def __eq__(self, other):
+ if not isinstance(other, CRLNumber):
+ return NotImplemented
+
+ return self.crl_number == other.crl_number
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self.crl_number)
+
+ def __repr__(self):
+ return "<CRLNumber({})>".format(self.crl_number)
+
+ crl_number = utils.read_only_property("_crl_number")
+
+
+@utils.register_interface(ExtensionType)
+class AuthorityKeyIdentifier(object):
+ oid = ExtensionOID.AUTHORITY_KEY_IDENTIFIER
+
+ def __init__(
+ self,
+ key_identifier,
+ authority_cert_issuer,
+ authority_cert_serial_number,
+ ):
+ if (authority_cert_issuer is None) != (
+ authority_cert_serial_number is None
+ ):
+ raise ValueError(
+ "authority_cert_issuer and authority_cert_serial_number "
+ "must both be present or both None"
+ )
+
+ if authority_cert_issuer is not None:
+ authority_cert_issuer = list(authority_cert_issuer)
+ if not all(
+ isinstance(x, GeneralName) for x in authority_cert_issuer
+ ):
+ raise TypeError(
+ "authority_cert_issuer must be a list of GeneralName "
+ "objects"
+ )
+
+ if authority_cert_serial_number is not None and not isinstance(
+ authority_cert_serial_number, six.integer_types
+ ):
+ raise TypeError("authority_cert_serial_number must be an integer")
+
+ self._key_identifier = key_identifier
+ self._authority_cert_issuer = authority_cert_issuer
+ self._authority_cert_serial_number = authority_cert_serial_number
+
+ @classmethod
+ def from_issuer_public_key(cls, public_key):
+ digest = _key_identifier_from_public_key(public_key)
+ return cls(
+ key_identifier=digest,
+ authority_cert_issuer=None,
+ authority_cert_serial_number=None,
+ )
+
+ @classmethod
+ def from_issuer_subject_key_identifier(cls, ski):
+ return cls(
+ key_identifier=ski.digest,
+ authority_cert_issuer=None,
+ authority_cert_serial_number=None,
+ )
+
+ def __repr__(self):
+ return (
+ "<AuthorityKeyIdentifier(key_identifier={0.key_identifier!r}, "
+ "authority_cert_issuer={0.authority_cert_issuer}, "
+ "authority_cert_serial_number={0.authority_cert_serial_number}"
+ ")>".format(self)
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, AuthorityKeyIdentifier):
+ return NotImplemented
+
+ return (
+ self.key_identifier == other.key_identifier
+ and self.authority_cert_issuer == other.authority_cert_issuer
+ and self.authority_cert_serial_number
+ == other.authority_cert_serial_number
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ if self.authority_cert_issuer is None:
+ aci = None
+ else:
+ aci = tuple(self.authority_cert_issuer)
+ return hash(
+ (self.key_identifier, aci, self.authority_cert_serial_number)
+ )
+
+ key_identifier = utils.read_only_property("_key_identifier")
+ authority_cert_issuer = utils.read_only_property("_authority_cert_issuer")
+ authority_cert_serial_number = utils.read_only_property(
+ "_authority_cert_serial_number"
+ )
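A hedged usage sketch (not part of the module): the two classmethods above are the usual ways an AKI value is produced when issuing a certificate; `issuer_certificate` is a hypothetical, already-loaded x509.Certificate.

    # Derive the AKI from the issuer's public key ...
    aki = AuthorityKeyIdentifier.from_issuer_public_key(
        issuer_certificate.public_key()
    )
    # ... or reuse the issuer's existing SubjectKeyIdentifier extension value.
    ski_ext = issuer_certificate.extensions.get_extension_for_class(
        SubjectKeyIdentifier
    )
    aki = AuthorityKeyIdentifier.from_issuer_subject_key_identifier(ski_ext.value)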
+
+
+@utils.register_interface(ExtensionType)
+class SubjectKeyIdentifier(object):
+ oid = ExtensionOID.SUBJECT_KEY_IDENTIFIER
+
+ def __init__(self, digest):
+ self._digest = digest
+
+ @classmethod
+ def from_public_key(cls, public_key):
+ return cls(_key_identifier_from_public_key(public_key))
+
+ digest = utils.read_only_property("_digest")
+
+ def __repr__(self):
+ return "<SubjectKeyIdentifier(digest={0!r})>".format(self.digest)
+
+ def __eq__(self, other):
+ if not isinstance(other, SubjectKeyIdentifier):
+ return NotImplemented
+
+ return constant_time.bytes_eq(self.digest, other.digest)
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self.digest)
+
+
+@utils.register_interface(ExtensionType)
+class AuthorityInformationAccess(object):
+ oid = ExtensionOID.AUTHORITY_INFORMATION_ACCESS
+
+ def __init__(self, descriptions):
+ descriptions = list(descriptions)
+ if not all(isinstance(x, AccessDescription) for x in descriptions):
+ raise TypeError(
+ "Every item in the descriptions list must be an "
+ "AccessDescription"
+ )
+
+ self._descriptions = descriptions
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions")
+
+ def __repr__(self):
+ return "<AuthorityInformationAccess({})>".format(self._descriptions)
+
+ def __eq__(self, other):
+ if not isinstance(other, AuthorityInformationAccess):
+ return NotImplemented
+
+ return self._descriptions == other._descriptions
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(tuple(self._descriptions))
+
+
+@utils.register_interface(ExtensionType)
+class SubjectInformationAccess(object):
+ oid = ExtensionOID.SUBJECT_INFORMATION_ACCESS
+
+ def __init__(self, descriptions):
+ descriptions = list(descriptions)
+ if not all(isinstance(x, AccessDescription) for x in descriptions):
+ raise TypeError(
+ "Every item in the descriptions list must be an "
+ "AccessDescription"
+ )
+
+ self._descriptions = descriptions
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_descriptions")
+
+ def __repr__(self):
+ return "<SubjectInformationAccess({})>".format(self._descriptions)
+
+ def __eq__(self, other):
+ if not isinstance(other, SubjectInformationAccess):
+ return NotImplemented
+
+ return self._descriptions == other._descriptions
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(tuple(self._descriptions))
+
+
+class AccessDescription(object):
+ def __init__(self, access_method, access_location):
+ if not isinstance(access_method, ObjectIdentifier):
+ raise TypeError("access_method must be an ObjectIdentifier")
+
+ if not isinstance(access_location, GeneralName):
+ raise TypeError("access_location must be a GeneralName")
+
+ self._access_method = access_method
+ self._access_location = access_location
+
+ def __repr__(self):
+ return (
+ "<AccessDescription(access_method={0.access_method}, access_locati"
+ "on={0.access_location})>".format(self)
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, AccessDescription):
+ return NotImplemented
+
+ return (
+ self.access_method == other.access_method
+ and self.access_location == other.access_location
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash((self.access_method, self.access_location))
+
+ access_method = utils.read_only_property("_access_method")
+ access_location = utils.read_only_property("_access_location")
+
+
+@utils.register_interface(ExtensionType)
+class BasicConstraints(object):
+ oid = ExtensionOID.BASIC_CONSTRAINTS
+
+ def __init__(self, ca, path_length):
+ if not isinstance(ca, bool):
+ raise TypeError("ca must be a boolean value")
+
+ if path_length is not None and not ca:
+ raise ValueError("path_length must be None when ca is False")
+
+ if path_length is not None and (
+ not isinstance(path_length, six.integer_types) or path_length < 0
+ ):
+ raise TypeError(
+ "path_length must be a non-negative integer or None"
+ )
+
+ self._ca = ca
+ self._path_length = path_length
+
+ ca = utils.read_only_property("_ca")
+ path_length = utils.read_only_property("_path_length")
+
+ def __repr__(self):
+ return (
+ "<BasicConstraints(ca={0.ca}, " "path_length={0.path_length})>"
+ ).format(self)
+
+ def __eq__(self, other):
+ if not isinstance(other, BasicConstraints):
+ return NotImplemented
+
+ return self.ca == other.ca and self.path_length == other.path_length
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash((self.ca, self.path_length))
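A minimal sketch of the validation rules above (not part of the module): path_length is only meaningful for CA certificates, so it must be None when ca is False.

    BasicConstraints(ca=True, path_length=0)      # CA that may only sign end-entity certs
    BasicConstraints(ca=False, path_length=None)  # typical leaf certificate
    BasicConstraints(ca=False, path_length=1)     # raises ValueError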
+
+
+@utils.register_interface(ExtensionType)
+class DeltaCRLIndicator(object):
+ oid = ExtensionOID.DELTA_CRL_INDICATOR
+
+ def __init__(self, crl_number):
+ if not isinstance(crl_number, six.integer_types):
+ raise TypeError("crl_number must be an integer")
+
+ self._crl_number = crl_number
+
+ crl_number = utils.read_only_property("_crl_number")
+
+ def __eq__(self, other):
+ if not isinstance(other, DeltaCRLIndicator):
+ return NotImplemented
+
+ return self.crl_number == other.crl_number
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self.crl_number)
+
+ def __repr__(self):
+ return "<DeltaCRLIndicator(crl_number={0.crl_number})>".format(self)
+
+
+@utils.register_interface(ExtensionType)
+class CRLDistributionPoints(object):
+ oid = ExtensionOID.CRL_DISTRIBUTION_POINTS
+
+ def __init__(self, distribution_points):
+ distribution_points = list(distribution_points)
+ if not all(
+ isinstance(x, DistributionPoint) for x in distribution_points
+ ):
+ raise TypeError(
+ "distribution_points must be a list of DistributionPoint "
+ "objects"
+ )
+
+ self._distribution_points = distribution_points
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods(
+ "_distribution_points"
+ )
+
+ def __repr__(self):
+ return "<CRLDistributionPoints({})>".format(self._distribution_points)
+
+ def __eq__(self, other):
+ if not isinstance(other, CRLDistributionPoints):
+ return NotImplemented
+
+ return self._distribution_points == other._distribution_points
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(tuple(self._distribution_points))
+
+
+@utils.register_interface(ExtensionType)
+class FreshestCRL(object):
+ oid = ExtensionOID.FRESHEST_CRL
+
+ def __init__(self, distribution_points):
+ distribution_points = list(distribution_points)
+ if not all(
+ isinstance(x, DistributionPoint) for x in distribution_points
+ ):
+ raise TypeError(
+ "distribution_points must be a list of DistributionPoint "
+ "objects"
+ )
+
+ self._distribution_points = distribution_points
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods(
+ "_distribution_points"
+ )
+
+ def __repr__(self):
+ return "<FreshestCRL({})>".format(self._distribution_points)
+
+ def __eq__(self, other):
+ if not isinstance(other, FreshestCRL):
+ return NotImplemented
+
+ return self._distribution_points == other._distribution_points
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(tuple(self._distribution_points))
+
+
+class DistributionPoint(object):
+ def __init__(self, full_name, relative_name, reasons, crl_issuer):
+ if full_name and relative_name:
+ raise ValueError(
+ "You cannot provide both full_name and relative_name; at "
+ "least one must be None."
+ )
+
+ if full_name:
+ full_name = list(full_name)
+ if not all(isinstance(x, GeneralName) for x in full_name):
+ raise TypeError(
+ "full_name must be a list of GeneralName objects"
+ )
+
+ if relative_name:
+ if not isinstance(relative_name, RelativeDistinguishedName):
+ raise TypeError(
+ "relative_name must be a RelativeDistinguishedName"
+ )
+
+ if crl_issuer:
+ crl_issuer = list(crl_issuer)
+ if not all(isinstance(x, GeneralName) for x in crl_issuer):
+ raise TypeError(
+ "crl_issuer must be None or a list of general names"
+ )
+
+ if reasons and (
+ not isinstance(reasons, frozenset)
+ or not all(isinstance(x, ReasonFlags) for x in reasons)
+ ):
+ raise TypeError("reasons must be None or frozenset of ReasonFlags")
+
+ if reasons and (
+ ReasonFlags.unspecified in reasons
+ or ReasonFlags.remove_from_crl in reasons
+ ):
+ raise ValueError(
+ "unspecified and remove_from_crl are not valid reasons in a "
+ "DistributionPoint"
+ )
+
+ if reasons and not crl_issuer and not (full_name or relative_name):
+ raise ValueError(
+ "You must supply crl_issuer, full_name, or relative_name when "
+ "reasons is not None"
+ )
+
+ self._full_name = full_name
+ self._relative_name = relative_name
+ self._reasons = reasons
+ self._crl_issuer = crl_issuer
+
+ def __repr__(self):
+ return (
+ "<DistributionPoint(full_name={0.full_name}, relative_name={0.rela"
+ "tive_name}, reasons={0.reasons}, "
+ "crl_issuer={0.crl_issuer})>".format(self)
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, DistributionPoint):
+ return NotImplemented
+
+ return (
+ self.full_name == other.full_name
+ and self.relative_name == other.relative_name
+ and self.reasons == other.reasons
+ and self.crl_issuer == other.crl_issuer
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ if self.full_name is not None:
+ fn = tuple(self.full_name)
+ else:
+ fn = None
+
+ if self.crl_issuer is not None:
+ crl_issuer = tuple(self.crl_issuer)
+ else:
+ crl_issuer = None
+
+ return hash((fn, self.relative_name, self.reasons, crl_issuer))
+
+ full_name = utils.read_only_property("_full_name")
+ relative_name = utils.read_only_property("_relative_name")
+ reasons = utils.read_only_property("_reasons")
+ crl_issuer = utils.read_only_property("_crl_issuer")
+
+
+class ReasonFlags(Enum):
+ unspecified = "unspecified"
+ key_compromise = "keyCompromise"
+ ca_compromise = "cACompromise"
+ affiliation_changed = "affiliationChanged"
+ superseded = "superseded"
+ cessation_of_operation = "cessationOfOperation"
+ certificate_hold = "certificateHold"
+ privilege_withdrawn = "privilegeWithdrawn"
+ aa_compromise = "aACompromise"
+ remove_from_crl = "removeFromCRL"
+
+
+@utils.register_interface(ExtensionType)
+class PolicyConstraints(object):
+ oid = ExtensionOID.POLICY_CONSTRAINTS
+
+ def __init__(self, require_explicit_policy, inhibit_policy_mapping):
+ if require_explicit_policy is not None and not isinstance(
+ require_explicit_policy, six.integer_types
+ ):
+ raise TypeError(
+ "require_explicit_policy must be a non-negative integer or "
+ "None"
+ )
+
+ if inhibit_policy_mapping is not None and not isinstance(
+ inhibit_policy_mapping, six.integer_types
+ ):
+ raise TypeError(
+ "inhibit_policy_mapping must be a non-negative integer or None"
+ )
+
+ if inhibit_policy_mapping is None and require_explicit_policy is None:
+ raise ValueError(
+ "At least one of require_explicit_policy and "
+ "inhibit_policy_mapping must not be None"
+ )
+
+ self._require_explicit_policy = require_explicit_policy
+ self._inhibit_policy_mapping = inhibit_policy_mapping
+
+ def __repr__(self):
+ return (
+ u"<PolicyConstraints(require_explicit_policy={0.require_explicit"
+ u"_policy}, inhibit_policy_mapping={0.inhibit_policy_"
+ u"mapping})>".format(self)
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, PolicyConstraints):
+ return NotImplemented
+
+ return (
+ self.require_explicit_policy == other.require_explicit_policy
+ and self.inhibit_policy_mapping == other.inhibit_policy_mapping
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(
+ (self.require_explicit_policy, self.inhibit_policy_mapping)
+ )
+
+ require_explicit_policy = utils.read_only_property(
+ "_require_explicit_policy"
+ )
+ inhibit_policy_mapping = utils.read_only_property(
+ "_inhibit_policy_mapping"
+ )
+
+
+@utils.register_interface(ExtensionType)
+class CertificatePolicies(object):
+ oid = ExtensionOID.CERTIFICATE_POLICIES
+
+ def __init__(self, policies):
+ policies = list(policies)
+ if not all(isinstance(x, PolicyInformation) for x in policies):
+ raise TypeError(
+ "Every item in the policies list must be a "
+ "PolicyInformation"
+ )
+
+ self._policies = policies
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_policies")
+
+ def __repr__(self):
+ return "<CertificatePolicies({})>".format(self._policies)
+
+ def __eq__(self, other):
+ if not isinstance(other, CertificatePolicies):
+ return NotImplemented
+
+ return self._policies == other._policies
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(tuple(self._policies))
+
+
+class PolicyInformation(object):
+ def __init__(self, policy_identifier, policy_qualifiers):
+ if not isinstance(policy_identifier, ObjectIdentifier):
+ raise TypeError("policy_identifier must be an ObjectIdentifier")
+
+ self._policy_identifier = policy_identifier
+
+ if policy_qualifiers:
+ policy_qualifiers = list(policy_qualifiers)
+ if not all(
+ isinstance(x, (six.text_type, UserNotice))
+ for x in policy_qualifiers
+ ):
+ raise TypeError(
+ "policy_qualifiers must be a list of strings and/or "
+ "UserNotice objects or None"
+ )
+
+ self._policy_qualifiers = policy_qualifiers
+
+ def __repr__(self):
+ return (
+ "<PolicyInformation(policy_identifier={0.policy_identifier}, polic"
+ "y_qualifiers={0.policy_qualifiers})>".format(self)
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, PolicyInformation):
+ return NotImplemented
+
+ return (
+ self.policy_identifier == other.policy_identifier
+ and self.policy_qualifiers == other.policy_qualifiers
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ if self.policy_qualifiers is not None:
+ pq = tuple(self.policy_qualifiers)
+ else:
+ pq = None
+
+ return hash((self.policy_identifier, pq))
+
+ policy_identifier = utils.read_only_property("_policy_identifier")
+ policy_qualifiers = utils.read_only_property("_policy_qualifiers")
+
+
+class UserNotice(object):
+ def __init__(self, notice_reference, explicit_text):
+ if notice_reference and not isinstance(
+ notice_reference, NoticeReference
+ ):
+ raise TypeError(
+ "notice_reference must be None or a NoticeReference"
+ )
+
+ self._notice_reference = notice_reference
+ self._explicit_text = explicit_text
+
+ def __repr__(self):
+ return (
+ "<UserNotice(notice_reference={0.notice_reference}, explicit_text="
+ "{0.explicit_text!r})>".format(self)
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, UserNotice):
+ return NotImplemented
+
+ return (
+ self.notice_reference == other.notice_reference
+ and self.explicit_text == other.explicit_text
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash((self.notice_reference, self.explicit_text))
+
+ notice_reference = utils.read_only_property("_notice_reference")
+ explicit_text = utils.read_only_property("_explicit_text")
+
+
+class NoticeReference(object):
+ def __init__(self, organization, notice_numbers):
+ self._organization = organization
+ notice_numbers = list(notice_numbers)
+ if not all(isinstance(x, int) for x in notice_numbers):
+ raise TypeError("notice_numbers must be a list of integers")
+
+ self._notice_numbers = notice_numbers
+
+ def __repr__(self):
+ return (
+ "<NoticeReference(organization={0.organization!r}, notice_numbers="
+ "{0.notice_numbers})>".format(self)
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, NoticeReference):
+ return NotImplemented
+
+ return (
+ self.organization == other.organization
+ and self.notice_numbers == other.notice_numbers
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash((self.organization, tuple(self.notice_numbers)))
+
+ organization = utils.read_only_property("_organization")
+ notice_numbers = utils.read_only_property("_notice_numbers")
+
+
+@utils.register_interface(ExtensionType)
+class ExtendedKeyUsage(object):
+ oid = ExtensionOID.EXTENDED_KEY_USAGE
+
+ def __init__(self, usages):
+ usages = list(usages)
+ if not all(isinstance(x, ObjectIdentifier) for x in usages):
+ raise TypeError(
+ "Every item in the usages list must be an ObjectIdentifier"
+ )
+
+ self._usages = usages
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_usages")
+
+ def __repr__(self):
+ return "<ExtendedKeyUsage({})>".format(self._usages)
+
+ def __eq__(self, other):
+ if not isinstance(other, ExtendedKeyUsage):
+ return NotImplemented
+
+ return self._usages == other._usages
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(tuple(self._usages))
+
+
+@utils.register_interface(ExtensionType)
+class OCSPNoCheck(object):
+ oid = ExtensionOID.OCSP_NO_CHECK
+
+ def __eq__(self, other):
+ if not isinstance(other, OCSPNoCheck):
+ return NotImplemented
+
+ return True
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(OCSPNoCheck)
+
+ def __repr__(self):
+ return "<OCSPNoCheck()>"
+
+
+@utils.register_interface(ExtensionType)
+class PrecertPoison(object):
+ oid = ExtensionOID.PRECERT_POISON
+
+ def __eq__(self, other):
+ if not isinstance(other, PrecertPoison):
+ return NotImplemented
+
+ return True
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(PrecertPoison)
+
+ def __repr__(self):
+ return "<PrecertPoison()>"
+
+
+@utils.register_interface(ExtensionType)
+class TLSFeature(object):
+ oid = ExtensionOID.TLS_FEATURE
+
+ def __init__(self, features):
+ features = list(features)
+ if (
+ not all(isinstance(x, TLSFeatureType) for x in features)
+ or len(features) == 0
+ ):
+ raise TypeError(
+ "features must be a list of elements from the TLSFeatureType "
+ "enum"
+ )
+
+ self._features = features
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_features")
+
+ def __repr__(self):
+ return "<TLSFeature(features={0._features})>".format(self)
+
+ def __eq__(self, other):
+ if not isinstance(other, TLSFeature):
+ return NotImplemented
+
+ return self._features == other._features
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(tuple(self._features))
+
+
+class TLSFeatureType(Enum):
+ # status_request is defined in RFC 6066 and is used for what is commonly
+ # called OCSP Must-Staple when present in the TLS Feature extension in an
+ # X.509 certificate.
+ status_request = 5
+ # status_request_v2 is defined in RFC 6961 and allows multiple OCSP
+ # responses to be provided. It is not currently in use by clients or
+ # servers.
+ status_request_v2 = 17
+
+
+_TLS_FEATURE_TYPE_TO_ENUM = {x.value: x for x in TLSFeatureType}
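An illustrative sketch (not part of the module): status_request is how "OCSP Must-Staple" is expressed, so a certificate or CSR would typically attach the extension as below; `builder` is a hypothetical x509.CertificateBuilder or CertificateSigningRequestBuilder.

    must_staple = TLSFeature([TLSFeatureType.status_request])
    builder = builder.add_extension(must_staple, critical=False)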
+
+
+@utils.register_interface(ExtensionType)
+class InhibitAnyPolicy(object):
+ oid = ExtensionOID.INHIBIT_ANY_POLICY
+
+ def __init__(self, skip_certs):
+ if not isinstance(skip_certs, six.integer_types):
+ raise TypeError("skip_certs must be an integer")
+
+ if skip_certs < 0:
+ raise ValueError("skip_certs must be a non-negative integer")
+
+ self._skip_certs = skip_certs
+
+ def __repr__(self):
+ return "<InhibitAnyPolicy(skip_certs={0.skip_certs})>".format(self)
+
+ def __eq__(self, other):
+ if not isinstance(other, InhibitAnyPolicy):
+ return NotImplemented
+
+ return self.skip_certs == other.skip_certs
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self.skip_certs)
+
+ skip_certs = utils.read_only_property("_skip_certs")
+
+
+@utils.register_interface(ExtensionType)
+class KeyUsage(object):
+ oid = ExtensionOID.KEY_USAGE
+
+ def __init__(
+ self,
+ digital_signature,
+ content_commitment,
+ key_encipherment,
+ data_encipherment,
+ key_agreement,
+ key_cert_sign,
+ crl_sign,
+ encipher_only,
+ decipher_only,
+ ):
+ if not key_agreement and (encipher_only or decipher_only):
+ raise ValueError(
+ "encipher_only and decipher_only can only be true when "
+ "key_agreement is true"
+ )
+
+ self._digital_signature = digital_signature
+ self._content_commitment = content_commitment
+ self._key_encipherment = key_encipherment
+ self._data_encipherment = data_encipherment
+ self._key_agreement = key_agreement
+ self._key_cert_sign = key_cert_sign
+ self._crl_sign = crl_sign
+ self._encipher_only = encipher_only
+ self._decipher_only = decipher_only
+
+ digital_signature = utils.read_only_property("_digital_signature")
+ content_commitment = utils.read_only_property("_content_commitment")
+ key_encipherment = utils.read_only_property("_key_encipherment")
+ data_encipherment = utils.read_only_property("_data_encipherment")
+ key_agreement = utils.read_only_property("_key_agreement")
+ key_cert_sign = utils.read_only_property("_key_cert_sign")
+ crl_sign = utils.read_only_property("_crl_sign")
+
+ @property
+ def encipher_only(self):
+ if not self.key_agreement:
+ raise ValueError(
+ "encipher_only is undefined unless key_agreement is true"
+ )
+ else:
+ return self._encipher_only
+
+ @property
+ def decipher_only(self):
+ if not self.key_agreement:
+ raise ValueError(
+ "decipher_only is undefined unless key_agreement is true"
+ )
+ else:
+ return self._decipher_only
+
+ def __repr__(self):
+ try:
+ encipher_only = self.encipher_only
+ decipher_only = self.decipher_only
+ except ValueError:
+ # Users found None confusing because even though encipher/decipher
+ # have no meaning unless key_agreement is true, to construct an
+ # instance of the class you still need to pass False.
+ encipher_only = False
+ decipher_only = False
+
+ return (
+ "<KeyUsage(digital_signature={0.digital_signature}, "
+ "content_commitment={0.content_commitment}, "
+ "key_encipherment={0.key_encipherment}, "
+ "data_encipherment={0.data_encipherment}, "
+ "key_agreement={0.key_agreement}, "
+ "key_cert_sign={0.key_cert_sign}, crl_sign={0.crl_sign}, "
+ "encipher_only={1}, decipher_only={2})>"
+ ).format(self, encipher_only, decipher_only)
+
+ def __eq__(self, other):
+ if not isinstance(other, KeyUsage):
+ return NotImplemented
+
+ return (
+ self.digital_signature == other.digital_signature
+ and self.content_commitment == other.content_commitment
+ and self.key_encipherment == other.key_encipherment
+ and self.data_encipherment == other.data_encipherment
+ and self.key_agreement == other.key_agreement
+ and self.key_cert_sign == other.key_cert_sign
+ and self.crl_sign == other.crl_sign
+ and self._encipher_only == other._encipher_only
+ and self._decipher_only == other._decipher_only
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(
+ (
+ self.digital_signature,
+ self.content_commitment,
+ self.key_encipherment,
+ self.data_encipherment,
+ self.key_agreement,
+ self.key_cert_sign,
+ self.crl_sign,
+ self._encipher_only,
+ self._decipher_only,
+ )
+ )
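A short sketch, not part of the module: a typical TLS server certificate's key usage, plus the documented ValueError when encipher_only/decipher_only are read without key_agreement.

    usage = KeyUsage(
        digital_signature=True,
        content_commitment=False,
        key_encipherment=True,
        data_encipherment=False,
        key_agreement=False,
        key_cert_sign=False,
        crl_sign=False,
        encipher_only=False,
        decipher_only=False,
    )
    usage.digital_signature  # True
    usage.encipher_only      # raises ValueError: key_agreement is not true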
+
+
+@utils.register_interface(ExtensionType)
+class NameConstraints(object):
+ oid = ExtensionOID.NAME_CONSTRAINTS
+
+ def __init__(self, permitted_subtrees, excluded_subtrees):
+ if permitted_subtrees is not None:
+ permitted_subtrees = list(permitted_subtrees)
+ if not all(isinstance(x, GeneralName) for x in permitted_subtrees):
+ raise TypeError(
+ "permitted_subtrees must be a list of GeneralName objects "
+ "or None"
+ )
+
+ self._validate_ip_name(permitted_subtrees)
+
+ if excluded_subtrees is not None:
+ excluded_subtrees = list(excluded_subtrees)
+ if not all(isinstance(x, GeneralName) for x in excluded_subtrees):
+ raise TypeError(
+ "excluded_subtrees must be a list of GeneralName objects "
+ "or None"
+ )
+
+ self._validate_ip_name(excluded_subtrees)
+
+ if permitted_subtrees is None and excluded_subtrees is None:
+ raise ValueError(
+ "At least one of permitted_subtrees and excluded_subtrees "
+ "must not be None"
+ )
+
+ self._permitted_subtrees = permitted_subtrees
+ self._excluded_subtrees = excluded_subtrees
+
+ def __eq__(self, other):
+ if not isinstance(other, NameConstraints):
+ return NotImplemented
+
+ return (
+ self.excluded_subtrees == other.excluded_subtrees
+ and self.permitted_subtrees == other.permitted_subtrees
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def _validate_ip_name(self, tree):
+ if any(
+ isinstance(name, IPAddress)
+ and not isinstance(
+ name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network)
+ )
+ for name in tree
+ ):
+ raise TypeError(
+ "IPAddress name constraints must be an IPv4Network or"
+ " IPv6Network object"
+ )
+
+ def __repr__(self):
+ return (
+ u"<NameConstraints(permitted_subtrees={0.permitted_subtrees}, "
+ u"excluded_subtrees={0.excluded_subtrees})>".format(self)
+ )
+
+ def __hash__(self):
+ if self.permitted_subtrees is not None:
+ ps = tuple(self.permitted_subtrees)
+ else:
+ ps = None
+
+ if self.excluded_subtrees is not None:
+ es = tuple(self.excluded_subtrees)
+ else:
+ es = None
+
+ return hash((ps, es))
+
+ permitted_subtrees = utils.read_only_property("_permitted_subtrees")
+ excluded_subtrees = utils.read_only_property("_excluded_subtrees")
+
+
+class Extension(object):
+ def __init__(self, oid, critical, value):
+ if not isinstance(oid, ObjectIdentifier):
+ raise TypeError(
+ "oid argument must be an ObjectIdentifier instance."
+ )
+
+ if not isinstance(critical, bool):
+ raise TypeError("critical must be a boolean value")
+
+ self._oid = oid
+ self._critical = critical
+ self._value = value
+
+ oid = utils.read_only_property("_oid")
+ critical = utils.read_only_property("_critical")
+ value = utils.read_only_property("_value")
+
+ def __repr__(self):
+ return (
+ "<Extension(oid={0.oid}, critical={0.critical}, "
+ "value={0.value})>"
+ ).format(self)
+
+ def __eq__(self, other):
+ if not isinstance(other, Extension):
+ return NotImplemented
+
+ return (
+ self.oid == other.oid
+ and self.critical == other.critical
+ and self.value == other.value
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash((self.oid, self.critical, self.value))
+
+
+class GeneralNames(object):
+ def __init__(self, general_names):
+ general_names = list(general_names)
+ if not all(isinstance(x, GeneralName) for x in general_names):
+ raise TypeError(
+ "Every item in the general_names list must be an "
+ "object conforming to the GeneralName interface"
+ )
+
+ self._general_names = general_names
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+ def get_values_for_type(self, type):
+ # Return the value of each GeneralName, except for OtherName instances
+ # which we return directly because they have two important properties,
+ # not just one value.
+ objs = (i for i in self if isinstance(i, type))
+ if type != OtherName:
+ objs = (i.value for i in objs)
+ return list(objs)
+
+ def __repr__(self):
+ return "<GeneralNames({})>".format(self._general_names)
+
+ def __eq__(self, other):
+ if not isinstance(other, GeneralNames):
+ return NotImplemented
+
+ return self._general_names == other._general_names
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(tuple(self._general_names))
+
+
+@utils.register_interface(ExtensionType)
+class SubjectAlternativeName(object):
+ oid = ExtensionOID.SUBJECT_ALTERNATIVE_NAME
+
+ def __init__(self, general_names):
+ self._general_names = GeneralNames(general_names)
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+ def get_values_for_type(self, type):
+ return self._general_names.get_values_for_type(type)
+
+ def __repr__(self):
+ return "<SubjectAlternativeName({})>".format(self._general_names)
+
+ def __eq__(self, other):
+ if not isinstance(other, SubjectAlternativeName):
+ return NotImplemented
+
+ return self._general_names == other._general_names
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self._general_names)
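An illustrative sketch (not part of the module): building a SubjectAlternativeName and filtering it with get_values_for_type(); the imports come from this package's general_name module.

    import ipaddress
    from cryptography.x509.general_name import DNSName, IPAddress

    san = SubjectAlternativeName([
        DNSName(u"example.com"),
        IPAddress(ipaddress.ip_address(u"192.0.2.1")),
    ])
    san.get_values_for_type(DNSName)    # [u"example.com"]
    san.get_values_for_type(IPAddress)  # [IPv4Address('192.0.2.1')]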
+
+
+@utils.register_interface(ExtensionType)
+class IssuerAlternativeName(object):
+ oid = ExtensionOID.ISSUER_ALTERNATIVE_NAME
+
+ def __init__(self, general_names):
+ self._general_names = GeneralNames(general_names)
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+ def get_values_for_type(self, type):
+ return self._general_names.get_values_for_type(type)
+
+ def __repr__(self):
+ return "<IssuerAlternativeName({})>".format(self._general_names)
+
+ def __eq__(self, other):
+ if not isinstance(other, IssuerAlternativeName):
+ return NotImplemented
+
+ return self._general_names == other._general_names
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self._general_names)
+
+
+@utils.register_interface(ExtensionType)
+class CertificateIssuer(object):
+ oid = CRLEntryExtensionOID.CERTIFICATE_ISSUER
+
+ def __init__(self, general_names):
+ self._general_names = GeneralNames(general_names)
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods("_general_names")
+
+ def get_values_for_type(self, type):
+ return self._general_names.get_values_for_type(type)
+
+ def __repr__(self):
+ return "<CertificateIssuer({})>".format(self._general_names)
+
+ def __eq__(self, other):
+ if not isinstance(other, CertificateIssuer):
+ return NotImplemented
+
+ return self._general_names == other._general_names
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self._general_names)
+
+
+@utils.register_interface(ExtensionType)
+class CRLReason(object):
+ oid = CRLEntryExtensionOID.CRL_REASON
+
+ def __init__(self, reason):
+ if not isinstance(reason, ReasonFlags):
+ raise TypeError("reason must be an element from ReasonFlags")
+
+ self._reason = reason
+
+ def __repr__(self):
+ return "<CRLReason(reason={})>".format(self._reason)
+
+ def __eq__(self, other):
+ if not isinstance(other, CRLReason):
+ return NotImplemented
+
+ return self.reason == other.reason
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self.reason)
+
+ reason = utils.read_only_property("_reason")
+
+
+@utils.register_interface(ExtensionType)
+class InvalidityDate(object):
+ oid = CRLEntryExtensionOID.INVALIDITY_DATE
+
+ def __init__(self, invalidity_date):
+ if not isinstance(invalidity_date, datetime.datetime):
+ raise TypeError("invalidity_date must be a datetime.datetime")
+
+ self._invalidity_date = invalidity_date
+
+ def __repr__(self):
+ return "<InvalidityDate(invalidity_date={})>".format(
+ self._invalidity_date
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, InvalidityDate):
+ return NotImplemented
+
+ return self.invalidity_date == other.invalidity_date
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self.invalidity_date)
+
+ invalidity_date = utils.read_only_property("_invalidity_date")
+
+
+@utils.register_interface(ExtensionType)
+class PrecertificateSignedCertificateTimestamps(object):
+ oid = ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS
+
+ def __init__(self, signed_certificate_timestamps):
+ signed_certificate_timestamps = list(signed_certificate_timestamps)
+ if not all(
+ isinstance(sct, SignedCertificateTimestamp)
+ for sct in signed_certificate_timestamps
+ ):
+ raise TypeError(
+ "Every item in the signed_certificate_timestamps list must be "
+ "a SignedCertificateTimestamp"
+ )
+ self._signed_certificate_timestamps = signed_certificate_timestamps
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods(
+ "_signed_certificate_timestamps"
+ )
+
+ def __repr__(self):
+ return "<PrecertificateSignedCertificateTimestamps({})>".format(
+ list(self)
+ )
+
+ def __hash__(self):
+ return hash(tuple(self._signed_certificate_timestamps))
+
+ def __eq__(self, other):
+ if not isinstance(other, PrecertificateSignedCertificateTimestamps):
+ return NotImplemented
+
+ return (
+ self._signed_certificate_timestamps
+ == other._signed_certificate_timestamps
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+
+@utils.register_interface(ExtensionType)
+class SignedCertificateTimestamps(object):
+ oid = ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS
+
+ def __init__(self, signed_certificate_timestamps):
+ signed_certificate_timestamps = list(signed_certificate_timestamps)
+ if not all(
+ isinstance(sct, SignedCertificateTimestamp)
+ for sct in signed_certificate_timestamps
+ ):
+ raise TypeError(
+ "Every item in the signed_certificate_timestamps list must be "
+ "a SignedCertificateTimestamp"
+ )
+ self._signed_certificate_timestamps = signed_certificate_timestamps
+
+ __len__, __iter__, __getitem__ = _make_sequence_methods(
+ "_signed_certificate_timestamps"
+ )
+
+ def __repr__(self):
+ return "<SignedCertificateTimestamps({})>".format(list(self))
+
+ def __hash__(self):
+ return hash(tuple(self._signed_certificate_timestamps))
+
+ def __eq__(self, other):
+ if not isinstance(other, SignedCertificateTimestamps):
+ return NotImplemented
+
+ return (
+ self._signed_certificate_timestamps
+ == other._signed_certificate_timestamps
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+
+@utils.register_interface(ExtensionType)
+class OCSPNonce(object):
+ oid = OCSPExtensionOID.NONCE
+
+ def __init__(self, nonce):
+ if not isinstance(nonce, bytes):
+ raise TypeError("nonce must be bytes")
+
+ self._nonce = nonce
+
+ def __eq__(self, other):
+ if not isinstance(other, OCSPNonce):
+ return NotImplemented
+
+ return self.nonce == other.nonce
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self.nonce)
+
+ def __repr__(self):
+ return "<OCSPNonce(nonce={0.nonce!r})>".format(self)
+
+ nonce = utils.read_only_property("_nonce")
+
+
+@utils.register_interface(ExtensionType)
+class IssuingDistributionPoint(object):
+ oid = ExtensionOID.ISSUING_DISTRIBUTION_POINT
+
+ def __init__(
+ self,
+ full_name,
+ relative_name,
+ only_contains_user_certs,
+ only_contains_ca_certs,
+ only_some_reasons,
+ indirect_crl,
+ only_contains_attribute_certs,
+ ):
+ if only_some_reasons and (
+ not isinstance(only_some_reasons, frozenset)
+ or not all(isinstance(x, ReasonFlags) for x in only_some_reasons)
+ ):
+ raise TypeError(
+ "only_some_reasons must be None or frozenset of ReasonFlags"
+ )
+
+ if only_some_reasons and (
+ ReasonFlags.unspecified in only_some_reasons
+ or ReasonFlags.remove_from_crl in only_some_reasons
+ ):
+ raise ValueError(
+ "unspecified and remove_from_crl are not valid reasons in an "
+ "IssuingDistributionPoint"
+ )
+
+ if not (
+ isinstance(only_contains_user_certs, bool)
+ and isinstance(only_contains_ca_certs, bool)
+ and isinstance(indirect_crl, bool)
+ and isinstance(only_contains_attribute_certs, bool)
+ ):
+ raise TypeError(
+ "only_contains_user_certs, only_contains_ca_certs, "
+ "indirect_crl and only_contains_attribute_certs "
+ "must all be boolean."
+ )
+
+ crl_constraints = [
+ only_contains_user_certs,
+ only_contains_ca_certs,
+ indirect_crl,
+ only_contains_attribute_certs,
+ ]
+
+ if len([x for x in crl_constraints if x]) > 1:
+ raise ValueError(
+ "Only one of the following can be set to True: "
+ "only_contains_user_certs, only_contains_ca_certs, "
+ "indirect_crl, only_contains_attribute_certs"
+ )
+
+ if not any(
+ [
+ only_contains_user_certs,
+ only_contains_ca_certs,
+ indirect_crl,
+ only_contains_attribute_certs,
+ full_name,
+ relative_name,
+ only_some_reasons,
+ ]
+ ):
+ raise ValueError(
+ "Cannot create empty extension: "
+ "if only_contains_user_certs, only_contains_ca_certs, "
+ "indirect_crl, and only_contains_attribute_certs are all False"
+ ", then either full_name, relative_name, or only_some_reasons "
+ "must have a value."
+ )
+
+ self._only_contains_user_certs = only_contains_user_certs
+ self._only_contains_ca_certs = only_contains_ca_certs
+ self._indirect_crl = indirect_crl
+ self._only_contains_attribute_certs = only_contains_attribute_certs
+ self._only_some_reasons = only_some_reasons
+ self._full_name = full_name
+ self._relative_name = relative_name
+
+ def __repr__(self):
+ return (
+ "<IssuingDistributionPoint(full_name={0.full_name}, "
+ "relative_name={0.relative_name}, "
+ "only_contains_user_certs={0.only_contains_user_certs}, "
+ "only_contains_ca_certs={0.only_contains_ca_certs}, "
+ "only_some_reasons={0.only_some_reasons}, "
+ "indirect_crl={0.indirect_crl}, "
+ "only_contains_attribute_certs="
+ "{0.only_contains_attribute_certs})>".format(self)
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, IssuingDistributionPoint):
+ return NotImplemented
+
+ return (
+ self.full_name == other.full_name
+ and self.relative_name == other.relative_name
+ and self.only_contains_user_certs == other.only_contains_user_certs
+ and self.only_contains_ca_certs == other.only_contains_ca_certs
+ and self.only_some_reasons == other.only_some_reasons
+ and self.indirect_crl == other.indirect_crl
+ and self.only_contains_attribute_certs
+ == other.only_contains_attribute_certs
+ )
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(
+ (
+ self.full_name,
+ self.relative_name,
+ self.only_contains_user_certs,
+ self.only_contains_ca_certs,
+ self.only_some_reasons,
+ self.indirect_crl,
+ self.only_contains_attribute_certs,
+ )
+ )
+
+ full_name = utils.read_only_property("_full_name")
+ relative_name = utils.read_only_property("_relative_name")
+ only_contains_user_certs = utils.read_only_property(
+ "_only_contains_user_certs"
+ )
+ only_contains_ca_certs = utils.read_only_property(
+ "_only_contains_ca_certs"
+ )
+ only_some_reasons = utils.read_only_property("_only_some_reasons")
+ indirect_crl = utils.read_only_property("_indirect_crl")
+ only_contains_attribute_certs = utils.read_only_property(
+ "_only_contains_attribute_certs"
+ )
+
+
+@utils.register_interface(ExtensionType)
+class UnrecognizedExtension(object):
+ def __init__(self, oid, value):
+ if not isinstance(oid, ObjectIdentifier):
+ raise TypeError("oid must be an ObjectIdentifier")
+ self._oid = oid
+ self._value = value
+
+ oid = utils.read_only_property("_oid")
+ value = utils.read_only_property("_value")
+
+ def __repr__(self):
+ return (
+ "<UnrecognizedExtension(oid={0.oid}, "
+ "value={0.value!r})>".format(self)
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, UnrecognizedExtension):
+ return NotImplemented
+
+ return self.oid == other.oid and self.value == other.value
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash((self.oid, self.value))
diff --git a/contrib/python/cryptography/py2/cryptography/x509/general_name.py b/contrib/python/cryptography/py2/cryptography/x509/general_name.py
new file mode 100644
index 0000000000..9be9d8c991
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/x509/general_name.py
@@ -0,0 +1,294 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+import ipaddress
+from email.utils import parseaddr
+
+import six
+
+from cryptography import utils
+from cryptography.x509.name import Name
+from cryptography.x509.oid import ObjectIdentifier
+
+
+_GENERAL_NAMES = {
+ 0: "otherName",
+ 1: "rfc822Name",
+ 2: "dNSName",
+ 3: "x400Address",
+ 4: "directoryName",
+ 5: "ediPartyName",
+ 6: "uniformResourceIdentifier",
+ 7: "iPAddress",
+ 8: "registeredID",
+}
+
+
+class UnsupportedGeneralNameType(Exception):
+ def __init__(self, msg, type):
+ super(UnsupportedGeneralNameType, self).__init__(msg)
+ self.type = type
+
+
+@six.add_metaclass(abc.ABCMeta)
+class GeneralName(object):
+ @abc.abstractproperty
+ def value(self):
+ """
+ Return the value of the object
+ """
+
+
+@utils.register_interface(GeneralName)
+class RFC822Name(object):
+ def __init__(self, value):
+ if isinstance(value, six.text_type):
+ try:
+ value.encode("ascii")
+ except UnicodeEncodeError:
+ raise ValueError(
+ "RFC822Name values should be passed as an A-label string. "
+ "This means unicode characters should be encoded via "
+ "a library like idna."
+ )
+ else:
+ raise TypeError("value must be string")
+
+ name, address = parseaddr(value)
+ if name or not address:
+ # parseaddr has found a name (e.g. Name <email>) or the entire
+ # value is an empty string.
+ raise ValueError("Invalid rfc822name value")
+
+ self._value = value
+
+ value = utils.read_only_property("_value")
+
+ @classmethod
+ def _init_without_validation(cls, value):
+ instance = cls.__new__(cls)
+ instance._value = value
+ return instance
+
+ def __repr__(self):
+ return "<RFC822Name(value={0!r})>".format(self.value)
+
+ def __eq__(self, other):
+ if not isinstance(other, RFC822Name):
+ return NotImplemented
+
+ return self.value == other.value
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self.value)
+
+
+@utils.register_interface(GeneralName)
+class DNSName(object):
+ def __init__(self, value):
+ if isinstance(value, six.text_type):
+ try:
+ value.encode("ascii")
+ except UnicodeEncodeError:
+ raise ValueError(
+ "DNSName values should be passed as an A-label string. "
+ "This means unicode characters should be encoded via "
+ "a library like idna."
+ )
+ else:
+ raise TypeError("value must be string")
+
+ self._value = value
+
+ value = utils.read_only_property("_value")
+
+ @classmethod
+ def _init_without_validation(cls, value):
+ instance = cls.__new__(cls)
+ instance._value = value
+ return instance
+
+ def __repr__(self):
+ return "<DNSName(value={0!r})>".format(self.value)
+
+ def __eq__(self, other):
+ if not isinstance(other, DNSName):
+ return NotImplemented
+
+ return self.value == other.value
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self.value)
+
+
+@utils.register_interface(GeneralName)
+class UniformResourceIdentifier(object):
+ def __init__(self, value):
+ if isinstance(value, six.text_type):
+ try:
+ value.encode("ascii")
+ except UnicodeEncodeError:
+ raise ValueError(
+ "URI values should be passed as an A-label string. "
+ "This means unicode characters should be encoded via "
+ "a library like idna."
+ )
+ else:
+ raise TypeError("value must be string")
+
+ self._value = value
+
+ value = utils.read_only_property("_value")
+
+ @classmethod
+ def _init_without_validation(cls, value):
+ instance = cls.__new__(cls)
+ instance._value = value
+ return instance
+
+ def __repr__(self):
+ return "<UniformResourceIdentifier(value={0!r})>".format(self.value)
+
+ def __eq__(self, other):
+ if not isinstance(other, UniformResourceIdentifier):
+ return NotImplemented
+
+ return self.value == other.value
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self.value)
+
+
+@utils.register_interface(GeneralName)
+class DirectoryName(object):
+ def __init__(self, value):
+ if not isinstance(value, Name):
+ raise TypeError("value must be a Name")
+
+ self._value = value
+
+ value = utils.read_only_property("_value")
+
+ def __repr__(self):
+ return "<DirectoryName(value={})>".format(self.value)
+
+ def __eq__(self, other):
+ if not isinstance(other, DirectoryName):
+ return NotImplemented
+
+ return self.value == other.value
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self.value)
+
+
+@utils.register_interface(GeneralName)
+class RegisteredID(object):
+ def __init__(self, value):
+ if not isinstance(value, ObjectIdentifier):
+ raise TypeError("value must be an ObjectIdentifier")
+
+ self._value = value
+
+ value = utils.read_only_property("_value")
+
+ def __repr__(self):
+ return "<RegisteredID(value={})>".format(self.value)
+
+ def __eq__(self, other):
+ if not isinstance(other, RegisteredID):
+ return NotImplemented
+
+ return self.value == other.value
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self.value)
+
+
+@utils.register_interface(GeneralName)
+class IPAddress(object):
+ def __init__(self, value):
+ if not isinstance(
+ value,
+ (
+ ipaddress.IPv4Address,
+ ipaddress.IPv6Address,
+ ipaddress.IPv4Network,
+ ipaddress.IPv6Network,
+ ),
+ ):
+ raise TypeError(
+ "value must be an instance of ipaddress.IPv4Address, "
+ "ipaddress.IPv6Address, ipaddress.IPv4Network, or "
+ "ipaddress.IPv6Network"
+ )
+
+ self._value = value
+
+ value = utils.read_only_property("_value")
+
+ def __repr__(self):
+ return "<IPAddress(value={})>".format(self.value)
+
+ def __eq__(self, other):
+ if not isinstance(other, IPAddress):
+ return NotImplemented
+
+ return self.value == other.value
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self.value)
+
+
+@utils.register_interface(GeneralName)
+class OtherName(object):
+ def __init__(self, type_id, value):
+ if not isinstance(type_id, ObjectIdentifier):
+ raise TypeError("type_id must be an ObjectIdentifier")
+ if not isinstance(value, bytes):
+ raise TypeError("value must be a binary string")
+
+ self._type_id = type_id
+ self._value = value
+
+ type_id = utils.read_only_property("_type_id")
+ value = utils.read_only_property("_value")
+
+ def __repr__(self):
+ return "<OtherName(type_id={}, value={!r})>".format(
+ self.type_id, self.value
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, OtherName):
+ return NotImplemented
+
+ return self.type_id == other.type_id and self.value == other.value
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash((self.type_id, self.value))
diff --git a/contrib/python/cryptography/py2/cryptography/x509/name.py b/contrib/python/cryptography/py2/cryptography/x509/name.py
new file mode 100644
index 0000000000..0be876a0ed
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/x509/name.py
@@ -0,0 +1,261 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from enum import Enum
+
+import six
+
+from cryptography import utils
+from cryptography.hazmat.backends import _get_backend
+from cryptography.x509.oid import NameOID, ObjectIdentifier
+
+
+class _ASN1Type(Enum):
+ UTF8String = 12
+ NumericString = 18
+ PrintableString = 19
+ T61String = 20
+ IA5String = 22
+ UTCTime = 23
+ GeneralizedTime = 24
+ VisibleString = 26
+ UniversalString = 28
+ BMPString = 30
+
+
+_ASN1_TYPE_TO_ENUM = {i.value: i for i in _ASN1Type}
+_SENTINEL = object()
+_NAMEOID_DEFAULT_TYPE = {
+ NameOID.COUNTRY_NAME: _ASN1Type.PrintableString,
+ NameOID.JURISDICTION_COUNTRY_NAME: _ASN1Type.PrintableString,
+ NameOID.SERIAL_NUMBER: _ASN1Type.PrintableString,
+ NameOID.DN_QUALIFIER: _ASN1Type.PrintableString,
+ NameOID.EMAIL_ADDRESS: _ASN1Type.IA5String,
+ NameOID.DOMAIN_COMPONENT: _ASN1Type.IA5String,
+}
+
+#: Short attribute names from RFC 4514:
+#: https://tools.ietf.org/html/rfc4514#page-7
+_NAMEOID_TO_NAME = {
+ NameOID.COMMON_NAME: "CN",
+ NameOID.LOCALITY_NAME: "L",
+ NameOID.STATE_OR_PROVINCE_NAME: "ST",
+ NameOID.ORGANIZATION_NAME: "O",
+ NameOID.ORGANIZATIONAL_UNIT_NAME: "OU",
+ NameOID.COUNTRY_NAME: "C",
+ NameOID.STREET_ADDRESS: "STREET",
+ NameOID.DOMAIN_COMPONENT: "DC",
+ NameOID.USER_ID: "UID",
+}
+
+
+def _escape_dn_value(val):
+ """Escape special characters in RFC4514 Distinguished Name value."""
+
+ if not val:
+ return ""
+
+ # See https://tools.ietf.org/html/rfc4514#section-2.4
+ val = val.replace("\\", "\\\\")
+ val = val.replace('"', '\\"')
+ val = val.replace("+", "\\+")
+ val = val.replace(",", "\\,")
+ val = val.replace(";", "\\;")
+ val = val.replace("<", "\\<")
+ val = val.replace(">", "\\>")
+ val = val.replace("\0", "\\00")
+
+ if val[0] in ("#", " "):
+ val = "\\" + val
+ if val[-1] == " ":
+ val = val[:-1] + "\\ "
+
+ return val
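A few worked examples of the escaping rules above (a sketch, not part of the module):

    _escape_dn_value(u"Foo, Inc.")  # -> u"Foo\\, Inc."   (',' escaped)
    _escape_dn_value(u"#comment")   # -> u"\\#comment"    (leading '#' escaped)
    _escape_dn_value(u"trailing ")  # -> u"trailing\\ "   (trailing space escaped)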
+
+
+class NameAttribute(object):
+ def __init__(self, oid, value, _type=_SENTINEL):
+ if not isinstance(oid, ObjectIdentifier):
+ raise TypeError(
+ "oid argument must be an ObjectIdentifier instance."
+ )
+
+ if not isinstance(value, six.text_type):
+ raise TypeError("value argument must be a text type.")
+
+ if (
+ oid == NameOID.COUNTRY_NAME
+ or oid == NameOID.JURISDICTION_COUNTRY_NAME
+ ):
+ if len(value.encode("utf8")) != 2:
+ raise ValueError(
+ "Country name must be a 2 character country code"
+ )
+
+ # The appropriate ASN1 string type varies by OID and is defined across
+ # multiple RFCs including 2459, 3280, and 5280. In general UTF8String
+ # is preferred (2459), but 3280 and 5280 specify several OIDs with
+ # alternate types. This means when we see the sentinel value we need
+ # to look up whether the OID has a non-UTF8 type. If it does, set it
+ # to that. Otherwise, UTF8!
+ if _type == _SENTINEL:
+ _type = _NAMEOID_DEFAULT_TYPE.get(oid, _ASN1Type.UTF8String)
+
+ if not isinstance(_type, _ASN1Type):
+ raise TypeError("_type must be from the _ASN1Type enum")
+
+ self._oid = oid
+ self._value = value
+ self._type = _type
+
+ oid = utils.read_only_property("_oid")
+ value = utils.read_only_property("_value")
+
+ def rfc4514_string(self):
+ """
+ Format as RFC4514 Distinguished Name string.
+
+ Use short attribute name if available, otherwise fall back to OID
+ dotted string.
+ """
+ key = _NAMEOID_TO_NAME.get(self.oid, self.oid.dotted_string)
+ return "%s=%s" % (key, _escape_dn_value(self.value))
+
+ def __eq__(self, other):
+ if not isinstance(other, NameAttribute):
+ return NotImplemented
+
+ return self.oid == other.oid and self.value == other.value
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash((self.oid, self.value))
+
+ def __repr__(self):
+ return "<NameAttribute(oid={0.oid}, value={0.value!r})>".format(self)
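A quick illustration of rfc4514_string() above (a sketch, not part of the module):

    NameAttribute(NameOID.COMMON_NAME, u"Sales, West").rfc4514_string()
    # -> u"CN=Sales\\, West"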
+
+
+class RelativeDistinguishedName(object):
+ def __init__(self, attributes):
+ attributes = list(attributes)
+ if not attributes:
+ raise ValueError("a relative distinguished name cannot be empty")
+ if not all(isinstance(x, NameAttribute) for x in attributes):
+ raise TypeError("attributes must be an iterable of NameAttribute")
+
+ # Keep list and frozenset to preserve attribute order where it matters
+ self._attributes = attributes
+ self._attribute_set = frozenset(attributes)
+
+ if len(self._attribute_set) != len(attributes):
+ raise ValueError("duplicate attributes are not allowed")
+
+ def get_attributes_for_oid(self, oid):
+ return [i for i in self if i.oid == oid]
+
+ def rfc4514_string(self):
+ """
+ Format as RFC4514 Distinguished Name string.
+
+ Within each RDN, attributes are joined by '+', although that is rarely
+ used in certificates.
+ """
+ return "+".join(attr.rfc4514_string() for attr in self._attributes)
+
+ def __eq__(self, other):
+ if not isinstance(other, RelativeDistinguishedName):
+ return NotImplemented
+
+ return self._attribute_set == other._attribute_set
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ return hash(self._attribute_set)
+
+ def __iter__(self):
+ return iter(self._attributes)
+
+ def __len__(self):
+ return len(self._attributes)
+
+ def __repr__(self):
+ return "<RelativeDistinguishedName({})>".format(self.rfc4514_string())
+
+
+class Name(object):
+ def __init__(self, attributes):
+ attributes = list(attributes)
+ if all(isinstance(x, NameAttribute) for x in attributes):
+ self._attributes = [
+ RelativeDistinguishedName([x]) for x in attributes
+ ]
+ elif all(isinstance(x, RelativeDistinguishedName) for x in attributes):
+ self._attributes = attributes
+ else:
+ raise TypeError(
+ "attributes must be a list of NameAttribute"
+ " or a list of RelativeDistinguishedName"
+ )
+
+ def rfc4514_string(self):
+ """
+ Format as RFC4514 Distinguished Name string.
+ For example 'CN=foobar.com,O=Foo Corp,C=US'
+
+ An X.509 name is a two-level structure: a list of sets of attributes.
+ Each list element is separated by ',' and within each list element, set
+ elements are separated by '+'. The latter is almost never used in
+ real world certificates. According to RFC4514 section 2.1 the
+ RDNSequence must be reversed when converting to string representation.
+ """
+ return ",".join(
+ attr.rfc4514_string() for attr in reversed(self._attributes)
+ )
+
+ def get_attributes_for_oid(self, oid):
+ return [i for i in self if i.oid == oid]
+
+ @property
+ def rdns(self):
+ return self._attributes
+
+ def public_bytes(self, backend=None):
+ backend = _get_backend(backend)
+ return backend.x509_name_bytes(self)
+
+ def __eq__(self, other):
+ if not isinstance(other, Name):
+ return NotImplemented
+
+ return self._attributes == other._attributes
+
+ def __ne__(self, other):
+ return not self == other
+
+ def __hash__(self):
+ # TODO: this is relatively expensive, if this looks like a bottleneck
+ # for you, consider optimizing!
+ return hash(tuple(self._attributes))
+
+ def __iter__(self):
+ for rdn in self._attributes:
+ for ava in rdn:
+ yield ava
+
+ def __len__(self):
+ return sum(len(rdn) for rdn in self._attributes)
+
+ def __repr__(self):
+ rdns = ",".join(attr.rfc4514_string() for attr in self._attributes)
+
+ if six.PY2:
+ return "<Name({})>".format(rdns.encode("utf8"))
+ else:
+ return "<Name({})>".format(rdns)
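An illustrative sketch tying the pieces above together (not part of the module); note that rfc4514_string() emits the RDNs in reverse order, as described in its docstring.

    name = Name([
        NameAttribute(NameOID.COUNTRY_NAME, u"US"),
        NameAttribute(NameOID.ORGANIZATION_NAME, u"Foo Corp"),
        NameAttribute(NameOID.COMMON_NAME, u"foobar.com"),
    ])
    name.rfc4514_string()  # -> "CN=foobar.com,O=Foo Corp,C=US"
    len(name)              # 3 attributes across 3 single-attribute RDNs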
diff --git a/contrib/python/cryptography/py2/cryptography/x509/ocsp.py b/contrib/python/cryptography/py2/cryptography/x509/ocsp.py
new file mode 100644
index 0000000000..f8e27224ec
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/x509/ocsp.py
@@ -0,0 +1,467 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+import abc
+import datetime
+from enum import Enum
+
+import six
+
+from cryptography import x509
+from cryptography.hazmat.primitives import hashes
+from cryptography.x509.base import (
+ _EARLIEST_UTC_TIME,
+ _convert_to_naive_utc_time,
+ _reject_duplicate_extension,
+)
+
+
+_OIDS_TO_HASH = {
+ "1.3.14.3.2.26": hashes.SHA1(),
+ "2.16.840.1.101.3.4.2.4": hashes.SHA224(),
+ "2.16.840.1.101.3.4.2.1": hashes.SHA256(),
+ "2.16.840.1.101.3.4.2.2": hashes.SHA384(),
+ "2.16.840.1.101.3.4.2.3": hashes.SHA512(),
+}
+
+
+class OCSPResponderEncoding(Enum):
+ HASH = "By Hash"
+ NAME = "By Name"
+
+
+class OCSPResponseStatus(Enum):
+ SUCCESSFUL = 0
+ MALFORMED_REQUEST = 1
+ INTERNAL_ERROR = 2
+ TRY_LATER = 3
+ SIG_REQUIRED = 5
+ UNAUTHORIZED = 6
+
+
+_RESPONSE_STATUS_TO_ENUM = {x.value: x for x in OCSPResponseStatus}
+_ALLOWED_HASHES = (
+ hashes.SHA1,
+ hashes.SHA224,
+ hashes.SHA256,
+ hashes.SHA384,
+ hashes.SHA512,
+)
+
+
+def _verify_algorithm(algorithm):
+ if not isinstance(algorithm, _ALLOWED_HASHES):
+ raise ValueError(
+ "Algorithm must be SHA1, SHA224, SHA256, SHA384, or SHA512"
+ )
+
+
+class OCSPCertStatus(Enum):
+ GOOD = 0
+ REVOKED = 1
+ UNKNOWN = 2
+
+
+_CERT_STATUS_TO_ENUM = {x.value: x for x in OCSPCertStatus}
+
+
+def load_der_ocsp_request(data):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ return backend.load_der_ocsp_request(data)
+
+
+def load_der_ocsp_response(data):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ return backend.load_der_ocsp_response(data)
+
+
+class OCSPRequestBuilder(object):
+ def __init__(self, request=None, extensions=[]):
+ self._request = request
+ self._extensions = extensions
+
+ def add_certificate(self, cert, issuer, algorithm):
+ if self._request is not None:
+ raise ValueError("Only one certificate can be added to a request")
+
+ _verify_algorithm(algorithm)
+ if not isinstance(cert, x509.Certificate) or not isinstance(
+ issuer, x509.Certificate
+ ):
+ raise TypeError("cert and issuer must be a Certificate")
+
+ return OCSPRequestBuilder((cert, issuer, algorithm), self._extensions)
+
+ def add_extension(self, extension, critical):
+ if not isinstance(extension, x509.ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = x509.Extension(extension.oid, critical, extension)
+ _reject_duplicate_extension(extension, self._extensions)
+
+ return OCSPRequestBuilder(
+ self._request, self._extensions + [extension]
+ )
+
+ def build(self):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if self._request is None:
+ raise ValueError("You must add a certificate before building")
+
+ return backend.create_ocsp_request(self)
+
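+# Usage sketch (illustrative only): producing a DER-encoded OCSP request for a
+# certificate/issuer pair. ``cert_pem`` and ``issuer_pem`` are assumed to hold
+# PEM-encoded certificates obtained elsewhere.
+#
+#   from cryptography import x509
+#   from cryptography.hazmat.primitives import hashes, serialization
+#
+#   cert = x509.load_pem_x509_certificate(cert_pem)
+#   issuer = x509.load_pem_x509_certificate(issuer_pem)
+#   req = (
+#       OCSPRequestBuilder()
+#       .add_certificate(cert, issuer, hashes.SHA1())
+#       .build()
+#   )
+#   der_request = req.public_bytes(serialization.Encoding.DER)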
+
+class _SingleResponse(object):
+ def __init__(
+ self,
+ cert,
+ issuer,
+ algorithm,
+ cert_status,
+ this_update,
+ next_update,
+ revocation_time,
+ revocation_reason,
+ ):
+ if not isinstance(cert, x509.Certificate) or not isinstance(
+ issuer, x509.Certificate
+ ):
+ raise TypeError("cert and issuer must be a Certificate")
+
+ _verify_algorithm(algorithm)
+ if not isinstance(this_update, datetime.datetime):
+ raise TypeError("this_update must be a datetime object")
+ if next_update is not None and not isinstance(
+ next_update, datetime.datetime
+ ):
+ raise TypeError("next_update must be a datetime object or None")
+
+ self._cert = cert
+ self._issuer = issuer
+ self._algorithm = algorithm
+ self._this_update = this_update
+ self._next_update = next_update
+
+ if not isinstance(cert_status, OCSPCertStatus):
+ raise TypeError(
+ "cert_status must be an item from the OCSPCertStatus enum"
+ )
+ if cert_status is not OCSPCertStatus.REVOKED:
+ if revocation_time is not None:
+ raise ValueError(
+ "revocation_time can only be provided if the certificate "
+ "is revoked"
+ )
+ if revocation_reason is not None:
+ raise ValueError(
+ "revocation_reason can only be provided if the certificate"
+ " is revoked"
+ )
+ else:
+ if not isinstance(revocation_time, datetime.datetime):
+ raise TypeError("revocation_time must be a datetime object")
+
+ revocation_time = _convert_to_naive_utc_time(revocation_time)
+ if revocation_time < _EARLIEST_UTC_TIME:
+ raise ValueError(
+ "The revocation_time must be on or after"
+ " 1950 January 1."
+ )
+
+ if revocation_reason is not None and not isinstance(
+ revocation_reason, x509.ReasonFlags
+ ):
+ raise TypeError(
+ "revocation_reason must be an item from the ReasonFlags "
+ "enum or None"
+ )
+
+ self._cert_status = cert_status
+ self._revocation_time = revocation_time
+ self._revocation_reason = revocation_reason
+
+
+class OCSPResponseBuilder(object):
+ def __init__(
+ self, response=None, responder_id=None, certs=None, extensions=[]
+ ):
+ self._response = response
+ self._responder_id = responder_id
+ self._certs = certs
+ self._extensions = extensions
+
+ def add_response(
+ self,
+ cert,
+ issuer,
+ algorithm,
+ cert_status,
+ this_update,
+ next_update,
+ revocation_time,
+ revocation_reason,
+ ):
+ if self._response is not None:
+ raise ValueError("Only one response per OCSPResponse.")
+
+ singleresp = _SingleResponse(
+ cert,
+ issuer,
+ algorithm,
+ cert_status,
+ this_update,
+ next_update,
+ revocation_time,
+ revocation_reason,
+ )
+ return OCSPResponseBuilder(
+ singleresp,
+ self._responder_id,
+ self._certs,
+ self._extensions,
+ )
+
+ def responder_id(self, encoding, responder_cert):
+ if self._responder_id is not None:
+ raise ValueError("responder_id can only be set once")
+ if not isinstance(responder_cert, x509.Certificate):
+ raise TypeError("responder_cert must be a Certificate")
+ if not isinstance(encoding, OCSPResponderEncoding):
+ raise TypeError(
+ "encoding must be an element from OCSPResponderEncoding"
+ )
+
+ return OCSPResponseBuilder(
+ self._response,
+ (responder_cert, encoding),
+ self._certs,
+ self._extensions,
+ )
+
+ def certificates(self, certs):
+ if self._certs is not None:
+ raise ValueError("certificates may only be set once")
+ certs = list(certs)
+ if len(certs) == 0:
+ raise ValueError("certs must not be an empty list")
+ if not all(isinstance(x, x509.Certificate) for x in certs):
+ raise TypeError("certs must be a list of Certificates")
+ return OCSPResponseBuilder(
+ self._response,
+ self._responder_id,
+ certs,
+ self._extensions,
+ )
+
+ def add_extension(self, extension, critical):
+ if not isinstance(extension, x509.ExtensionType):
+ raise TypeError("extension must be an ExtensionType")
+
+ extension = x509.Extension(extension.oid, critical, extension)
+ _reject_duplicate_extension(extension, self._extensions)
+
+ return OCSPResponseBuilder(
+ self._response,
+ self._responder_id,
+ self._certs,
+ self._extensions + [extension],
+ )
+
+ def sign(self, private_key, algorithm):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if self._response is None:
+ raise ValueError("You must add a response before signing")
+ if self._responder_id is None:
+ raise ValueError("You must add a responder_id before signing")
+
+ return backend.create_ocsp_response(
+ OCSPResponseStatus.SUCCESSFUL, self, private_key, algorithm
+ )
+
+ @classmethod
+ def build_unsuccessful(cls, response_status):
+ from cryptography.hazmat.backends.openssl.backend import backend
+
+ if not isinstance(response_status, OCSPResponseStatus):
+ raise TypeError(
+ "response_status must be an item from OCSPResponseStatus"
+ )
+ if response_status is OCSPResponseStatus.SUCCESSFUL:
+ raise ValueError("response_status cannot be SUCCESSFUL")
+
+ return backend.create_ocsp_response(response_status, None, None, None)
+
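+# Usage sketch (illustrative only): building and signing a successful OCSP
+# response. ``cert``, ``issuer``, ``responder_cert`` and ``responder_key`` are
+# assumed to be loaded elsewhere; ``responder_key`` must be the private key
+# matching ``responder_cert``.
+#
+#   import datetime
+#   from cryptography.hazmat.primitives import hashes
+#
+#   now = datetime.datetime.utcnow()
+#   builder = (
+#       OCSPResponseBuilder()
+#       .add_response(
+#           cert=cert,
+#           issuer=issuer,
+#           algorithm=hashes.SHA1(),
+#           cert_status=OCSPCertStatus.GOOD,
+#           this_update=now,
+#           next_update=now + datetime.timedelta(days=7),
+#           revocation_time=None,
+#           revocation_reason=None,
+#       )
+#       .responder_id(OCSPResponderEncoding.HASH, responder_cert)
+#   )
+#   response = builder.sign(responder_key, hashes.SHA256())
+#
+# Unsuccessful statuses are built without signing, e.g.:
+#
+#   OCSPResponseBuilder.build_unsuccessful(OCSPResponseStatus.UNAUTHORIZED)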
+
+@six.add_metaclass(abc.ABCMeta)
+class OCSPRequest(object):
+ @abc.abstractproperty
+ def issuer_key_hash(self):
+ """
+ The hash of the issuer public key
+ """
+
+ @abc.abstractproperty
+ def issuer_name_hash(self):
+ """
+ The hash of the issuer name
+ """
+
+ @abc.abstractproperty
+ def hash_algorithm(self):
+ """
+ The hash algorithm used in the issuer name and key hashes
+ """
+
+ @abc.abstractproperty
+ def serial_number(self):
+ """
+ The serial number of the cert whose status is being checked
+ """
+
+ @abc.abstractmethod
+ def public_bytes(self, encoding):
+ """
+ Serializes the request to DER
+ """
+
+ @abc.abstractproperty
+ def extensions(self):
+ """
+ The list of request extensions. Not single request extensions.
+ """
+
+
+@six.add_metaclass(abc.ABCMeta)
+class OCSPResponse(object):
+ @abc.abstractproperty
+ def response_status(self):
+ """
+ The status of the response. This is a value from the OCSPResponseStatus
+ enumeration
+ """
+
+ @abc.abstractproperty
+ def signature_algorithm_oid(self):
+ """
+ The ObjectIdentifier of the signature algorithm
+ """
+
+ @abc.abstractproperty
+ def signature_hash_algorithm(self):
+ """
+ Returns a HashAlgorithm corresponding to the type of the digest signed
+ """
+
+ @abc.abstractproperty
+ def signature(self):
+ """
+ The signature bytes
+ """
+
+ @abc.abstractproperty
+ def tbs_response_bytes(self):
+ """
+ The tbsResponseData bytes
+ """
+
+ @abc.abstractproperty
+ def certificates(self):
+ """
+ A list of certificates used to help build a chain to verify the OCSP
+ response. This situation occurs when the OCSP responder uses a delegate
+ certificate.
+ """
+
+ @abc.abstractproperty
+ def responder_key_hash(self):
+ """
+ The responder's key hash or None
+ """
+
+ @abc.abstractproperty
+ def responder_name(self):
+ """
+ The responder's Name or None
+ """
+
+ @abc.abstractproperty
+ def produced_at(self):
+ """
+ The time the response was produced
+ """
+
+ @abc.abstractproperty
+ def certificate_status(self):
+ """
+ The status of the certificate (an element from the OCSPCertStatus enum)
+ """
+
+ @abc.abstractproperty
+ def revocation_time(self):
+ """
+        The date when the certificate was revoked, or None if the certificate
+        has not been revoked.
+ """
+
+ @abc.abstractproperty
+ def revocation_reason(self):
+ """
+ The reason the certificate was revoked or None if not specified or
+ not revoked.
+ """
+
+ @abc.abstractproperty
+ def this_update(self):
+ """
+ The most recent time at which the status being indicated is known by
+ the responder to have been correct
+ """
+
+ @abc.abstractproperty
+ def next_update(self):
+ """
+ The time when newer information will be available
+ """
+
+ @abc.abstractproperty
+ def issuer_key_hash(self):
+ """
+ The hash of the issuer public key
+ """
+
+ @abc.abstractproperty
+ def issuer_name_hash(self):
+ """
+ The hash of the issuer name
+ """
+
+ @abc.abstractproperty
+ def hash_algorithm(self):
+ """
+ The hash algorithm used in the issuer name and key hashes
+ """
+
+ @abc.abstractproperty
+ def serial_number(self):
+ """
+ The serial number of the cert whose status is being checked
+ """
+
+ @abc.abstractproperty
+ def extensions(self):
+ """
+ The list of response extensions. Not single response extensions.
+ """
+
+ @abc.abstractproperty
+ def single_extensions(self):
+ """
+ The list of single response extensions. Not response extensions.
+ """
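+
+
+# Usage sketch (illustrative only): inspecting a response parsed with
+# load_der_ocsp_response(). ``der_bytes`` is assumed to hold the DER-encoded
+# response returned by an OCSP responder.
+#
+#   resp = load_der_ocsp_response(der_bytes)
+#   if resp.response_status is OCSPResponseStatus.SUCCESSFUL:
+#       resp.certificate_status  # OCSPCertStatus.GOOD / REVOKED / UNKNOWN
+#       resp.this_update         # naive datetime, UTC
+#       resp.next_update         # naive datetime, UTC, or None
+#       resp.revocation_time     # set only when the certificate is revoked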
diff --git a/contrib/python/cryptography/py2/cryptography/x509/oid.py b/contrib/python/cryptography/py2/cryptography/x509/oid.py
new file mode 100644
index 0000000000..2bf606e50d
--- /dev/null
+++ b/contrib/python/cryptography/py2/cryptography/x509/oid.py
@@ -0,0 +1,265 @@
+# This file is dual licensed under the terms of the Apache License, Version
+# 2.0, and the BSD License. See the LICENSE file in the root of this repository
+# for complete details.
+
+from __future__ import absolute_import, division, print_function
+
+from cryptography.hazmat._oid import ObjectIdentifier
+from cryptography.hazmat.primitives import hashes
+
+
+class ExtensionOID(object):
+ SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9")
+ SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14")
+ KEY_USAGE = ObjectIdentifier("2.5.29.15")
+ SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17")
+ ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18")
+ BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19")
+ NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30")
+ CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31")
+ CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32")
+ POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33")
+ AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35")
+ POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36")
+ EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37")
+ FRESHEST_CRL = ObjectIdentifier("2.5.29.46")
+ INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54")
+ ISSUING_DISTRIBUTION_POINT = ObjectIdentifier("2.5.29.28")
+ AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1")
+ SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11")
+ OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5")
+ TLS_FEATURE = ObjectIdentifier("1.3.6.1.5.5.7.1.24")
+ CRL_NUMBER = ObjectIdentifier("2.5.29.20")
+ DELTA_CRL_INDICATOR = ObjectIdentifier("2.5.29.27")
+ PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier(
+ "1.3.6.1.4.1.11129.2.4.2"
+ )
+ PRECERT_POISON = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.3")
+ SIGNED_CERTIFICATE_TIMESTAMPS = ObjectIdentifier("1.3.6.1.4.1.11129.2.4.5")
+
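+# Usage sketch (illustrative only): these OIDs are the lookup keys for
+# extensions on parsed certificates. ``cert`` is assumed to be an
+# x509.Certificate loaded elsewhere.
+#
+#   from cryptography import x509
+#
+#   try:
+#       ext = cert.extensions.get_extension_for_oid(
+#           ExtensionOID.BASIC_CONSTRAINTS
+#       )
+#       is_ca = ext.value.ca
+#   except x509.ExtensionNotFound:
+#       is_ca = False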
+
+class OCSPExtensionOID(object):
+ NONCE = ObjectIdentifier("1.3.6.1.5.5.7.48.1.2")
+
+
+class CRLEntryExtensionOID(object):
+ CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29")
+ CRL_REASON = ObjectIdentifier("2.5.29.21")
+ INVALIDITY_DATE = ObjectIdentifier("2.5.29.24")
+
+
+class NameOID(object):
+ COMMON_NAME = ObjectIdentifier("2.5.4.3")
+ COUNTRY_NAME = ObjectIdentifier("2.5.4.6")
+ LOCALITY_NAME = ObjectIdentifier("2.5.4.7")
+ STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8")
+ STREET_ADDRESS = ObjectIdentifier("2.5.4.9")
+ ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10")
+ ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11")
+ SERIAL_NUMBER = ObjectIdentifier("2.5.4.5")
+ SURNAME = ObjectIdentifier("2.5.4.4")
+ GIVEN_NAME = ObjectIdentifier("2.5.4.42")
+ TITLE = ObjectIdentifier("2.5.4.12")
+ GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44")
+ X500_UNIQUE_IDENTIFIER = ObjectIdentifier("2.5.4.45")
+ DN_QUALIFIER = ObjectIdentifier("2.5.4.46")
+ PSEUDONYM = ObjectIdentifier("2.5.4.65")
+ USER_ID = ObjectIdentifier("0.9.2342.19200300.100.1.1")
+ DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25")
+ EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1")
+ JURISDICTION_COUNTRY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.3")
+ JURISDICTION_LOCALITY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.1")
+ JURISDICTION_STATE_OR_PROVINCE_NAME = ObjectIdentifier(
+ "1.3.6.1.4.1.311.60.2.1.2"
+ )
+ BUSINESS_CATEGORY = ObjectIdentifier("2.5.4.15")
+ POSTAL_ADDRESS = ObjectIdentifier("2.5.4.16")
+ POSTAL_CODE = ObjectIdentifier("2.5.4.17")
+ INN = ObjectIdentifier("1.2.643.3.131.1.1")
+ OGRN = ObjectIdentifier("1.2.643.100.1")
+ SNILS = ObjectIdentifier("1.2.643.100.3")
+ UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2")
+
+
+class SignatureAlgorithmOID(object):
+ RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4")
+ RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5")
+ # This is an alternate OID for RSA with SHA1 that is occasionally seen
+ _RSA_WITH_SHA1 = ObjectIdentifier("1.3.14.3.2.29")
+ RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14")
+ RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11")
+ RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12")
+ RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13")
+ RSASSA_PSS = ObjectIdentifier("1.2.840.113549.1.1.10")
+ ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1")
+ ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1")
+ ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2")
+ ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3")
+ ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4")
+ DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3")
+ DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1")
+ DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2")
+ ED25519 = ObjectIdentifier("1.3.101.112")
+ ED448 = ObjectIdentifier("1.3.101.113")
+ GOSTR3411_94_WITH_3410_2001 = ObjectIdentifier("1.2.643.2.2.3")
+ GOSTR3410_2012_WITH_3411_2012_256 = ObjectIdentifier("1.2.643.7.1.1.3.2")
+ GOSTR3410_2012_WITH_3411_2012_512 = ObjectIdentifier("1.2.643.7.1.1.3.3")
+
+
+_SIG_OIDS_TO_HASH = {
+ SignatureAlgorithmOID.RSA_WITH_MD5: hashes.MD5(),
+ SignatureAlgorithmOID.RSA_WITH_SHA1: hashes.SHA1(),
+ SignatureAlgorithmOID._RSA_WITH_SHA1: hashes.SHA1(),
+ SignatureAlgorithmOID.RSA_WITH_SHA224: hashes.SHA224(),
+ SignatureAlgorithmOID.RSA_WITH_SHA256: hashes.SHA256(),
+ SignatureAlgorithmOID.RSA_WITH_SHA384: hashes.SHA384(),
+ SignatureAlgorithmOID.RSA_WITH_SHA512: hashes.SHA512(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA1: hashes.SHA1(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA224: hashes.SHA224(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA256: hashes.SHA256(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA384: hashes.SHA384(),
+ SignatureAlgorithmOID.ECDSA_WITH_SHA512: hashes.SHA512(),
+ SignatureAlgorithmOID.DSA_WITH_SHA1: hashes.SHA1(),
+ SignatureAlgorithmOID.DSA_WITH_SHA224: hashes.SHA224(),
+ SignatureAlgorithmOID.DSA_WITH_SHA256: hashes.SHA256(),
+ SignatureAlgorithmOID.ED25519: None,
+ SignatureAlgorithmOID.ED448: None,
+ SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: None,
+ SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: None,
+ SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: None,
+}
+
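+# Usage sketch (illustrative only): _SIG_OIDS_TO_HASH is a private mapping
+# from signature algorithm OIDs to HashAlgorithm instances (None for
+# algorithms such as ed25519/ed448 that have no separate digest). Certificate
+# objects expose the same information publicly; ``cert`` is assumed to be an
+# x509.Certificate loaded elsewhere.
+#
+#   cert.signature_algorithm_oid    # e.g. SignatureAlgorithmOID.RSA_WITH_SHA256
+#   cert.signature_hash_algorithm   # e.g. a hashes.SHA256() instance, or None
+#   _SIG_OIDS_TO_HASH.get(cert.signature_algorithm_oid)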
+
+class ExtendedKeyUsageOID(object):
+ SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1")
+ CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2")
+ CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3")
+ EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4")
+ TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8")
+ OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9")
+ ANY_EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37.0")
+
+
+class AuthorityInformationAccessOID(object):
+ CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2")
+ OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1")
+
+
+class SubjectInformationAccessOID(object):
+ CA_REPOSITORY = ObjectIdentifier("1.3.6.1.5.5.7.48.5")
+
+
+class CertificatePoliciesOID(object):
+ CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1")
+ CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2")
+ ANY_POLICY = ObjectIdentifier("2.5.29.32.0")
+
+
+class AttributeOID(object):
+ CHALLENGE_PASSWORD = ObjectIdentifier("1.2.840.113549.1.9.7")
+ UNSTRUCTURED_NAME = ObjectIdentifier("1.2.840.113549.1.9.2")
+
+
+_OID_NAMES = {
+ NameOID.COMMON_NAME: "commonName",
+ NameOID.COUNTRY_NAME: "countryName",
+ NameOID.LOCALITY_NAME: "localityName",
+ NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName",
+ NameOID.STREET_ADDRESS: "streetAddress",
+ NameOID.ORGANIZATION_NAME: "organizationName",
+ NameOID.ORGANIZATIONAL_UNIT_NAME: "organizationalUnitName",
+ NameOID.SERIAL_NUMBER: "serialNumber",
+ NameOID.SURNAME: "surname",
+ NameOID.GIVEN_NAME: "givenName",
+ NameOID.TITLE: "title",
+ NameOID.GENERATION_QUALIFIER: "generationQualifier",
+ NameOID.X500_UNIQUE_IDENTIFIER: "x500UniqueIdentifier",
+ NameOID.DN_QUALIFIER: "dnQualifier",
+ NameOID.PSEUDONYM: "pseudonym",
+ NameOID.USER_ID: "userID",
+ NameOID.DOMAIN_COMPONENT: "domainComponent",
+ NameOID.EMAIL_ADDRESS: "emailAddress",
+ NameOID.JURISDICTION_COUNTRY_NAME: "jurisdictionCountryName",
+ NameOID.JURISDICTION_LOCALITY_NAME: "jurisdictionLocalityName",
+ NameOID.JURISDICTION_STATE_OR_PROVINCE_NAME: (
+ "jurisdictionStateOrProvinceName"
+ ),
+ NameOID.BUSINESS_CATEGORY: "businessCategory",
+ NameOID.POSTAL_ADDRESS: "postalAddress",
+ NameOID.POSTAL_CODE: "postalCode",
+ NameOID.INN: "INN",
+ NameOID.OGRN: "OGRN",
+ NameOID.SNILS: "SNILS",
+ NameOID.UNSTRUCTURED_NAME: "unstructuredName",
+ SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption",
+ SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption",
+ SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption",
+ SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption",
+ SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption",
+ SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption",
+ SignatureAlgorithmOID.RSASSA_PSS: "RSASSA-PSS",
+ SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1",
+ SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224",
+ SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256",
+ SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384",
+ SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512",
+ SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1",
+ SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224",
+ SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256",
+ SignatureAlgorithmOID.ED25519: "ed25519",
+ SignatureAlgorithmOID.ED448: "ed448",
+ SignatureAlgorithmOID.GOSTR3411_94_WITH_3410_2001: (
+ "GOST R 34.11-94 with GOST R 34.10-2001"
+ ),
+ SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_256: (
+ "GOST R 34.10-2012 with GOST R 34.11-2012 (256 bit)"
+ ),
+ SignatureAlgorithmOID.GOSTR3410_2012_WITH_3411_2012_512: (
+ "GOST R 34.10-2012 with GOST R 34.11-2012 (512 bit)"
+ ),
+ ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth",
+ ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth",
+ ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning",
+ ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection",
+ ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping",
+ ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning",
+ ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes",
+ ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier",
+ ExtensionOID.KEY_USAGE: "keyUsage",
+ ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName",
+ ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName",
+ ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints",
+ ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS: (
+ "signedCertificateTimestampList"
+ ),
+ ExtensionOID.SIGNED_CERTIFICATE_TIMESTAMPS: (
+ "signedCertificateTimestampList"
+ ),
+ ExtensionOID.PRECERT_POISON: "ctPoison",
+ CRLEntryExtensionOID.CRL_REASON: "cRLReason",
+ CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate",
+ CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer",
+ ExtensionOID.NAME_CONSTRAINTS: "nameConstraints",
+ ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints",
+ ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies",
+ ExtensionOID.POLICY_MAPPINGS: "policyMappings",
+ ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier",
+ ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints",
+ ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage",
+ ExtensionOID.FRESHEST_CRL: "freshestCRL",
+ ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy",
+    ExtensionOID.ISSUING_DISTRIBUTION_POINT: "issuingDistributionPoint",
+ ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess",
+ ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess",
+ ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck",
+ ExtensionOID.CRL_NUMBER: "cRLNumber",
+ ExtensionOID.DELTA_CRL_INDICATOR: "deltaCRLIndicator",
+ ExtensionOID.TLS_FEATURE: "TLSFeature",
+ AuthorityInformationAccessOID.OCSP: "OCSP",
+ AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers",
+ SubjectInformationAccessOID.CA_REPOSITORY: "caRepository",
+ CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps",
+ CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice",
+ OCSPExtensionOID.NONCE: "OCSPNonce",
+ AttributeOID.CHALLENGE_PASSWORD: "challengePassword",
+}