author    shadchin <shadchin@yandex-team.ru>  2022-02-10 16:44:30 +0300
committer Daniil Cherednik <dcherednik@yandex-team.ru>  2022-02-10 16:44:30 +0300
commit    2598ef1d0aee359b4b6d5fdd1758916d5907d04f (patch)
tree      012bb94d777798f1f56ac1cec429509766d05181 /contrib/python/urllib3
parent    6751af0b0c1b952fede40b19b71da8025b5d8bcf (diff)
download  ydb-2598ef1d0aee359b4b6d5fdd1758916d5907d04f.tar.gz
Restoring authorship annotation for <shadchin@yandex-team.ru>. Commit 1 of 2.
Diffstat (limited to 'contrib/python/urllib3')
-rw-r--r--  contrib/python/urllib3/.dist-info/METADATA                             908
-rw-r--r--  contrib/python/urllib3/LICENSE.txt                                      42
-rw-r--r--  contrib/python/urllib3/README.rst                                      236
-rw-r--r--  contrib/python/urllib3/patches/01-arcadia.patch                         48
-rw-r--r--  contrib/python/urllib3/patches/02-fix-ya.make.patch                     62
-rw-r--r--  contrib/python/urllib3/urllib3/__init__.py                              70
-rw-r--r--  contrib/python/urllib3/urllib3/_collections.py                          88
-rw-r--r--  contrib/python/urllib3/urllib3/_version.py                               4
-rw-r--r--  contrib/python/urllib3/urllib3/connection.py                           746
-rw-r--r--  contrib/python/urllib3/urllib3/connectionpool.py                       864
-rw-r--r--  contrib/python/urllib3/urllib3/contrib/_appengine_environ.py            72
-rw-r--r--  contrib/python/urllib3/urllib3/contrib/_securetransport/bindings.py    248
-rw-r--r--  contrib/python/urllib3/urllib3/contrib/_securetransport/low_level.py   188
-rw-r--r--  contrib/python/urllib3/urllib3/contrib/appengine.py                    198
-rw-r--r--  contrib/python/urllib3/urllib3/contrib/ntlmpool.py                     126
-rw-r--r--  contrib/python/urllib3/urllib3/contrib/pyopenssl.py                    300
-rw-r--r--  contrib/python/urllib3/urllib3/contrib/securetransport.py              434
-rw-r--r--  contrib/python/urllib3/urllib3/contrib/socks.py                        176
-rw-r--r--  contrib/python/urllib3/urllib3/exceptions.py                           262
-rw-r--r--  contrib/python/urllib3/urllib3/fields.py                               264
-rw-r--r--  contrib/python/urllib3/urllib3/filepost.py                              28
-rw-r--r--  contrib/python/urllib3/urllib3/packages/backports/makefile.py           10
-rw-r--r--  contrib/python/urllib3/urllib3/packages/six.py                         620
-rw-r--r--  contrib/python/urllib3/urllib3/poolmanager.py                          398
-rw-r--r--  contrib/python/urllib3/urllib3/request.py                              112
-rw-r--r--  contrib/python/urllib3/urllib3/response.py                             630
-rw-r--r--  contrib/python/urllib3/urllib3/util/__init__.py                         66
-rw-r--r--  contrib/python/urllib3/urllib3/util/connection.py                       76
-rw-r--r--  contrib/python/urllib3/urllib3/util/proxy.py                           114
-rw-r--r--  contrib/python/urllib3/urllib3/util/queue.py                            44
-rw-r--r--  contrib/python/urllib3/urllib3/util/request.py                          96
-rw-r--r--  contrib/python/urllib3/urllib3/util/response.py                         78
-rw-r--r--  contrib/python/urllib3/urllib3/util/retry.py                           574
-rw-r--r--  contrib/python/urllib3/urllib3/util/ssl_.py                            558
-rw-r--r--  contrib/python/urllib3/urllib3/util/ssl_match_hostname.py               64
-rw-r--r--  contrib/python/urllib3/urllib3/util/ssltransport.py                    442
-rw-r--r--  contrib/python/urllib3/urllib3/util/timeout.py                         164
-rw-r--r--  contrib/python/urllib3/urllib3/util/url.py                             600
-rw-r--r--  contrib/python/urllib3/urllib3/util/wait.py                            304
-rw-r--r--  contrib/python/urllib3/ya.make                                          60
40 files changed, 5187 insertions, 5187 deletions
diff --git a/contrib/python/urllib3/.dist-info/METADATA b/contrib/python/urllib3/.dist-info/METADATA
index 20ceebfa65..cd454df04e 100644
--- a/contrib/python/urllib3/.dist-info/METADATA
+++ b/contrib/python/urllib3/.dist-info/METADATA
@@ -1,14 +1,14 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.1
Name: urllib3
-Version: 1.26.8
+Version: 1.26.8
Summary: HTTP library with thread-safe connection pooling, file post, and more.
Home-page: https://urllib3.readthedocs.io/
Author: Andrey Petrov
Author-email: andrey.petrov@shazow.net
License: MIT
-Project-URL: Documentation, https://urllib3.readthedocs.io/
-Project-URL: Code, https://github.com/urllib3/urllib3
-Project-URL: Issue tracker, https://github.com/urllib3/urllib3/issues
+Project-URL: Documentation, https://urllib3.readthedocs.io/
+Project-URL: Code, https://github.com/urllib3/urllib3
+Project-URL: Issue tracker, https://github.com/urllib3/urllib3/issues
Keywords: urllib httplib threadsafe filepost http https ssl pooling
Platform: UNKNOWN
Classifier: Environment :: Web Environment
@@ -21,31 +21,31 @@ Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
-Classifier: Programming Language :: Python :: 3.7
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
Classifier: Topic :: Internet :: WWW/HTTP
Classifier: Topic :: Software Development :: Libraries
-Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4
-Description-Content-Type: text/x-rst
-License-File: LICENSE.txt
-Provides-Extra: brotli
-Requires-Dist: brotlipy (>=0.6.0) ; extra == 'brotli'
-Provides-Extra: secure
-Requires-Dist: pyOpenSSL (>=0.14) ; extra == 'secure'
-Requires-Dist: cryptography (>=1.3.4) ; extra == 'secure'
-Requires-Dist: idna (>=2.0.0) ; extra == 'secure'
-Requires-Dist: certifi ; extra == 'secure'
-Requires-Dist: ipaddress ; (python_version == "2.7") and extra == 'secure'
-Provides-Extra: socks
-Requires-Dist: PySocks (!=1.5.7,<2.0,>=1.5.6) ; extra == 'socks'
-
-
-urllib3 is a powerful, *user-friendly* HTTP client for Python. Much of the
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4
+Description-Content-Type: text/x-rst
+License-File: LICENSE.txt
+Provides-Extra: brotli
+Requires-Dist: brotlipy (>=0.6.0) ; extra == 'brotli'
+Provides-Extra: secure
+Requires-Dist: pyOpenSSL (>=0.14) ; extra == 'secure'
+Requires-Dist: cryptography (>=1.3.4) ; extra == 'secure'
+Requires-Dist: idna (>=2.0.0) ; extra == 'secure'
+Requires-Dist: certifi ; extra == 'secure'
+Requires-Dist: ipaddress ; (python_version == "2.7") and extra == 'secure'
+Provides-Extra: socks
+Requires-Dist: PySocks (!=1.5.7,<2.0,>=1.5.6) ; extra == 'socks'
+
+
+urllib3 is a powerful, *user-friendly* HTTP client for Python. Much of the
Python ecosystem already uses urllib3 and you should too.
urllib3 brings many critical features that are missing from the Python
standard libraries:
@@ -55,14 +55,14 @@ standard libraries:
- Client-side SSL/TLS verification.
- File uploads with multipart encoding.
- Helpers for retrying requests and dealing with HTTP redirects.
-- Support for gzip, deflate, and brotli encoding.
+- Support for gzip, deflate, and brotli encoding.
- Proxy support for HTTP and SOCKS.
- 100% test coverage.
-urllib3 is powerful and easy to use:
-
-.. code-block:: python
+urllib3 is powerful and easy to use:
+
+.. code-block:: python
>>> import urllib3
>>> http = urllib3.PoolManager()
>>> r = http.request('GET', 'http://httpbin.org/robots.txt')
@@ -71,17 +71,17 @@ urllib3 is powerful and easy to use:
>>> r.data
'User-agent: *\nDisallow: /deny\n'
-
+
Installing
----------
urllib3 can be installed with `pip <https://pip.pypa.io>`_::
- $ python -m pip install urllib3
+ $ python -m pip install urllib3
-Alternatively, you can grab the latest source code from `GitHub <https://github.com/urllib3/urllib3>`_::
+Alternatively, you can grab the latest source code from `GitHub <https://github.com/urllib3/urllib3>`_::
- $ git clone git://github.com/urllib3/urllib3.git
+ $ git clone git://github.com/urllib3/urllib3.git
$ python setup.py install
@@ -98,420 +98,420 @@ urllib3 happily accepts contributions. Please see our
`contributing documentation <https://urllib3.readthedocs.io/en/latest/contributing.html>`_
for some tips on getting started.
-
-Security Disclosures
---------------------
-
-To report a security vulnerability, please use the
-`Tidelift security contact <https://tidelift.com/security>`_.
-Tidelift will coordinate the fix and disclosure with maintainers.
-
-
+
+Security Disclosures
+--------------------
+
+To report a security vulnerability, please use the
+`Tidelift security contact <https://tidelift.com/security>`_.
+Tidelift will coordinate the fix and disclosure with maintainers.
+
+
Maintainers
-----------
-- `@sethmlarson <https://github.com/sethmlarson>`__ (Seth M. Larson)
-- `@pquentin <https://github.com/pquentin>`__ (Quentin Pradet)
-- `@theacodes <https://github.com/theacodes>`__ (Thea Flowers)
-- `@haikuginger <https://github.com/haikuginger>`__ (Jess Shapiro)
-- `@lukasa <https://github.com/lukasa>`__ (Cory Benfield)
-- `@sigmavirus24 <https://github.com/sigmavirus24>`__ (Ian Stapleton Cordasco)
-- `@shazow <https://github.com/shazow>`__ (Andrey Petrov)
+- `@sethmlarson <https://github.com/sethmlarson>`__ (Seth M. Larson)
+- `@pquentin <https://github.com/pquentin>`__ (Quentin Pradet)
+- `@theacodes <https://github.com/theacodes>`__ (Thea Flowers)
+- `@haikuginger <https://github.com/haikuginger>`__ (Jess Shapiro)
+- `@lukasa <https://github.com/lukasa>`__ (Cory Benfield)
+- `@sigmavirus24 <https://github.com/sigmavirus24>`__ (Ian Stapleton Cordasco)
+- `@shazow <https://github.com/shazow>`__ (Andrey Petrov)
👋
-
+
Sponsorship
-----------
-If your company benefits from this library, please consider `sponsoring its
-development <https://urllib3.readthedocs.io/en/latest/sponsors.html>`_.
-
-
-For Enterprise
---------------
-
-.. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White_small.png
- :width: 75
- :alt: Tidelift
-
-.. list-table::
- :widths: 10 100
-
- * - |tideliftlogo|
- - Professional support for urllib3 is available as part of the `Tidelift
- Subscription`_. Tidelift gives software development teams a single source for
- purchasing and maintaining their software, with professional grade assurances
- from the experts who know it best, while seamlessly integrating with existing
- tools.
-
-.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme
-
-
-Changes
-=======
-
-1.26.8 (2022-01-07)
--------------------
-
-* Added extra message to ``urllib3.exceptions.ProxyError`` when urllib3 detects that
- a proxy is configured to use HTTPS but the proxy itself appears to only use HTTP.
-* Added a mention of the size of the connection pool when discarding a connection due to the pool being full.
-* Added explicit support for Python 3.11.
-* Deprecated the ``Retry.MAX_BACKOFF`` class property in favor of ``Retry.DEFAULT_MAX_BACKOFF``
- to better match the rest of the default parameter names. ``Retry.MAX_BACKOFF`` is removed in v2.0.
-* Changed location of the vendored ``ssl.match_hostname`` function from ``urllib3.packages.ssl_match_hostname``
- to ``urllib3.util.ssl_match_hostname`` to ensure Python 3.10+ compatibility after being repackaged
- by downstream distributors.
-* Fixed absolute imports, all imports are now relative.
-
-
-1.26.7 (2021-09-22)
--------------------
-
-* Fixed a bug with HTTPS hostname verification involving IP addresses and lack
- of SNI. (Issue #2400)
-* Fixed a bug where IPv6 braces weren't stripped during certificate hostname
- matching. (Issue #2240)
-
-
-1.26.6 (2021-06-25)
--------------------
-
-* Deprecated the ``urllib3.contrib.ntlmpool`` module. urllib3 is not able to support
- it properly due to `reasons listed in this issue <https://github.com/urllib3/urllib3/issues/2282>`_.
- If you are a user of this module please leave a comment.
-* Changed ``HTTPConnection.request_chunked()`` to not erroneously emit multiple
- ``Transfer-Encoding`` headers in the case that one is already specified.
-* Fixed typo in deprecation message to recommend ``Retry.DEFAULT_ALLOWED_METHODS``.
-
-
-1.26.5 (2021-05-26)
--------------------
-
-* Fixed deprecation warnings emitted in Python 3.10.
-* Updated vendored ``six`` library to 1.16.0.
-* Improved performance of URL parser when splitting
- the authority component.
-
-
-1.26.4 (2021-03-15)
--------------------
-
-* Changed behavior of the default ``SSLContext`` when connecting to HTTPS proxy
- during HTTPS requests. The default ``SSLContext`` now sets ``check_hostname=True``.
-
-
-1.26.3 (2021-01-26)
--------------------
-
-* Fixed bytes and string comparison issue with headers (Pull #2141)
-
-* Changed ``ProxySchemeUnknown`` error message to be
- more actionable if the user supplies a proxy URL without
- a scheme. (Pull #2107)
-
-
-1.26.2 (2020-11-12)
--------------------
-
-* Fixed an issue where ``wrap_socket`` and ``CERT_REQUIRED`` wouldn't
- be imported properly on Python 2.7.8 and earlier (Pull #2052)
-
-
-1.26.1 (2020-11-11)
--------------------
-
-* Fixed an issue where two ``User-Agent`` headers would be sent if a
- ``User-Agent`` header key is passed as ``bytes`` (Pull #2047)
-
-
-1.26.0 (2020-11-10)
--------------------
-
-* **NOTE: urllib3 v2.0 will drop support for Python 2**.
- `Read more in the v2.0 Roadmap <https://urllib3.readthedocs.io/en/latest/v2-roadmap.html>`_.
-
-* Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)
-
-* Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that
- still wish to use TLS earlier than 1.2 without a deprecation warning
- should opt in explicitly by setting ``ssl_version=ssl.PROTOCOL_TLSv1_1`` (Pull #2002)
- **Starting in urllib3 v2.0: Connections that receive a ``DeprecationWarning`` will fail**
-
-* Deprecated ``Retry`` options ``Retry.DEFAULT_METHOD_WHITELIST``, ``Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST``
- and ``Retry(method_whitelist=...)`` in favor of ``Retry.DEFAULT_ALLOWED_METHODS``,
- ``Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT``, and ``Retry(allowed_methods=...)``
- (Pull #2000) **Starting in urllib3 v2.0: Deprecated options will be removed**
-
-* Added default ``User-Agent`` header to every request (Pull #1750)
-
-* Added ``urllib3.util.SKIP_HEADER`` for skipping ``User-Agent``, ``Accept-Encoding``,
- and ``Host`` headers from being automatically emitted with requests (Pull #2018)
-
-* Collapse ``transfer-encoding: chunked`` request data and framing into
- the same ``socket.send()`` call (Pull #1906)
-
-* Send ``http/1.1`` ALPN identifier with every TLS handshake by default (Pull #1894)
-
-* Properly terminate SecureTransport connections when CA verification fails (Pull #1977)
-
-* Don't emit an ``SNIMissingWarning`` when passing ``server_hostname=None``
- to SecureTransport (Pull #1903)
-
-* Disabled requesting TLSv1.2 session tickets as they weren't being used by urllib3 (Pull #1970)
-
-* Suppress ``BrokenPipeError`` when writing request body after the server
- has closed the socket (Pull #1524)
-
-* Wrap ``ssl.SSLError`` that can be raised from reading a socket (e.g. "bad MAC")
- into an ``urllib3.exceptions.SSLError`` (Pull #1939)
-
-
-1.25.11 (2020-10-19)
---------------------
-
-* Fix retry backoff time parsed from ``Retry-After`` header when given
- in the HTTP date format. The HTTP date was parsed as the local timezone
- rather than accounting for the timezone in the HTTP date (typically
- UTC) (Pull #1932, Pull #1935, Pull #1938, Pull #1949)
-
-* Fix issue where an error would be raised when the ``SSLKEYLOGFILE``
- environment variable was set to the empty string. Now ``SSLContext.keylog_file``
- is not set in this situation (Pull #2016)
-
-
-1.25.10 (2020-07-22)
---------------------
-
-* Added support for ``SSLKEYLOGFILE`` environment variable for
- logging TLS session keys for use with programs like
- Wireshark for decrypting captured web traffic (Pull #1867)
-
-* Fixed loading of SecureTransport libraries on macOS Big Sur
- due to the new dynamic linker cache (Pull #1905)
-
-* Collapse chunked request body data and framing into one
- call to ``send()`` to reduce the number of TCP packets by 2-4x (Pull #1906)
-
-* Don't insert ``None`` into ``ConnectionPool`` if the pool
- was empty when requesting a connection (Pull #1866)
-
-* Avoid ``hasattr`` call in ``BrotliDecoder.decompress()`` (Pull #1858)
-
-
-1.25.9 (2020-04-16)
--------------------
-
-* Added ``InvalidProxyConfigurationWarning`` which is raised when
- erroneously specifying an HTTPS proxy URL. urllib3 doesn't currently
- support connecting to HTTPS proxies but will soon be able to,
- and we would like users to migrate properly without much breakage.
-
- See `this GitHub issue <https://github.com/urllib3/urllib3/issues/1850>`_
- for more information on how to fix your proxy config. (Pull #1851)
-
-* Drain connection after ``PoolManager`` redirect (Pull #1817)
-
-* Ensure ``load_verify_locations`` raises ``SSLError`` for all backends (Pull #1812)
-
-* Rename ``VerifiedHTTPSConnection`` to ``HTTPSConnection`` (Pull #1805)
-
-* Allow the CA certificate data to be passed as a string (Pull #1804)
-
-* Raise ``ValueError`` if method contains control characters (Pull #1800)
-
-* Add ``__repr__`` to ``Timeout`` (Pull #1795)
-
-
-1.25.8 (2020-01-20)
--------------------
-
-* Drop support for EOL Python 3.4 (Pull #1774)
-
-* Optimize _encode_invalid_chars (Pull #1787)
-
-
-1.25.7 (2019-11-11)
--------------------
-
-* Preserve ``chunked`` parameter on retries (Pull #1715, Pull #1734)
-
-* Allow unset ``SERVER_SOFTWARE`` in App Engine (Pull #1704, Issue #1470)
-
-* Fix issue where URL fragment was sent within the request target. (Pull #1732)
-
-* Fix issue where an empty query section in a URL would fail to parse. (Pull #1732)
-
-* Remove TLS 1.3 support in SecureTransport due to Apple removing support (Pull #1703)
-
-
-1.25.6 (2019-09-24)
--------------------
-
-* Fix issue where tilde (``~``) characters were incorrectly
- percent-encoded in the path. (Pull #1692)
-
-
-1.25.5 (2019-09-19)
--------------------
-
-* Add mitigation for BPO-37428 affecting Python <3.7.4 and OpenSSL 1.1.1+ which
- caused certificate verification to be enabled when using ``cert_reqs=CERT_NONE``.
- (Issue #1682)
-
-
-1.25.4 (2019-09-19)
--------------------
-
-* Propagate Retry-After header settings to subsequent retries. (Pull #1607)
-
-* Fix edge case where Retry-After header was still respected even when
- explicitly opted out of. (Pull #1607)
-
-* Remove dependency on ``rfc3986`` for URL parsing.
-
-* Fix issue where URLs containing invalid characters within ``Url.auth`` would
- raise an exception instead of percent-encoding those characters.
-
-* Add support for ``HTTPResponse.auto_close = False`` which makes HTTP responses
- work well with BufferedReaders and other ``io`` module features. (Pull #1652)
-
-* Percent-encode invalid characters in URL for ``HTTPConnectionPool.request()`` (Pull #1673)
-
-
-1.25.3 (2019-05-23)
--------------------
-
-* Change ``HTTPSConnection`` to load system CA certificates
- when ``ca_certs``, ``ca_cert_dir``, and ``ssl_context`` are
- unspecified. (Pull #1608, Issue #1603)
-
-* Upgrade bundled rfc3986 to v1.3.2. (Pull #1609, Issue #1605)
-
-
-1.25.2 (2019-04-28)
--------------------
-
-* Change ``is_ipaddress`` to not detect IPvFuture addresses. (Pull #1583)
-
-* Change ``parse_url`` to percent-encode invalid characters within the
- path, query, and target components. (Pull #1586)
-
-
-1.25.1 (2019-04-24)
--------------------
-
-* Add support for Google's ``Brotli`` package. (Pull #1572, Pull #1579)
-
-* Upgrade bundled rfc3986 to v1.3.1 (Pull #1578)
-
-
-1.25 (2019-04-22)
------------------
-
-* Require and validate certificates by default when using HTTPS (Pull #1507)
-
-* Upgraded ``urllib3.utils.parse_url()`` to be RFC 3986 compliant. (Pull #1487)
-
-* Added support for ``key_password`` for ``HTTPSConnectionPool`` to use
- encrypted ``key_file`` without creating your own ``SSLContext`` object. (Pull #1489)
-
-* Add TLSv1.3 support to CPython, pyOpenSSL, and SecureTransport ``SSLContext``
- implementations. (Pull #1496)
-
-* Switched the default multipart header encoder from RFC 2231 to HTML 5 working draft. (Issue #303, Pull #1492)
-
-* Fixed issue where OpenSSL would block if an encrypted client private key was
- given and no password was given. Instead an ``SSLError`` is raised. (Pull #1489)
-
-* Added support for Brotli content encoding. It is enabled automatically if
- ``brotlipy`` package is installed which can be requested with
- ``urllib3[brotli]`` extra. (Pull #1532)
-
-* Drop ciphers using DSS key exchange from default TLS cipher suites.
- Improve default ciphers when using SecureTransport. (Pull #1496)
-
-* Implemented a more efficient ``HTTPResponse.__iter__()`` method. (Issue #1483)
-
-1.24.3 (2019-05-01)
--------------------
-
-* Apply fix for CVE-2019-9740. (Pull #1591)
-
-1.24.2 (2019-04-17)
--------------------
-
-* Don't load system certificates by default when any other ``ca_certs``, ``ca_certs_dir`` or
- ``ssl_context`` parameters are specified.
-
-* Remove Authorization header regardless of case when redirecting to cross-site. (Issue #1510)
-
-* Add support for IPv6 addresses in subjectAltName section of certificates. (Issue #1269)
-
-
-1.24.1 (2018-11-02)
--------------------
-
-* Remove quadratic behavior within ``GzipDecoder.decompress()`` (Issue #1467)
-
-* Restored functionality of ``ciphers`` parameter for ``create_urllib3_context()``. (Issue #1462)
-
-
-1.24 (2018-10-16)
------------------
-
-* Allow key_server_hostname to be specified when initializing a PoolManager so the SNI hostname sent in the handshake can be overridden. (Pull #1449)
-
-* Test against Python 3.7 on AppVeyor. (Pull #1453)
-
-* Early-out ipv6 checks when running on App Engine. (Pull #1450)
-
-* Change ambiguous description of backoff_factor (Pull #1436)
-
-* Add ability to handle multiple Content-Encodings (Issue #1441 and Pull #1442)
-
-* Skip DNS names that can't be idna-decoded when using pyOpenSSL (Issue #1405).
-
-* Add a server_hostname parameter to HTTPSConnection which allows for
- overriding the SNI hostname sent in the handshake. (Pull #1397)
-
-* Drop support for EOL Python 2.6 (Pull #1429 and Pull #1430)
-
-* Fixed bug where responses with header Content-Type: message/* erroneously
- raised HeaderParsingError, resulting in a warning being logged. (Pull #1439)
-
-* Move urllib3 to src/urllib3 (Pull #1409)
-
-
-1.23 (2018-06-04)
------------------
-
-* Allow providing a list of headers to strip from requests when redirecting
- to a different host. Defaults to the ``Authorization`` header. Different
- headers can be set via ``Retry.remove_headers_on_redirect``. (Issue #1316)
-
-* Fix ``util.selectors._fileobj_to_fd`` to accept ``long`` (Issue #1247).
-
-* Dropped Python 3.3 support. (Pull #1242)
-
-* Put the connection back in the pool when calling stream() or read_chunked() on
- a chunked HEAD response. (Issue #1234)
-
-* Fixed pyOpenSSL-specific ssl client authentication issue when clients
- attempted to auth via certificate + chain (Issue #1060)
-
-* Add the port to the connectionpool connect print (Pull #1251)
-
-* Don't use the ``uuid`` module to create multipart data boundaries. (Pull #1380)
-
-* ``read_chunked()`` on a closed response returns no chunks. (Issue #1088)
-
-* Add Python 2.6 support to ``contrib.securetransport`` (Pull #1359)
-
-* Added support for auth info in url for SOCKS proxy (Pull #1363)
-
-
+If your company benefits from this library, please consider `sponsoring its
+development <https://urllib3.readthedocs.io/en/latest/sponsors.html>`_.
+
+
+For Enterprise
+--------------
+
+.. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White_small.png
+ :width: 75
+ :alt: Tidelift
+
+.. list-table::
+ :widths: 10 100
+
+ * - |tideliftlogo|
+ - Professional support for urllib3 is available as part of the `Tidelift
+ Subscription`_. Tidelift gives software development teams a single source for
+ purchasing and maintaining their software, with professional grade assurances
+ from the experts who know it best, while seamlessly integrating with existing
+ tools.
+
+.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme
+
+
+Changes
+=======
+
+1.26.8 (2022-01-07)
+-------------------
+
+* Added extra message to ``urllib3.exceptions.ProxyError`` when urllib3 detects that
+ a proxy is configured to use HTTPS but the proxy itself appears to only use HTTP.
+* Added a mention of the size of the connection pool when discarding a connection due to the pool being full.
+* Added explicit support for Python 3.11.
+* Deprecated the ``Retry.MAX_BACKOFF`` class property in favor of ``Retry.DEFAULT_MAX_BACKOFF``
+ to better match the rest of the default parameter names. ``Retry.MAX_BACKOFF`` is removed in v2.0.
+* Changed location of the vendored ``ssl.match_hostname`` function from ``urllib3.packages.ssl_match_hostname``
+ to ``urllib3.util.ssl_match_hostname`` to ensure Python 3.10+ compatibility after being repackaged
+ by downstream distributors.
+* Fixed absolute imports, all imports are now relative.
+
+
+1.26.7 (2021-09-22)
+-------------------
+
+* Fixed a bug with HTTPS hostname verification involving IP addresses and lack
+ of SNI. (Issue #2400)
+* Fixed a bug where IPv6 braces weren't stripped during certificate hostname
+ matching. (Issue #2240)
+
+
+1.26.6 (2021-06-25)
+-------------------
+
+* Deprecated the ``urllib3.contrib.ntlmpool`` module. urllib3 is not able to support
+ it properly due to `reasons listed in this issue <https://github.com/urllib3/urllib3/issues/2282>`_.
+ If you are a user of this module please leave a comment.
+* Changed ``HTTPConnection.request_chunked()`` to not erroneously emit multiple
+ ``Transfer-Encoding`` headers in the case that one is already specified.
+* Fixed typo in deprecation message to recommend ``Retry.DEFAULT_ALLOWED_METHODS``.
+
+
+1.26.5 (2021-05-26)
+-------------------
+
+* Fixed deprecation warnings emitted in Python 3.10.
+* Updated vendored ``six`` library to 1.16.0.
+* Improved performance of URL parser when splitting
+ the authority component.
+
+
+1.26.4 (2021-03-15)
+-------------------
+
+* Changed behavior of the default ``SSLContext`` when connecting to HTTPS proxy
+ during HTTPS requests. The default ``SSLContext`` now sets ``check_hostname=True``.
+
+
+1.26.3 (2021-01-26)
+-------------------
+
+* Fixed bytes and string comparison issue with headers (Pull #2141)
+
+* Changed ``ProxySchemeUnknown`` error message to be
+ more actionable if the user supplies a proxy URL without
+ a scheme. (Pull #2107)
+
+
+1.26.2 (2020-11-12)
+-------------------
+
+* Fixed an issue where ``wrap_socket`` and ``CERT_REQUIRED`` wouldn't
+ be imported properly on Python 2.7.8 and earlier (Pull #2052)
+
+
+1.26.1 (2020-11-11)
+-------------------
+
+* Fixed an issue where two ``User-Agent`` headers would be sent if a
+ ``User-Agent`` header key is passed as ``bytes`` (Pull #2047)
+
+
+1.26.0 (2020-11-10)
+-------------------
+
+* **NOTE: urllib3 v2.0 will drop support for Python 2**.
+ `Read more in the v2.0 Roadmap <https://urllib3.readthedocs.io/en/latest/v2-roadmap.html>`_.
+
+* Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)
+
+* Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that
+ still wish to use TLS earlier than 1.2 without a deprecation warning
+ should opt in explicitly by setting ``ssl_version=ssl.PROTOCOL_TLSv1_1`` (Pull #2002)
+ **Starting in urllib3 v2.0: Connections that receive a ``DeprecationWarning`` will fail**
+
+* Deprecated ``Retry`` options ``Retry.DEFAULT_METHOD_WHITELIST``, ``Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST``
+ and ``Retry(method_whitelist=...)`` in favor of ``Retry.DEFAULT_ALLOWED_METHODS``,
+ ``Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT``, and ``Retry(allowed_methods=...)``
+ (Pull #2000) **Starting in urllib3 v2.0: Deprecated options will be removed**
+
+* Added default ``User-Agent`` header to every request (Pull #1750)
+
+* Added ``urllib3.util.SKIP_HEADER`` for skipping ``User-Agent``, ``Accept-Encoding``,
+ and ``Host`` headers from being automatically emitted with requests (Pull #2018)
+
+* Collapse ``transfer-encoding: chunked`` request data and framing into
+ the same ``socket.send()`` call (Pull #1906)
+
+* Send ``http/1.1`` ALPN identifier with every TLS handshake by default (Pull #1894)
+
+* Properly terminate SecureTransport connections when CA verification fails (Pull #1977)
+
+* Don't emit an ``SNIMissingWarning`` when passing ``server_hostname=None``
+ to SecureTransport (Pull #1903)
+
+* Disabled requesting TLSv1.2 session tickets as they weren't being used by urllib3 (Pull #1970)
+
+* Suppress ``BrokenPipeError`` when writing request body after the server
+ has closed the socket (Pull #1524)
+
+* Wrap ``ssl.SSLError`` that can be raised from reading a socket (e.g. "bad MAC")
+ into an ``urllib3.exceptions.SSLError`` (Pull #1939)
+
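A minimal sketch of the two opt-in features noted in the 1.26.0 entries above
(``ssl_version`` and ``urllib3.util.SKIP_HEADER``); the host URL is a
placeholder:

.. code-block:: python

    import ssl

    import urllib3
    from urllib3.util import SKIP_HEADER

    # Opt in to TLSv1.1 explicitly so no DeprecationWarning is emitted.
    http = urllib3.PoolManager(ssl_version=ssl.PROTOCOL_TLSv1_1)

    # Suppress the automatically emitted User-Agent header.
    r = http.request(
        "GET",
        "https://example.com",
        headers={"User-Agent": SKIP_HEADER},
    )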
+
+1.25.11 (2020-10-19)
+--------------------
+
+* Fix retry backoff time parsed from ``Retry-After`` header when given
+ in the HTTP date format. The HTTP date was parsed as the local timezone
+ rather than accounting for the timezone in the HTTP date (typically
+ UTC) (Pull #1932, Pull #1935, Pull #1938, Pull #1949)
+
+* Fix issue where an error would be raised when the ``SSLKEYLOGFILE``
+ environment variable was set to the empty string. Now ``SSLContext.keylog_file``
+ is not set in this situation (Pull #2016)
+
+
+1.25.10 (2020-07-22)
+--------------------
+
+* Added support for ``SSLKEYLOGFILE`` environment variable for
+ logging TLS session keys for use with programs like
+ Wireshark for decrypting captured web traffic (Pull #1867)
+
+* Fixed loading of SecureTransport libraries on macOS Big Sur
+ due to the new dynamic linker cache (Pull #1905)
+
+* Collapse chunked request body data and framing into one
+ call to ``send()`` to reduce the number of TCP packets by 2-4x (Pull #1906)
+
+* Don't insert ``None`` into ``ConnectionPool`` if the pool
+ was empty when requesting a connection (Pull #1866)
+
+* Avoid ``hasattr`` call in ``BrotliDecoder.decompress()`` (Pull #1858)
+
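A sketch of the ``SSLKEYLOGFILE`` support above; the path is a placeholder,
and the variable must be set before the first TLS handshake so the
``SSLContext`` picks it up:

.. code-block:: python

    import os

    import urllib3

    # Session secrets are appended to this file in a format Wireshark
    # can use to decrypt the captured traffic.
    os.environ["SSLKEYLOGFILE"] = "/tmp/tls-keys.log"
    urllib3.PoolManager().request("GET", "https://example.com")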
+
+1.25.9 (2020-04-16)
+-------------------
+
+* Added ``InvalidProxyConfigurationWarning`` which is raised when
+ erroneously specifying an HTTPS proxy URL. urllib3 doesn't currently
+ support connecting to HTTPS proxies but will soon be able to,
+ and we would like users to migrate properly without much breakage.
+
+ See `this GitHub issue <https://github.com/urllib3/urllib3/issues/1850>`_
+ for more information on how to fix your proxy config. (Pull #1851)
+
+* Drain connection after ``PoolManager`` redirect (Pull #1817)
+
+* Ensure ``load_verify_locations`` raises ``SSLError`` for all backends (Pull #1812)
+
+* Rename ``VerifiedHTTPSConnection`` to ``HTTPSConnection`` (Pull #1805)
+
+* Allow the CA certificate data to be passed as a string (Pull #1804)
+
+* Raise ``ValueError`` if method contains control characters (Pull #1800)
+
+* Add ``__repr__`` to ``Timeout`` (Pull #1795)
+
+
+1.25.8 (2020-01-20)
+-------------------
+
+* Drop support for EOL Python 3.4 (Pull #1774)
+
+* Optimize _encode_invalid_chars (Pull #1787)
+
+
+1.25.7 (2019-11-11)
+-------------------
+
+* Preserve ``chunked`` parameter on retries (Pull #1715, Pull #1734)
+
+* Allow unset ``SERVER_SOFTWARE`` in App Engine (Pull #1704, Issue #1470)
+
+* Fix issue where URL fragment was sent within the request target. (Pull #1732)
+
+* Fix issue where an empty query section in a URL would fail to parse. (Pull #1732)
+
+* Remove TLS 1.3 support in SecureTransport due to Apple removing support (Pull #1703)
+
+
+1.25.6 (2019-09-24)
+-------------------
+
+* Fix issue where tilde (``~``) characters were incorrectly
+ percent-encoded in the path. (Pull #1692)
+
+
+1.25.5 (2019-09-19)
+-------------------
+
+* Add mitigation for BPO-37428 affecting Python <3.7.4 and OpenSSL 1.1.1+ which
+ caused certificate verification to be enabled when using ``cert_reqs=CERT_NONE``.
+ (Issue #1682)
+
+
+1.25.4 (2019-09-19)
+-------------------
+
+* Propagate Retry-After header settings to subsequent retries. (Pull #1607)
+
+* Fix edge case where Retry-After header was still respected even when
+ explicitly opted out of. (Pull #1607)
+
+* Remove dependency on ``rfc3986`` for URL parsing.
+
+* Fix issue where URLs containing invalid characters within ``Url.auth`` would
+ raise an exception instead of percent-encoding those characters.
+
+* Add support for ``HTTPResponse.auto_close = False`` which makes HTTP responses
+ work well with BufferedReaders and other ``io`` module features. (Pull #1652)
+
+* Percent-encode invalid characters in URL for ``HTTPConnectionPool.request()`` (Pull #1673)
+
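A sketch of the ``auto_close`` flag above, pairing a streamed response with
the ``io`` module (the URL is a placeholder):

.. code-block:: python

    import io

    import urllib3

    http = urllib3.PoolManager()
    r = http.request("GET", "https://example.com", preload_content=False)
    r.auto_close = False  # let the io wrapper manage the response lifetime
    for line in io.TextIOWrapper(r):
        print(line.rstrip())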
+
+1.25.3 (2019-05-23)
+-------------------
+
+* Change ``HTTPSConnection`` to load system CA certificates
+ when ``ca_certs``, ``ca_cert_dir``, and ``ssl_context`` are
+ unspecified. (Pull #1608, Issue #1603)
+
+* Upgrade bundled rfc3986 to v1.3.2. (Pull #1609, Issue #1605)
+
+
+1.25.2 (2019-04-28)
+-------------------
+
+* Change ``is_ipaddress`` to not detect IPvFuture addresses. (Pull #1583)
+
+* Change ``parse_url`` to percent-encode invalid characters within the
+ path, query, and target components. (Pull #1586)
+
+
+1.25.1 (2019-04-24)
+-------------------
+
+* Add support for Google's ``Brotli`` package. (Pull #1572, Pull #1579)
+
+* Upgrade bundled rfc3986 to v1.3.1 (Pull #1578)
+
+
+1.25 (2019-04-22)
+-----------------
+
+* Require and validate certificates by default when using HTTPS (Pull #1507)
+
+* Upgraded ``urllib3.utils.parse_url()`` to be RFC 3986 compliant. (Pull #1487)
+
+* Added support for ``key_password`` for ``HTTPSConnectionPool`` to use
+ encrypted ``key_file`` without creating your own ``SSLContext`` object. (Pull #1489)
+
+* Add TLSv1.3 support to CPython, pyOpenSSL, and SecureTransport ``SSLContext``
+ implementations. (Pull #1496)
+
+* Switched the default multipart header encoder from RFC 2231 to HTML 5 working draft. (Issue #303, Pull #1492)
+
+* Fixed issue where OpenSSL would block if an encrypted client private key was
+ given and no password was given. Instead an ``SSLError`` is raised. (Pull #1489)
+
+* Added support for Brotli content encoding. It is enabled automatically if
+ ``brotlipy`` package is installed which can be requested with
+ ``urllib3[brotli]`` extra. (Pull #1532)
+
+* Drop ciphers using DSS key exchange from default TLS cipher suites.
+ Improve default ciphers when using SecureTransport. (Pull #1496)
+
+* Implemented a more efficient ``HTTPResponse.__iter__()`` method. (Issue #1483)
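
The ``urllib3[brotli]`` extra mentioned above is requested with the same pip
pattern as in the Installing section (quoted so shells don't glob the
brackets)::

    $ python -m pip install "urllib3[brotli]"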
+
+1.24.3 (2019-05-01)
+-------------------
+
+* Apply fix for CVE-2019-9740. (Pull #1591)
+
+1.24.2 (2019-04-17)
+-------------------
+
+* Don't load system certificates by default when any other ``ca_certs``, ``ca_certs_dir`` or
+ ``ssl_context`` parameters are specified.
+
+* Remove Authorization header regardless of case when redirecting to cross-site. (Issue #1510)
+
+* Add support for IPv6 addresses in subjectAltName section of certificates. (Issue #1269)
+
+
+1.24.1 (2018-11-02)
+-------------------
+
+* Remove quadratic behavior within ``GzipDecoder.decompress()`` (Issue #1467)
+
+* Restored functionality of ``ciphers`` parameter for ``create_urllib3_context()``. (Issue #1462)
+
+
+1.24 (2018-10-16)
+-----------------
+
+* Allow key_server_hostname to be specified when initializing a PoolManager so the SNI hostname sent in the handshake can be overridden. (Pull #1449)
+
+* Test against Python 3.7 on AppVeyor. (Pull #1453)
+
+* Early-out ipv6 checks when running on App Engine. (Pull #1450)
+
+* Change ambiguous description of backoff_factor (Pull #1436)
+
+* Add ability to handle multiple Content-Encodings (Issue #1441 and Pull #1442)
+
+* Skip DNS names that can't be idna-decoded when using pyOpenSSL (Issue #1405).
+
+* Add a server_hostname parameter to HTTPSConnection which allows for
+ overriding the SNI hostname sent in the handshake. (Pull #1397)
+
+* Drop support for EOL Python 2.6 (Pull #1429 and Pull #1430)
+
+* Fixed bug where responses with header Content-Type: message/* erroneously
+ raised HeaderParsingError, resulting in a warning being logged. (Pull #1439)
+
+* Move urllib3 to src/urllib3 (Pull #1409)
+
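A sketch of the SNI override noted in the ``server_hostname`` entries above;
the IP address and hostname are placeholders, and it is assumed that
``HTTPSConnectionPool`` forwards the keyword argument to each connection:

.. code-block:: python

    import urllib3

    # Send "example.com" as the SNI hostname instead of the pool's host.
    http = urllib3.HTTPSConnectionPool(
        "198.51.100.10",
        server_hostname="example.com",
    )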
+
+1.23 (2018-06-04)
+-----------------
+
+* Allow providing a list of headers to strip from requests when redirecting
+ to a different host. Defaults to the ``Authorization`` header. Different
+ headers can be set via ``Retry.remove_headers_on_redirect``. (Issue #1316)
+
+* Fix ``util.selectors._fileobj_to_fd`` to accept ``long`` (Issue #1247).
+
+* Dropped Python 3.3 support. (Pull #1242)
+
+* Put the connection back in the pool when calling stream() or read_chunked() on
+ a chunked HEAD response. (Issue #1234)
+
+* Fixed pyOpenSSL-specific ssl client authentication issue when clients
+ attempted to auth via certificate + chain (Issue #1060)
+
+* Add the port to the connectionpool connect print (Pull #1251)
+
+* Don't use the ``uuid`` module to create multipart data boundaries. (Pull #1380)
+
+* ``read_chunked()`` on a closed response returns no chunks. (Issue #1088)
+
+* Add Python 2.6 support to ``contrib.securetransport`` (Pull #1359)
+
+* Added support for auth info in url for SOCKS proxy (Pull #1363)
+
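A sketch of the redirect header-stripping described in the first 1.23 entry
above (the second header name is illustrative):

.. code-block:: python

    import urllib3
    from urllib3.util.retry import Retry

    # Strip these headers whenever a redirect crosses to a different host.
    retry = Retry(remove_headers_on_redirect=["Authorization", "Cookie"])
    http = urllib3.PoolManager(retries=retry)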
+
1.22 (2017-07-20)
-----------------
@@ -600,7 +600,7 @@ Changes
* Fix some bugs that occur when modules incautiously patch the queue module.
(Pull #1061)
-* Prevent retries from occurring on read timeouts for which the request method
+* Prevent retries from occurring on read timeouts for which the request method
was not in the method whitelist. (Issue #1059)
* Changed the PyOpenSSL contrib module to lazily load idna to avoid
@@ -634,7 +634,7 @@ Changes
non-httplib underlying FPs. (Pull #990)
* Empty filenames in multipart headers are now emitted as such, rather than
- being suppressed. (Issue #1015)
+ being suppressed. (Issue #1015)
* Prefer user-supplied Host headers on chunked uploads. (Issue #1009)
@@ -651,13 +651,13 @@ Changes
interprets the presence of any flag as requesting certificate validation.
There is no PR for this patch, as it was prepared for simultaneous disclosure
- and release. The master branch received the same fix in Pull #1010.
+ and release. The master branch received the same fix in Pull #1010.
1.18 (2016-09-26)
-----------------
-* Fixed incorrect message for IncompleteRead exception. (Pull #973)
+* Fixed incorrect message for IncompleteRead exception. (Pull #973)
* Accept ``iPAddress`` subject alternative name fields in TLS certificates.
(Issue #258)
@@ -686,32 +686,32 @@ Changes
contains retries history. (Issue #848)
* Timeout can no longer be set as boolean, and must be greater than zero.
- (Pull #924)
+ (Pull #924)
* Removed pyasn1 and ndg-httpsclient from dependencies used for PyOpenSSL. We
now use cryptography and idna, both of which are already dependencies of
- PyOpenSSL. (Pull #930)
+ PyOpenSSL. (Pull #930)
* Fixed infinite loop in ``stream`` when amt=None. (Issue #928)
* Try to use the operating system's certificates when we are using an
- ``SSLContext``. (Pull #941)
+ ``SSLContext``. (Pull #941)
* Updated cipher suite list to allow ChaCha20+Poly1305. AES-GCM is preferred to
- ChaCha20, but ChaCha20 is then preferred to everything else. (Pull #947)
+ ChaCha20, but ChaCha20 is then preferred to everything else. (Pull #947)
-* Updated cipher suite list to remove 3DES-based cipher suites. (Pull #958)
+* Updated cipher suite list to remove 3DES-based cipher suites. (Pull #958)
-* Removed the cipher suite fallback to allow HIGH ciphers. (Pull #958)
+* Removed the cipher suite fallback to allow HIGH ciphers. (Pull #958)
* Implemented ``length_remaining`` to determine remaining content
- to be read. (Pull #949)
+ to be read. (Pull #949)
* Implemented ``enforce_content_length`` to enable exceptions when
- incomplete data chunks are received. (Pull #949)
+ incomplete data chunks are received. (Pull #949)
* Dropped connection start, dropped connection reset, redirect, forced retry,
- and new HTTPS connection log levels to DEBUG, from INFO. (Pull #967)
+ and new HTTPS connection log levels to DEBUG, from INFO. (Pull #967)
1.16 (2016-06-11)
@@ -995,7 +995,7 @@ Changes
* All errors during a retry-enabled request should be wrapped in
``urllib3.exceptions.MaxRetryError``, including timeout-related exceptions
which were previously exempt. Underlying error is accessible from the
- ``.reason`` property. (Issue #326)
+ ``.reason`` property. (Issue #326)
* ``urllib3.exceptions.ConnectionError`` renamed to
``urllib3.exceptions.ProtocolError``. (Issue #326)
@@ -1363,7 +1363,7 @@ Changes
* Refactored code to be even more decoupled, reusable, and extendable.
* License header added to ``.py`` files.
* Embiggened the documentation: Lots of Sphinx-friendly docstrings in the code
- and docs in ``docs/`` and on https://urllib3.readthedocs.io/.
+ and docs in ``docs/`` and on https://urllib3.readthedocs.io/.
* Embettered all the things!
* Started writing this file.
diff --git a/contrib/python/urllib3/LICENSE.txt b/contrib/python/urllib3/LICENSE.txt
index 429a1767e4..66b9e71620 100644
--- a/contrib/python/urllib3/LICENSE.txt
+++ b/contrib/python/urllib3/LICENSE.txt
@@ -1,21 +1,21 @@
-MIT License
-
-Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
+MIT License
+
+Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/contrib/python/urllib3/README.rst b/contrib/python/urllib3/README.rst
index 75b33750b6..c21707ed93 100644
--- a/contrib/python/urllib3/README.rst
+++ b/contrib/python/urllib3/README.rst
@@ -1,118 +1,118 @@
-.. raw:: html
-
- <p align="center">
- <a href="https://github.com/urllib3/urllib3">
- <img src="./docs/images/banner.svg" width="60%" alt="urllib3" />
- </a>
- </p>
- <p align="center">
- <a href="https://pypi.org/project/urllib3"><img alt="PyPI Version" src="https://img.shields.io/pypi/v/urllib3.svg?maxAge=86400" /></a>
- <a href="https://pypi.org/project/urllib3"><img alt="Python Versions" src="https://img.shields.io/pypi/pyversions/urllib3.svg?maxAge=86400" /></a>
- <a href="https://discord.gg/CHEgCZN"><img alt="Join our Discord" src="https://img.shields.io/discord/756342717725933608?color=%237289da&label=discord" /></a>
- <a href="https://codecov.io/gh/urllib3/urllib3"><img alt="Coverage Status" src="https://img.shields.io/codecov/c/github/urllib3/urllib3.svg" /></a>
- <a href="https://github.com/urllib3/urllib3/actions?query=workflow%3ACI"><img alt="Build Status on GitHub" src="https://github.com/urllib3/urllib3/workflows/CI/badge.svg" /></a>
- <a href="https://travis-ci.org/urllib3/urllib3"><img alt="Build Status on Travis" src="https://travis-ci.org/urllib3/urllib3.svg?branch=master" /></a>
- <a href="https://urllib3.readthedocs.io"><img alt="Documentation Status" src="https://readthedocs.org/projects/urllib3/badge/?version=latest" /></a>
- </p>
-
-urllib3 is a powerful, *user-friendly* HTTP client for Python. Much of the
-Python ecosystem already uses urllib3 and you should too.
-urllib3 brings many critical features that are missing from the Python
-standard libraries:
-
-- Thread safety.
-- Connection pooling.
-- Client-side SSL/TLS verification.
-- File uploads with multipart encoding.
-- Helpers for retrying requests and dealing with HTTP redirects.
-- Support for gzip, deflate, and brotli encoding.
-- Proxy support for HTTP and SOCKS.
-- 100% test coverage.
-
-urllib3 is powerful and easy to use:
-
-.. code-block:: python
-
- >>> import urllib3
- >>> http = urllib3.PoolManager()
- >>> r = http.request('GET', 'http://httpbin.org/robots.txt')
- >>> r.status
- 200
- >>> r.data
- 'User-agent: *\nDisallow: /deny\n'
-
-
-Installing
-----------
-
-urllib3 can be installed with `pip <https://pip.pypa.io>`_::
-
- $ python -m pip install urllib3
-
-Alternatively, you can grab the latest source code from `GitHub <https://github.com/urllib3/urllib3>`_::
-
- $ git clone git://github.com/urllib3/urllib3.git
- $ python setup.py install
-
-
-Documentation
--------------
-
-urllib3 has usage and reference documentation at `urllib3.readthedocs.io <https://urllib3.readthedocs.io>`_.
-
-
-Contributing
-------------
-
-urllib3 happily accepts contributions. Please see our
-`contributing documentation <https://urllib3.readthedocs.io/en/latest/contributing.html>`_
-for some tips on getting started.
-
-
-Security Disclosures
---------------------
-
-To report a security vulnerability, please use the
-`Tidelift security contact <https://tidelift.com/security>`_.
-Tidelift will coordinate the fix and disclosure with maintainers.
-
-
-Maintainers
------------
-
-- `@sethmlarson <https://github.com/sethmlarson>`__ (Seth M. Larson)
-- `@pquentin <https://github.com/pquentin>`__ (Quentin Pradet)
-- `@theacodes <https://github.com/theacodes>`__ (Thea Flowers)
-- `@haikuginger <https://github.com/haikuginger>`__ (Jess Shapiro)
-- `@lukasa <https://github.com/lukasa>`__ (Cory Benfield)
-- `@sigmavirus24 <https://github.com/sigmavirus24>`__ (Ian Stapleton Cordasco)
-- `@shazow <https://github.com/shazow>`__ (Andrey Petrov)
-
-👋
-
-
-Sponsorship
------------
-
-If your company benefits from this library, please consider `sponsoring its
-development <https://urllib3.readthedocs.io/en/latest/sponsors.html>`_.
-
-
-For Enterprise
---------------
-
-.. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White_small.png
- :width: 75
- :alt: Tidelift
-
-.. list-table::
- :widths: 10 100
-
- * - |tideliftlogo|
- - Professional support for urllib3 is available as part of the `Tidelift
- Subscription`_. Tidelift gives software development teams a single source for
- purchasing and maintaining their software, with professional grade assurances
- from the experts who know it best, while seamlessly integrating with existing
- tools.
-
-.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme
+.. raw:: html
+
+ <p align="center">
+ <a href="https://github.com/urllib3/urllib3">
+ <img src="./docs/images/banner.svg" width="60%" alt="urllib3" />
+ </a>
+ </p>
+ <p align="center">
+ <a href="https://pypi.org/project/urllib3"><img alt="PyPI Version" src="https://img.shields.io/pypi/v/urllib3.svg?maxAge=86400" /></a>
+ <a href="https://pypi.org/project/urllib3"><img alt="Python Versions" src="https://img.shields.io/pypi/pyversions/urllib3.svg?maxAge=86400" /></a>
+ <a href="https://discord.gg/CHEgCZN"><img alt="Join our Discord" src="https://img.shields.io/discord/756342717725933608?color=%237289da&label=discord" /></a>
+ <a href="https://codecov.io/gh/urllib3/urllib3"><img alt="Coverage Status" src="https://img.shields.io/codecov/c/github/urllib3/urllib3.svg" /></a>
+ <a href="https://github.com/urllib3/urllib3/actions?query=workflow%3ACI"><img alt="Build Status on GitHub" src="https://github.com/urllib3/urllib3/workflows/CI/badge.svg" /></a>
+ <a href="https://travis-ci.org/urllib3/urllib3"><img alt="Build Status on Travis" src="https://travis-ci.org/urllib3/urllib3.svg?branch=master" /></a>
+ <a href="https://urllib3.readthedocs.io"><img alt="Documentation Status" src="https://readthedocs.org/projects/urllib3/badge/?version=latest" /></a>
+ </p>
+
+urllib3 is a powerful, *user-friendly* HTTP client for Python. Much of the
+Python ecosystem already uses urllib3 and you should too.
+urllib3 brings many critical features that are missing from the Python
+standard libraries:
+
+- Thread safety.
+- Connection pooling.
+- Client-side SSL/TLS verification.
+- File uploads with multipart encoding.
+- Helpers for retrying requests and dealing with HTTP redirects.
+- Support for gzip, deflate, and brotli encoding.
+- Proxy support for HTTP and SOCKS.
+- 100% test coverage.
+
+urllib3 is powerful and easy to use:
+
+.. code-block:: python
+
+ >>> import urllib3
+ >>> http = urllib3.PoolManager()
+ >>> r = http.request('GET', 'http://httpbin.org/robots.txt')
+ >>> r.status
+ 200
+ >>> r.data
+ 'User-agent: *\nDisallow: /deny\n'
+
+
+Installing
+----------
+
+urllib3 can be installed with `pip <https://pip.pypa.io>`_::
+
+ $ python -m pip install urllib3
+
+Alternatively, you can grab the latest source code from `GitHub <https://github.com/urllib3/urllib3>`_::
+
+ $ git clone git://github.com/urllib3/urllib3.git
+ $ python setup.py install
+
+
+Documentation
+-------------
+
+urllib3 has usage and reference documentation at `urllib3.readthedocs.io <https://urllib3.readthedocs.io>`_.
+
+
+Contributing
+------------
+
+urllib3 happily accepts contributions. Please see our
+`contributing documentation <https://urllib3.readthedocs.io/en/latest/contributing.html>`_
+for some tips on getting started.
+
+
+Security Disclosures
+--------------------
+
+To report a security vulnerability, please use the
+`Tidelift security contact <https://tidelift.com/security>`_.
+Tidelift will coordinate the fix and disclosure with maintainers.
+
+
+Maintainers
+-----------
+
+- `@sethmlarson <https://github.com/sethmlarson>`__ (Seth M. Larson)
+- `@pquentin <https://github.com/pquentin>`__ (Quentin Pradet)
+- `@theacodes <https://github.com/theacodes>`__ (Thea Flowers)
+- `@haikuginger <https://github.com/haikuginger>`__ (Jess Shapiro)
+- `@lukasa <https://github.com/lukasa>`__ (Cory Benfield)
+- `@sigmavirus24 <https://github.com/sigmavirus24>`__ (Ian Stapleton Cordasco)
+- `@shazow <https://github.com/shazow>`__ (Andrey Petrov)
+
+👋
+
+
+Sponsorship
+-----------
+
+If your company benefits from this library, please consider `sponsoring its
+development <https://urllib3.readthedocs.io/en/latest/sponsors.html>`_.
+
+
+For Enterprise
+--------------
+
+.. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White_small.png
+ :width: 75
+ :alt: Tidelift
+
+.. list-table::
+ :widths: 10 100
+
+ * - |tideliftlogo|
+ - Professional support for urllib3 is available as part of the `Tidelift
+ Subscription`_. Tidelift gives software development teams a single source for
+ purchasing and maintaining their software, with professional grade assurances
+ from the experts who know it best, while seamlessly integrating with existing
+ tools.
+
+.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme
diff --git a/contrib/python/urllib3/patches/01-arcadia.patch b/contrib/python/urllib3/patches/01-arcadia.patch
index 22d4aaf235..adb12a8389 100644
--- a/contrib/python/urllib3/patches/01-arcadia.patch
+++ b/contrib/python/urllib3/patches/01-arcadia.patch
@@ -1,24 +1,24 @@
---- contrib/python/urllib3/urllib3/connection.py (index)
-+++ contrib/python/urllib3/urllib3/connection.py (working tree)
-@@ -349,7 +349,8 @@ class HTTPSConnection(HTTPConnection):
- self.key_password = key_password
- self.assert_hostname = assert_hostname
- self.assert_fingerprint = assert_fingerprint
-- self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
-+ self.ca_certs = os.path.expanduser(ca_certs) \
-+ if isinstance(ca_certs, six.string_types) else ca_certs
- self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
- self.ca_cert_data = ca_cert_data
-
---- contrib/python/urllib3/urllib3/response.py (index)
-+++ contrib/python/urllib3/urllib3/response.py (working tree)
-@@ -9,7 +9,8 @@ from socket import timeout as SocketTimeout
-
- try:
- import brotli
--except ImportError:
-+ brotli.error
-+except (ImportError, Exception):
- brotli = None
-
- from ._collections import HTTPHeaderDict
+--- contrib/python/urllib3/urllib3/connection.py (index)
++++ contrib/python/urllib3/urllib3/connection.py (working tree)
+@@ -349,7 +349,8 @@ class HTTPSConnection(HTTPConnection):
+ self.key_password = key_password
+ self.assert_hostname = assert_hostname
+ self.assert_fingerprint = assert_fingerprint
+- self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
++ self.ca_certs = os.path.expanduser(ca_certs) \
++ if isinstance(ca_certs, six.string_types) else ca_certs
+ self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
+ self.ca_cert_data = ca_cert_data
+
+--- contrib/python/urllib3/urllib3/response.py (index)
++++ contrib/python/urllib3/urllib3/response.py (working tree)
+@@ -9,7 +9,8 @@ from socket import timeout as SocketTimeout
+
+ try:
+ import brotli
+-except ImportError:
++ brotli.error
++except (ImportError, Exception):
+ brotli = None
+
+ from ._collections import HTTPHeaderDict
diff --git a/contrib/python/urllib3/patches/02-fix-ya.make.patch b/contrib/python/urllib3/patches/02-fix-ya.make.patch
index 599c7ebf97..a190705a61 100644
--- a/contrib/python/urllib3/patches/02-fix-ya.make.patch
+++ b/contrib/python/urllib3/patches/02-fix-ya.make.patch
@@ -1,31 +1,31 @@
---- contrib/python/urllib3/ya.make (index)
-+++ contrib/python/urllib3/ya.make (working tree)
-@@ -24,20 +22,20 @@ PY_SRCS(
- urllib3/connectionpool.py
- urllib3/contrib/__init__.py
- urllib3/contrib/_appengine_environ.py
-- urllib3/contrib/_securetransport/__init__.py
-- urllib3/contrib/_securetransport/bindings.py
-- urllib3/contrib/_securetransport/low_level.py
-+ #urllib3/contrib/_securetransport/__init__.py
-+ #urllib3/contrib/_securetransport/bindings.py
-+ #urllib3/contrib/_securetransport/low_level.py
- urllib3/contrib/appengine.py
-- urllib3/contrib/ntlmpool.py
-- urllib3/contrib/pyopenssl.py
-- urllib3/contrib/securetransport.py
-+ #urllib3/contrib/ntlmpool.py
-+ #urllib3/contrib/pyopenssl.py
-+ #urllib3/contrib/securetransport.py
- urllib3/contrib/socks.py
- urllib3/exceptions.py
- urllib3/fields.py
- urllib3/filepost.py
- urllib3/packages/__init__.py
-- urllib3/packages/backports/__init__.py
-- urllib3/packages/backports/makefile.py
-+ #urllib3/packages/backports/__init__.py
-+ #urllib3/packages/backports/makefile.py
- urllib3/packages/six.py
- urllib3/poolmanager.py
- urllib3/request.py
+--- contrib/python/urllib3/ya.make (index)
++++ contrib/python/urllib3/ya.make (working tree)
+@@ -24,20 +22,20 @@ PY_SRCS(
+ urllib3/connectionpool.py
+ urllib3/contrib/__init__.py
+ urllib3/contrib/_appengine_environ.py
+- urllib3/contrib/_securetransport/__init__.py
+- urllib3/contrib/_securetransport/bindings.py
+- urllib3/contrib/_securetransport/low_level.py
++ #urllib3/contrib/_securetransport/__init__.py
++ #urllib3/contrib/_securetransport/bindings.py
++ #urllib3/contrib/_securetransport/low_level.py
+ urllib3/contrib/appengine.py
+- urllib3/contrib/ntlmpool.py
+- urllib3/contrib/pyopenssl.py
+- urllib3/contrib/securetransport.py
++ #urllib3/contrib/ntlmpool.py
++ #urllib3/contrib/pyopenssl.py
++ #urllib3/contrib/securetransport.py
+ urllib3/contrib/socks.py
+ urllib3/exceptions.py
+ urllib3/fields.py
+ urllib3/filepost.py
+ urllib3/packages/__init__.py
+- urllib3/packages/backports/__init__.py
+- urllib3/packages/backports/makefile.py
++ #urllib3/packages/backports/__init__.py
++ #urllib3/packages/backports/makefile.py
+ urllib3/packages/six.py
+ urllib3/poolmanager.py
+ urllib3/request.py
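The ya.make hunk above keeps the securetransport, ntlmpool, pyopenssl and packages/backports sources commented out of PY_SRCS, so those contrib modules simply do not exist in this build. Code that wants one of them has to guard the import; an illustrative sketch, not project code:

    try:
        from urllib3.contrib import pyopenssl
        pyopenssl.inject_into_urllib3()  # real urllib3 helper
    except ImportError:
        pyopenssl = None  # fall back to the stdlib ssl module
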
diff --git a/contrib/python/urllib3/urllib3/__init__.py b/contrib/python/urllib3/urllib3/__init__.py
index fe86b59d78..237c789a7d 100644
--- a/contrib/python/urllib3/urllib3/__init__.py
+++ b/contrib/python/urllib3/urllib3/__init__.py
@@ -1,43 +1,43 @@
"""
-Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more
+Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more
"""
from __future__ import absolute_import
-
-# Set default logging handler to avoid "No handler found" warnings.
-import logging
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
import warnings
-from logging import NullHandler
+from logging import NullHandler
-from . import exceptions
-from ._version import __version__
-from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
+from . import exceptions
+from ._version import __version__
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
from .filepost import encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import HTTPResponse
from .util.request import make_headers
-from .util.retry import Retry
-from .util.timeout import Timeout
-from .util.url import get_host
+from .util.retry import Retry
+from .util.timeout import Timeout
+from .util.url import get_host
-__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
-__license__ = "MIT"
-__version__ = __version__
+__author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
+__license__ = "MIT"
+__version__ = __version__
__all__ = (
- "HTTPConnectionPool",
- "HTTPSConnectionPool",
- "PoolManager",
- "ProxyManager",
- "HTTPResponse",
- "Retry",
- "Timeout",
- "add_stderr_logger",
- "connection_from_url",
- "disable_warnings",
- "encode_multipart_formdata",
- "get_host",
- "make_headers",
- "proxy_from_url",
+ "HTTPConnectionPool",
+ "HTTPSConnectionPool",
+ "PoolManager",
+ "ProxyManager",
+ "HTTPResponse",
+ "Retry",
+ "Timeout",
+ "add_stderr_logger",
+ "connection_from_url",
+ "disable_warnings",
+ "encode_multipart_formdata",
+ "get_host",
+ "make_headers",
+ "proxy_from_url",
)
logging.getLogger(__name__).addHandler(NullHandler())
@@ -54,10 +54,10 @@ def add_stderr_logger(level=logging.DEBUG):
# even if urllib3 is vendored within another package.
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
- handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
+ handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
logger.addHandler(handler)
logger.setLevel(level)
- logger.debug("Added a stderr logging handler to logger: %s", __name__)
+ logger.debug("Added a stderr logging handler to logger: %s", __name__)
return handler
@@ -69,17 +69,17 @@ del NullHandler
# shouldn't be: otherwise, it's very hard for users to use most Python
# mechanisms to silence them.
# SecurityWarning's always go off by default.
-warnings.simplefilter("always", exceptions.SecurityWarning, append=True)
+warnings.simplefilter("always", exceptions.SecurityWarning, append=True)
# SubjectAltNameWarning's should go off once per host
-warnings.simplefilter("default", exceptions.SubjectAltNameWarning, append=True)
+warnings.simplefilter("default", exceptions.SubjectAltNameWarning, append=True)
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
-warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True)
+warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True)
# SNIMissingWarnings should go off only once.
-warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True)
+warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True)
def disable_warnings(category=exceptions.HTTPWarning):
"""
Helper for quickly disabling all urllib3 warnings.
"""
- warnings.simplefilter("ignore", category)
+ warnings.simplefilter("ignore", category)
diff --git a/contrib/python/urllib3/urllib3/_collections.py b/contrib/python/urllib3/urllib3/_collections.py
index da9857e986..9ce036c499 100644
--- a/contrib/python/urllib3/urllib3/_collections.py
+++ b/contrib/python/urllib3/urllib3/_collections.py
@@ -1,13 +1,13 @@
from __future__ import absolute_import
-
-try:
- from collections.abc import Mapping, MutableMapping
-except ImportError:
- from collections import Mapping, MutableMapping
+
try:
+ from collections.abc import Mapping, MutableMapping
+except ImportError:
+ from collections import Mapping, MutableMapping
+try:
from threading import RLock
except ImportError: # Platform-specific: No threads available
-
+
class RLock:
def __enter__(self):
pass
@@ -16,13 +16,13 @@ except ImportError: # Platform-specific: No threads available
pass
-from collections import OrderedDict
+from collections import OrderedDict
+
+from .exceptions import InvalidHeader
+from .packages import six
+from .packages.six import iterkeys, itervalues
-from .exceptions import InvalidHeader
-from .packages import six
-from .packages.six import iterkeys, itervalues
-
-__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]
+__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]
_Null = object()
@@ -85,9 +85,9 @@ class RecentlyUsedContainer(MutableMapping):
return len(self._container)
def __iter__(self):
- raise NotImplementedError(
- "Iteration over this class is unlikely to be threadsafe."
- )
+ raise NotImplementedError(
+ "Iteration over this class is unlikely to be threadsafe."
+ )
def clear(self):
with self.lock:
@@ -155,7 +155,7 @@ class HTTPHeaderDict(MutableMapping):
def __getitem__(self, key):
val = self._container[key.lower()]
- return ", ".join(val[1:])
+ return ", ".join(val[1:])
def __delitem__(self, key):
del self._container[key.lower()]
@@ -164,18 +164,18 @@ class HTTPHeaderDict(MutableMapping):
return key.lower() in self._container
def __eq__(self, other):
- if not isinstance(other, Mapping) and not hasattr(other, "keys"):
+ if not isinstance(other, Mapping) and not hasattr(other, "keys"):
return False
if not isinstance(other, type(self)):
other = type(self)(other)
- return dict((k.lower(), v) for k, v in self.itermerged()) == dict(
- (k.lower(), v) for k, v in other.itermerged()
- )
+ return dict((k.lower(), v) for k, v in self.itermerged()) == dict(
+ (k.lower(), v) for k, v in other.itermerged()
+ )
def __ne__(self, other):
return not self.__eq__(other)
- if six.PY2: # Python 2
+ if six.PY2: # Python 2
iterkeys = MutableMapping.iterkeys
itervalues = MutableMapping.itervalues
@@ -190,9 +190,9 @@ class HTTPHeaderDict(MutableMapping):
yield vals[0]
def pop(self, key, default=__marker):
- """D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
- If key is not found, d is returned if given, otherwise KeyError is raised.
- """
+ """D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+ If key is not found, d is returned if given, otherwise KeyError is raised.
+ """
# Using the MutableMapping function directly fails due to the private marker.
# Using ordinary dict.pop would expose the internal structures.
# So let's reinvent the wheel.
@@ -234,10 +234,10 @@ class HTTPHeaderDict(MutableMapping):
with self.add instead of self.__setitem__
"""
if len(args) > 1:
- raise TypeError(
- "extend() takes at most 1 positional "
- "arguments ({0} given)".format(len(args))
- )
+ raise TypeError(
+ "extend() takes at most 1 positional "
+ "arguments ({0} given)".format(len(args))
+ )
other = args[0] if len(args) >= 1 else ()
if isinstance(other, HTTPHeaderDict):
@@ -303,7 +303,7 @@ class HTTPHeaderDict(MutableMapping):
"""Iterate over all headers, merging duplicate ones together."""
for key in self:
val = self._container[key.lower()]
- yield val[0], ", ".join(val[1:])
+ yield val[0], ", ".join(val[1:])
def items(self):
return list(self.iteritems())
@@ -314,24 +314,24 @@ class HTTPHeaderDict(MutableMapping):
# python2.7 does not expose a proper API for exporting multiheaders
# efficiently. This function re-reads raw lines from the message
# object and extracts the multiheaders properly.
- obs_fold_continued_leaders = (" ", "\t")
+ obs_fold_continued_leaders = (" ", "\t")
headers = []
for line in message.headers:
- if line.startswith(obs_fold_continued_leaders):
- if not headers:
- # We received a header line that starts with OWS as described
- # in RFC-7230 S3.2.4. This indicates a multiline header, but
- # there exists no previous header to which we can attach it.
- raise InvalidHeader(
- "Header continuation with no previous header: %s" % line
- )
- else:
- key, value = headers[-1]
- headers[-1] = (key, value + " " + line.strip())
- continue
-
- key, value = line.split(":", 1)
+ if line.startswith(obs_fold_continued_leaders):
+ if not headers:
+ # We received a header line that starts with OWS as described
+ # in RFC-7230 S3.2.4. This indicates a multiline header, but
+ # there exists no previous header to which we can attach it.
+ raise InvalidHeader(
+ "Header continuation with no previous header: %s" % line
+ )
+ else:
+ key, value = headers[-1]
+ headers[-1] = (key, value + " " + line.strip())
+ continue
+
+ key, value = line.split(":", 1)
headers.append((key, value.strip()))
return cls(headers)
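The _collections.py diff is likewise whitespace-only; the restored lines spell out HTTPHeaderDict's multi-value model: duplicate keys are kept internally and joined with ", " on lookup, matching RFC 7230 field folding. A short demonstration of that behaviour:

    from urllib3._collections import HTTPHeaderDict

    h = HTTPHeaderDict()
    h.add("Set-Cookie", "a=1")
    h.add("Set-Cookie", "b=2")            # duplicates are kept, not replaced
    assert h["set-cookie"] == "a=1, b=2"  # case-insensitive, comma-joined
    assert list(h.itermerged()) == [("Set-Cookie", "a=1, b=2")]
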
diff --git a/contrib/python/urllib3/urllib3/_version.py b/contrib/python/urllib3/urllib3/_version.py
index fa8979d73e..bdc4904cb3 100644
--- a/contrib/python/urllib3/urllib3/_version.py
+++ b/contrib/python/urllib3/urllib3/_version.py
@@ -1,2 +1,2 @@
-# This file is protected via CODEOWNERS
-__version__ = "1.26.8"
+# This file is protected via CODEOWNERS
+__version__ = "1.26.8"
diff --git a/contrib/python/urllib3/urllib3/connection.py b/contrib/python/urllib3/urllib3/connection.py
index 625f966749..a7901be327 100644
--- a/contrib/python/urllib3/urllib3/connection.py
+++ b/contrib/python/urllib3/urllib3/connection.py
@@ -1,22 +1,22 @@
from __future__ import absolute_import
-
+
import datetime
import logging
import os
-import re
+import re
import socket
import warnings
-from socket import error as SocketError
-from socket import timeout as SocketTimeout
-
+from socket import error as SocketError
+from socket import timeout as SocketTimeout
+
from .packages import six
from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
from .packages.six.moves.http_client import HTTPException # noqa: F401
-from .util.proxy import create_proxy_ssl_context
+from .util.proxy import create_proxy_ssl_context
try: # Compiled with SSL?
import ssl
-
+
BaseSSLError = ssl.SSLError
except (ImportError, AttributeError): # Platform-specific: No SSL.
ssl = None
@@ -25,81 +25,81 @@ except (ImportError, AttributeError): # Platform-specific: No SSL.
pass
-try:
- # Python 3: not a no-op, we're adding this to the namespace so it can be imported.
+try:
+ # Python 3: not a no-op, we're adding this to the namespace so it can be imported.
ConnectionError = ConnectionError
-except NameError:
- # Python 2
+except NameError:
+ # Python 2
class ConnectionError(Exception):
pass
-try: # Python 3:
- # Not a no-op, we're adding this to the namespace so it can be imported.
- BrokenPipeError = BrokenPipeError
-except NameError: # Python 2:
-
- class BrokenPipeError(Exception):
- pass
-
-
-from ._collections import HTTPHeaderDict # noqa (historical, removed in v2)
-from ._version import __version__
+try: # Python 3:
+ # Not a no-op, we're adding this to the namespace so it can be imported.
+ BrokenPipeError = BrokenPipeError
+except NameError: # Python 2:
+
+ class BrokenPipeError(Exception):
+ pass
+
+
+from ._collections import HTTPHeaderDict # noqa (historical, removed in v2)
+from ._version import __version__
from .exceptions import (
- ConnectTimeoutError,
- NewConnectionError,
+ ConnectTimeoutError,
+ NewConnectionError,
SubjectAltNameWarning,
SystemTimeWarning,
)
-from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
+from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
from .util.ssl_ import (
- assert_fingerprint,
- create_urllib3_context,
- is_ipaddress,
- resolve_cert_reqs,
- resolve_ssl_version,
- ssl_wrap_socket,
+ assert_fingerprint,
+ create_urllib3_context,
+ is_ipaddress,
+ resolve_cert_reqs,
+ resolve_ssl_version,
+ ssl_wrap_socket,
)
-from .util.ssl_match_hostname import CertificateError, match_hostname
+from .util.ssl_match_hostname import CertificateError, match_hostname
log = logging.getLogger(__name__)
-port_by_scheme = {"http": 80, "https": 443}
+port_by_scheme = {"http": 80, "https": 443}
-# When it comes time to update this value as a part of regular maintenance
-# (ie test_recent_date is failing) update it to ~6 months before the current date.
-RECENT_DATE = datetime.date(2020, 7, 1)
-
-_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
+# When it comes time to update this value as a part of regular maintenance
+# (ie test_recent_date is failing) update it to ~6 months before the current date.
+RECENT_DATE = datetime.date(2020, 7, 1)
+_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
+
class HTTPConnection(_HTTPConnection, object):
"""
- Based on :class:`http.client.HTTPConnection` but provides an extra constructor
+ Based on :class:`http.client.HTTPConnection` but provides an extra constructor
backwards-compatibility layer between older and newer Pythons.
Additional keyword parameters are used to configure attributes of the connection.
Accepted parameters include:
- - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
- - ``source_address``: Set the source address for the current connection.
- - ``socket_options``: Set specific options on the underlying socket. If not specified, then
- defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
- Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
-
- For example, if you wish to enable TCP Keep Alive in addition to the defaults,
- you might pass:
+ - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
+ - ``source_address``: Set the source address for the current connection.
+ - ``socket_options``: Set specific options on the underlying socket. If not specified, then
+ defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
+ Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
- .. code-block:: python
+ For example, if you wish to enable TCP Keep Alive in addition to the defaults,
+ you might pass:
- HTTPConnection.default_socket_options + [
- (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
- ]
+ .. code-block:: python
- Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
+ HTTPConnection.default_socket_options + [
+ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+ ]
+
+ Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
"""
- default_port = port_by_scheme["http"]
+ default_port = port_by_scheme["http"]
#: Disable Nagle's algorithm by default.
#: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
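The class docstring restored above documents socket_options; a usage sketch of that constructor parameter, enabling TCP keep-alive on top of the defaults:

    import socket
    from urllib3.connection import HTTPConnection

    opts = HTTPConnection.default_socket_options + [
        (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
    ]
    # Constructing the object does not open the socket yet; the options
    # are applied later in _new_conn() via create_connection().
    conn = HTTPConnection("localhost", 80, socket_options=opts)
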
@@ -108,94 +108,94 @@ class HTTPConnection(_HTTPConnection, object):
#: Whether this connection verifies the host's certificate.
is_verified = False
- #: Whether this proxy connection (if used) verifies the proxy host's
- #: certificate.
- proxy_is_verified = None
-
+ #: Whether this proxy connection (if used) verifies the proxy host's
+ #: certificate.
+ proxy_is_verified = None
+
def __init__(self, *args, **kw):
- if not six.PY2:
- kw.pop("strict", None)
+ if not six.PY2:
+ kw.pop("strict", None)
- # Pre-set source_address.
- self.source_address = kw.get("source_address")
+ # Pre-set source_address.
+ self.source_address = kw.get("source_address")
#: The socket options provided by the user. If no options are
#: provided, we use the default options.
- self.socket_options = kw.pop("socket_options", self.default_socket_options)
-
- # Proxy options provided by the user.
- self.proxy = kw.pop("proxy", None)
- self.proxy_config = kw.pop("proxy_config", None)
+ self.socket_options = kw.pop("socket_options", self.default_socket_options)
+ # Proxy options provided by the user.
+ self.proxy = kw.pop("proxy", None)
+ self.proxy_config = kw.pop("proxy_config", None)
+
_HTTPConnection.__init__(self, *args, **kw)
- @property
- def host(self):
- """
- Getter method to remove any trailing dots that indicate the hostname is an FQDN.
-
- In general, SSL certificates don't include the trailing dot indicating a
- fully-qualified domain name, and thus, they don't validate properly when
- checked against a domain name that includes the dot. In addition, some
- servers may not expect to receive the trailing dot when provided.
-
- However, the hostname with trailing dot is critical to DNS resolution; doing a
- lookup with the trailing dot will properly only resolve the appropriate FQDN,
- whereas a lookup without a trailing dot will search the system's search domain
- list. Thus, it's important to keep the original host around for use only in
- those cases where it's appropriate (i.e., when doing DNS lookup to establish the
- actual TCP connection across which we're going to send HTTP requests).
- """
- return self._dns_host.rstrip(".")
-
- @host.setter
- def host(self, value):
- """
- Setter for the `host` property.
-
- We assume that only urllib3 uses the _dns_host attribute; httplib itself
- only uses `host`, and it seems reasonable that other libraries follow suit.
- """
- self._dns_host = value
-
+ @property
+ def host(self):
+ """
+ Getter method to remove any trailing dots that indicate the hostname is an FQDN.
+
+ In general, SSL certificates don't include the trailing dot indicating a
+ fully-qualified domain name, and thus, they don't validate properly when
+ checked against a domain name that includes the dot. In addition, some
+ servers may not expect to receive the trailing dot when provided.
+
+ However, the hostname with trailing dot is critical to DNS resolution; doing a
+ lookup with the trailing dot will properly only resolve the appropriate FQDN,
+ whereas a lookup without a trailing dot will search the system's search domain
+ list. Thus, it's important to keep the original host around for use only in
+ those cases where it's appropriate (i.e., when doing DNS lookup to establish the
+ actual TCP connection across which we're going to send HTTP requests).
+ """
+ return self._dns_host.rstrip(".")
+
+ @host.setter
+ def host(self, value):
+ """
+ Setter for the `host` property.
+
+ We assume that only urllib3 uses the _dns_host attribute; httplib itself
+ only uses `host`, and it seems reasonable that other libraries follow suit.
+ """
+ self._dns_host = value
+
def _new_conn(self):
- """Establish a socket connection and set nodelay settings on it.
+ """Establish a socket connection and set nodelay settings on it.
:return: New socket connection.
"""
extra_kw = {}
if self.source_address:
- extra_kw["source_address"] = self.source_address
+ extra_kw["source_address"] = self.source_address
if self.socket_options:
- extra_kw["socket_options"] = self.socket_options
+ extra_kw["socket_options"] = self.socket_options
try:
conn = connection.create_connection(
- (self._dns_host, self.port), self.timeout, **extra_kw
- )
+ (self._dns_host, self.port), self.timeout, **extra_kw
+ )
- except SocketTimeout:
+ except SocketTimeout:
raise ConnectTimeoutError(
- self,
- "Connection to %s timed out. (connect timeout=%s)"
- % (self.host, self.timeout),
- )
+ self,
+ "Connection to %s timed out. (connect timeout=%s)"
+ % (self.host, self.timeout),
+ )
except SocketError as e:
raise NewConnectionError(
- self, "Failed to establish a new connection: %s" % e
- )
+ self, "Failed to establish a new connection: %s" % e
+ )
return conn
- def _is_using_tunnel(self):
- # Google App Engine's httplib does not define _tunnel_host
- return getattr(self, "_tunnel_host", None)
-
+ def _is_using_tunnel(self):
+ # Google App Engine's httplib does not define _tunnel_host
+ return getattr(self, "_tunnel_host", None)
+
def _prepare_conn(self, conn):
self.sock = conn
- if self._is_using_tunnel():
+ if self._is_using_tunnel():
# TODO: Fix tunnel so it doesn't depend on self.sock state.
self._tunnel()
# Mark this connection as not reusable
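The host property restored in this hunk is easy to observe: the public attribute drops an FQDN's trailing dot (so certificate matching works), while the private _dns_host keeps it for resolution:

    from urllib3.connection import HTTPConnection

    conn = HTTPConnection("example.com.", 80)
    assert conn.host == "example.com"        # used for TLS hostname checks
    assert conn._dns_host == "example.com."  # used for the DNS lookup
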
@@ -205,168 +205,168 @@ class HTTPConnection(_HTTPConnection, object):
conn = self._new_conn()
self._prepare_conn(conn)
- def putrequest(self, method, url, *args, **kwargs):
- """ """
- # Empty docstring because the indentation of CPython's implementation
- # is broken but we don't want this method in our documentation.
- match = _CONTAINS_CONTROL_CHAR_RE.search(method)
- if match:
- raise ValueError(
- "Method cannot contain non-token characters %r (found at least %r)"
- % (method, match.group())
- )
-
- return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)
-
- def putheader(self, header, *values):
- """ """
- if not any(isinstance(v, str) and v == SKIP_HEADER for v in values):
- _HTTPConnection.putheader(self, header, *values)
- elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
- raise ValueError(
- "urllib3.util.SKIP_HEADER only supports '%s'"
- % ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),)
- )
-
- def request(self, method, url, body=None, headers=None):
- if headers is None:
- headers = {}
- else:
- # Avoid modifying the headers passed into .request()
- headers = headers.copy()
- if "user-agent" not in (six.ensure_str(k.lower()) for k in headers):
- headers["User-Agent"] = _get_default_user_agent()
- super(HTTPConnection, self).request(method, url, body=body, headers=headers)
-
+ def putrequest(self, method, url, *args, **kwargs):
+ """ """
+ # Empty docstring because the indentation of CPython's implementation
+ # is broken but we don't want this method in our documentation.
+ match = _CONTAINS_CONTROL_CHAR_RE.search(method)
+ if match:
+ raise ValueError(
+ "Method cannot contain non-token characters %r (found at least %r)"
+ % (method, match.group())
+ )
+
+ return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)
+
+ def putheader(self, header, *values):
+ """ """
+ if not any(isinstance(v, str) and v == SKIP_HEADER for v in values):
+ _HTTPConnection.putheader(self, header, *values)
+ elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
+ raise ValueError(
+ "urllib3.util.SKIP_HEADER only supports '%s'"
+ % ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),)
+ )
+
+ def request(self, method, url, body=None, headers=None):
+ if headers is None:
+ headers = {}
+ else:
+ # Avoid modifying the headers passed into .request()
+ headers = headers.copy()
+ if "user-agent" not in (six.ensure_str(k.lower()) for k in headers):
+ headers["User-Agent"] = _get_default_user_agent()
+ super(HTTPConnection, self).request(method, url, body=body, headers=headers)
+
def request_chunked(self, method, url, body=None, headers=None):
"""
Alternative to the common request method, which sends the
body with chunked encoding and not as one block
"""
- headers = headers or {}
- header_keys = set([six.ensure_str(k.lower()) for k in headers])
- skip_accept_encoding = "accept-encoding" in header_keys
- skip_host = "host" in header_keys
+ headers = headers or {}
+ header_keys = set([six.ensure_str(k.lower()) for k in headers])
+ skip_accept_encoding = "accept-encoding" in header_keys
+ skip_host = "host" in header_keys
self.putrequest(
- method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
+ method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
)
- if "user-agent" not in header_keys:
- self.putheader("User-Agent", _get_default_user_agent())
+ if "user-agent" not in header_keys:
+ self.putheader("User-Agent", _get_default_user_agent())
for header, value in headers.items():
self.putheader(header, value)
- if "transfer-encoding" not in header_keys:
- self.putheader("Transfer-Encoding", "chunked")
+ if "transfer-encoding" not in header_keys:
+ self.putheader("Transfer-Encoding", "chunked")
self.endheaders()
if body is not None:
- stringish_types = six.string_types + (bytes,)
+ stringish_types = six.string_types + (bytes,)
if isinstance(body, stringish_types):
body = (body,)
for chunk in body:
if not chunk:
continue
- if not isinstance(chunk, bytes):
- chunk = chunk.encode("utf8")
+ if not isinstance(chunk, bytes):
+ chunk = chunk.encode("utf8")
len_str = hex(len(chunk))[2:]
- to_send = bytearray(len_str.encode())
- to_send += b"\r\n"
- to_send += chunk
- to_send += b"\r\n"
- self.send(to_send)
+ to_send = bytearray(len_str.encode())
+ to_send += b"\r\n"
+ to_send += chunk
+ to_send += b"\r\n"
+ self.send(to_send)
# After the if clause, to always have a closed body
- self.send(b"0\r\n\r\n")
+ self.send(b"0\r\n\r\n")
class HTTPSConnection(HTTPConnection):
- """
- Many of the parameters to this constructor are passed to the underlying SSL
- socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
- """
-
- default_port = port_by_scheme["https"]
-
- cert_reqs = None
- ca_certs = None
- ca_cert_dir = None
- ca_cert_data = None
+ """
+ Many of the parameters to this constructor are passed to the underlying SSL
+ socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
+ """
+
+ default_port = port_by_scheme["https"]
+
+ cert_reqs = None
+ ca_certs = None
+ ca_cert_dir = None
+ ca_cert_data = None
ssl_version = None
- assert_fingerprint = None
- tls_in_tls_required = False
-
- def __init__(
- self,
- host,
- port=None,
- key_file=None,
- cert_file=None,
- key_password=None,
- strict=None,
- timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
- ssl_context=None,
- server_hostname=None,
- **kw
- ):
-
- HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw)
+ assert_fingerprint = None
+ tls_in_tls_required = False
+
+ def __init__(
+ self,
+ host,
+ port=None,
+ key_file=None,
+ cert_file=None,
+ key_password=None,
+ strict=None,
+ timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+ ssl_context=None,
+ server_hostname=None,
+ **kw
+ ):
+
+ HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw)
self.key_file = key_file
self.cert_file = cert_file
- self.key_password = key_password
+ self.key_password = key_password
self.ssl_context = ssl_context
- self.server_hostname = server_hostname
+ self.server_hostname = server_hostname
# Required property for Google AppEngine 1.9.0 which otherwise causes
# HTTPS requests to go out as HTTP. (See Issue #356)
- self._protocol = "https"
-
- def set_cert(
- self,
- key_file=None,
- cert_file=None,
- cert_reqs=None,
- key_password=None,
- ca_certs=None,
- assert_hostname=None,
- assert_fingerprint=None,
- ca_cert_dir=None,
- ca_cert_data=None,
- ):
+ self._protocol = "https"
+
+ def set_cert(
+ self,
+ key_file=None,
+ cert_file=None,
+ cert_reqs=None,
+ key_password=None,
+ ca_certs=None,
+ assert_hostname=None,
+ assert_fingerprint=None,
+ ca_cert_dir=None,
+ ca_cert_data=None,
+ ):
"""
This method should only be called once, before the connection is used.
"""
- # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also
- # have an SSLContext object in which case we'll use its verify_mode.
+ # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also
+ # have an SSLContext object in which case we'll use its verify_mode.
if cert_reqs is None:
- if self.ssl_context is not None:
+ if self.ssl_context is not None:
cert_reqs = self.ssl_context.verify_mode
- else:
- cert_reqs = resolve_cert_reqs(None)
+ else:
+ cert_reqs = resolve_cert_reqs(None)
self.key_file = key_file
self.cert_file = cert_file
self.cert_reqs = cert_reqs
- self.key_password = key_password
+ self.key_password = key_password
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
self.ca_certs = os.path.expanduser(ca_certs) \
if isinstance(ca_certs, six.string_types) else ca_certs
self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
- self.ca_cert_data = ca_cert_data
+ self.ca_cert_data = ca_cert_data
def connect(self):
# Add certificate verification
conn = self._new_conn()
hostname = self.host
- tls_in_tls = False
-
- if self._is_using_tunnel():
- if self.tls_in_tls_required:
- conn = self._connect_tls_proxy(hostname, conn)
- tls_in_tls = True
+ tls_in_tls = False
+ if self._is_using_tunnel():
+ if self.tls_in_tls_required:
+ conn = self._connect_tls_proxy(hostname, conn)
+ tls_in_tls = True
+
self.sock = conn
-
+
# Calls self._set_hostport(), so self.host is
# self._tunnel_host below.
self._tunnel()
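request_chunked(), restored above, frames each body chunk by hand: hex length, CRLF, payload, CRLF, with a zero-length chunk closing the body. The framing in isolation, as a standalone sketch rather than urllib3 code:

    def frame_chunk(chunk):
        # Mirrors the loop body in request_chunked() above.
        if not isinstance(chunk, bytes):
            chunk = chunk.encode("utf8")
        return hex(len(chunk))[2:].encode() + b"\r\n" + chunk + b"\r\n"

    assert frame_chunk(b"hello") == b"5\r\nhello\r\n"
    terminator = b"0\r\n\r\n"  # sent once after the body, even if empty
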
@@ -376,25 +376,25 @@ class HTTPSConnection(HTTPConnection):
# Override the host with the one we're requesting data from.
hostname = self._tunnel_host
- server_hostname = hostname
- if self.server_hostname is not None:
- server_hostname = self.server_hostname
-
+ server_hostname = hostname
+ if self.server_hostname is not None:
+ server_hostname = self.server_hostname
+
is_time_off = datetime.date.today() < RECENT_DATE
if is_time_off:
- warnings.warn(
- (
- "System time is way off (before {0}). This will probably "
- "lead to SSL verification errors"
- ).format(RECENT_DATE),
- SystemTimeWarning,
+ warnings.warn(
+ (
+ "System time is way off (before {0}). This will probably "
+ "lead to SSL verification errors"
+ ).format(RECENT_DATE),
+ SystemTimeWarning,
)
# Wrap socket using verification with the root certs in
# trusted_root_certs
- default_ssl_context = False
+ default_ssl_context = False
if self.ssl_context is None:
- default_ssl_context = True
+ default_ssl_context = True
self.ssl_context = create_urllib3_context(
ssl_version=resolve_ssl_version(self.ssl_version),
cert_reqs=resolve_cert_reqs(self.cert_reqs),
@@ -402,150 +402,150 @@ class HTTPSConnection(HTTPConnection):
context = self.ssl_context
context.verify_mode = resolve_cert_reqs(self.cert_reqs)
-
- # Try to load OS default certs if none are given.
- # Works well on Windows (requires Python3.4+)
- if (
- not self.ca_certs
- and not self.ca_cert_dir
- and not self.ca_cert_data
- and default_ssl_context
- and hasattr(context, "load_default_certs")
- ):
- context.load_default_certs()
-
+
+ # Try to load OS default certs if none are given.
+ # Works well on Windows (requires Python3.4+)
+ if (
+ not self.ca_certs
+ and not self.ca_cert_dir
+ and not self.ca_cert_data
+ and default_ssl_context
+ and hasattr(context, "load_default_certs")
+ ):
+ context.load_default_certs()
+
self.sock = ssl_wrap_socket(
sock=conn,
keyfile=self.key_file,
certfile=self.cert_file,
- key_password=self.key_password,
+ key_password=self.key_password,
ca_certs=self.ca_certs,
ca_cert_dir=self.ca_cert_dir,
- ca_cert_data=self.ca_cert_data,
- server_hostname=server_hostname,
- ssl_context=context,
- tls_in_tls=tls_in_tls,
- )
-
- # If we're using all defaults and the connection
- # is TLSv1 or TLSv1.1 we throw a DeprecationWarning
- # for the host.
- if (
- default_ssl_context
- and self.ssl_version is None
- and hasattr(self.sock, "version")
- and self.sock.version() in {"TLSv1", "TLSv1.1"}
- ):
- warnings.warn(
- "Negotiating TLSv1/TLSv1.1 by default is deprecated "
- "and will be disabled in urllib3 v2.0.0. Connecting to "
- "'%s' with '%s' can be enabled by explicitly opting-in "
- "with 'ssl_version'" % (self.host, self.sock.version()),
- DeprecationWarning,
- )
-
+ ca_cert_data=self.ca_cert_data,
+ server_hostname=server_hostname,
+ ssl_context=context,
+ tls_in_tls=tls_in_tls,
+ )
+
+ # If we're using all defaults and the connection
+ # is TLSv1 or TLSv1.1 we throw a DeprecationWarning
+ # for the host.
+ if (
+ default_ssl_context
+ and self.ssl_version is None
+ and hasattr(self.sock, "version")
+ and self.sock.version() in {"TLSv1", "TLSv1.1"}
+ ):
+ warnings.warn(
+ "Negotiating TLSv1/TLSv1.1 by default is deprecated "
+ "and will be disabled in urllib3 v2.0.0. Connecting to "
+ "'%s' with '%s' can be enabled by explicitly opting-in "
+ "with 'ssl_version'" % (self.host, self.sock.version()),
+ DeprecationWarning,
+ )
+
if self.assert_fingerprint:
- assert_fingerprint(
- self.sock.getpeercert(binary_form=True), self.assert_fingerprint
- )
- elif (
- context.verify_mode != ssl.CERT_NONE
- and not getattr(context, "check_hostname", False)
- and self.assert_hostname is not False
- ):
+ assert_fingerprint(
+ self.sock.getpeercert(binary_form=True), self.assert_fingerprint
+ )
+ elif (
+ context.verify_mode != ssl.CERT_NONE
+ and not getattr(context, "check_hostname", False)
+ and self.assert_hostname is not False
+ ):
# While urllib3 attempts to always turn off hostname matching from
# the TLS library, this cannot always be done. So we check whether
# the TLS Library still thinks it's matching hostnames.
cert = self.sock.getpeercert()
- if not cert.get("subjectAltName", ()):
- warnings.warn(
- (
- "Certificate for {0} has no `subjectAltName`, falling back to check for a "
- "`commonName` for now. This feature is being removed by major browsers and "
- "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
- "for details.)".format(hostname)
- ),
- SubjectAltNameWarning,
+ if not cert.get("subjectAltName", ()):
+ warnings.warn(
+ (
+ "Certificate for {0} has no `subjectAltName`, falling back to check for a "
+ "`commonName` for now. This feature is being removed by major browsers and "
+ "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
+ "for details.)".format(hostname)
+ ),
+ SubjectAltNameWarning,
)
- _match_hostname(cert, self.assert_hostname or server_hostname)
+ _match_hostname(cert, self.assert_hostname or server_hostname)
self.is_verified = (
- context.verify_mode == ssl.CERT_REQUIRED
- or self.assert_fingerprint is not None
- )
-
- def _connect_tls_proxy(self, hostname, conn):
- """
- Establish a TLS connection to the proxy using the provided SSL context.
- """
- proxy_config = self.proxy_config
- ssl_context = proxy_config.ssl_context
- if ssl_context:
- # If the user provided a proxy context, we assume CA and client
- # certificates have already been set
- return ssl_wrap_socket(
- sock=conn,
- server_hostname=hostname,
- ssl_context=ssl_context,
- )
-
- ssl_context = create_proxy_ssl_context(
- self.ssl_version,
- self.cert_reqs,
- self.ca_certs,
- self.ca_cert_dir,
- self.ca_cert_data,
- )
-
- # If no cert was provided, use only the default options for server
- # certificate validation
- socket = ssl_wrap_socket(
- sock=conn,
- ca_certs=self.ca_certs,
- ca_cert_dir=self.ca_cert_dir,
- ca_cert_data=self.ca_cert_data,
- server_hostname=hostname,
- ssl_context=ssl_context,
+ context.verify_mode == ssl.CERT_REQUIRED
+ or self.assert_fingerprint is not None
)
- if ssl_context.verify_mode != ssl.CERT_NONE and not getattr(
- ssl_context, "check_hostname", False
- ):
- # While urllib3 attempts to always turn off hostname matching from
- # the TLS library, this cannot always be done. So we check whether
- # the TLS Library still thinks it's matching hostnames.
- cert = socket.getpeercert()
- if not cert.get("subjectAltName", ()):
- warnings.warn(
- (
- "Certificate for {0} has no `subjectAltName`, falling back to check for a "
- "`commonName` for now. This feature is being removed by major browsers and "
- "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
- "for details.)".format(hostname)
- ),
- SubjectAltNameWarning,
- )
- _match_hostname(cert, hostname)
-
- self.proxy_is_verified = ssl_context.verify_mode == ssl.CERT_REQUIRED
- return socket
-
-
+ def _connect_tls_proxy(self, hostname, conn):
+ """
+ Establish a TLS connection to the proxy using the provided SSL context.
+ """
+ proxy_config = self.proxy_config
+ ssl_context = proxy_config.ssl_context
+ if ssl_context:
+ # If the user provided a proxy context, we assume CA and client
+ # certificates have already been set
+ return ssl_wrap_socket(
+ sock=conn,
+ server_hostname=hostname,
+ ssl_context=ssl_context,
+ )
+
+ ssl_context = create_proxy_ssl_context(
+ self.ssl_version,
+ self.cert_reqs,
+ self.ca_certs,
+ self.ca_cert_dir,
+ self.ca_cert_data,
+ )
+
+ # If no cert was provided, use only the default options for server
+ # certificate validation
+ socket = ssl_wrap_socket(
+ sock=conn,
+ ca_certs=self.ca_certs,
+ ca_cert_dir=self.ca_cert_dir,
+ ca_cert_data=self.ca_cert_data,
+ server_hostname=hostname,
+ ssl_context=ssl_context,
+ )
+
+ if ssl_context.verify_mode != ssl.CERT_NONE and not getattr(
+ ssl_context, "check_hostname", False
+ ):
+ # While urllib3 attempts to always turn off hostname matching from
+ # the TLS library, this cannot always be done. So we check whether
+ # the TLS Library still thinks it's matching hostnames.
+ cert = socket.getpeercert()
+ if not cert.get("subjectAltName", ()):
+ warnings.warn(
+ (
+ "Certificate for {0} has no `subjectAltName`, falling back to check for a "
+ "`commonName` for now. This feature is being removed by major browsers and "
+ "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
+ "for details.)".format(hostname)
+ ),
+ SubjectAltNameWarning,
+ )
+ _match_hostname(cert, hostname)
+
+ self.proxy_is_verified = ssl_context.verify_mode == ssl.CERT_REQUIRED
+ return socket
+
+
def _match_hostname(cert, asserted_hostname):
- # Our upstream implementation of ssl.match_hostname()
- # only applies this normalization to IP addresses so it doesn't
- # match DNS SANs so we do the same thing!
- stripped_hostname = asserted_hostname.strip("u[]")
- if is_ipaddress(stripped_hostname):
- asserted_hostname = stripped_hostname
-
+ # Our upstream implementation of ssl.match_hostname()
+ # only applies this normalization to IP addresses so it doesn't
+ # match DNS SANs so we do the same thing!
+ stripped_hostname = asserted_hostname.strip("u[]")
+ if is_ipaddress(stripped_hostname):
+ asserted_hostname = stripped_hostname
+
try:
match_hostname(cert, asserted_hostname)
except CertificateError as e:
- log.warning(
- "Certificate did not match expected hostname: %s. Certificate: %s",
- asserted_hostname,
- cert,
+ log.warning(
+ "Certificate did not match expected hostname: %s. Certificate: %s",
+ asserted_hostname,
+ cert,
)
# Add cert to exception and reraise so client code can inspect
# the cert when catching the exception, if they want to
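_match_hostname(), restored above, normalizes bracketed IP literals (and a stray Python 2 "u" prefix) before delegating to match_hostname(), since the upstream implementation only applies that normalization to IP addresses. The normalization step on its own:

    from urllib3.util.ssl_ import is_ipaddress

    asserted = "[::1]"
    stripped = asserted.strip("u[]")  # drop brackets / leading "u"
    if is_ipaddress(stripped):
        asserted = stripped
    assert asserted == "::1"
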
@@ -553,18 +553,18 @@ def _match_hostname(cert, asserted_hostname):
raise
-def _get_default_user_agent():
- return "python-urllib3/%s" % __version__
-
-
-class DummyConnection(object):
- """Used to detect a failed ConnectionCls import."""
-
- pass
-
-
-if not ssl:
- HTTPSConnection = DummyConnection # noqa: F811
-
-
-VerifiedHTTPSConnection = HTTPSConnection
+def _get_default_user_agent():
+ return "python-urllib3/%s" % __version__
+
+
+class DummyConnection(object):
+ """Used to detect a failed ConnectionCls import."""
+
+ pass
+
+
+if not ssl:
+ HTTPSConnection = DummyConnection # noqa: F811
+
+
+VerifiedHTTPSConnection = HTTPSConnection
diff --git a/contrib/python/urllib3/urllib3/connectionpool.py b/contrib/python/urllib3/urllib3/connectionpool.py
index 15bffcb23a..00d392868c 100644
--- a/contrib/python/urllib3/urllib3/connectionpool.py
+++ b/contrib/python/urllib3/urllib3/connectionpool.py
@@ -1,34 +1,34 @@
from __future__ import absolute_import
-
+
import errno
import logging
-import re
-import socket
+import re
+import socket
import sys
import warnings
-from socket import error as SocketError
-from socket import timeout as SocketTimeout
-
-from .connection import (
- BaseSSLError,
- BrokenPipeError,
- DummyConnection,
- HTTPConnection,
- HTTPException,
- HTTPSConnection,
- VerifiedHTTPSConnection,
- port_by_scheme,
-)
+from socket import error as SocketError
+from socket import timeout as SocketTimeout
+
+from .connection import (
+ BaseSSLError,
+ BrokenPipeError,
+ DummyConnection,
+ HTTPConnection,
+ HTTPException,
+ HTTPSConnection,
+ VerifiedHTTPSConnection,
+ port_by_scheme,
+)
from .exceptions import (
ClosedPoolError,
EmptyPoolError,
HeaderParsingError,
HostChangedError,
- InsecureRequestWarning,
+ InsecureRequestWarning,
LocationValueError,
MaxRetryError,
- NewConnectionError,
- ProtocolError,
+ NewConnectionError,
+ ProtocolError,
ProxyError,
ReadTimeoutError,
SSLError,
@@ -39,16 +39,16 @@ from .packages.six.moves import queue
from .request import RequestMethods
from .response import HTTPResponse
from .util.connection import is_connection_dropped
-from .util.proxy import connection_requires_http_tunnel
-from .util.queue import LifoQueue
+from .util.proxy import connection_requires_http_tunnel
+from .util.queue import LifoQueue
from .util.request import set_file_position
from .util.response import assert_header_parsing
from .util.retry import Retry
-from .util.ssl_match_hostname import CertificateError
+from .util.ssl_match_hostname import CertificateError
from .util.timeout import Timeout
-from .util.url import Url, _encode_target
-from .util.url import _normalize_host as normalize_host
-from .util.url import get_host, parse_url
+from .util.url import Url, _encode_target
+from .util.url import _normalize_host as normalize_host
+from .util.url import get_host, parse_url
xrange = six.moves.xrange
@@ -62,26 +62,26 @@ class ConnectionPool(object):
"""
Base class for all connection pools, such as
:class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
-
- .. note::
- ConnectionPool.urlopen() does not normalize or percent-encode target URIs
- which is useful if your target server doesn't support percent-encoded
- target URIs.
+
+ .. note::
+ ConnectionPool.urlopen() does not normalize or percent-encode target URIs
+ which is useful if your target server doesn't support percent-encoded
+ target URIs.
"""
scheme = None
- QueueCls = LifoQueue
+ QueueCls = LifoQueue
def __init__(self, host, port=None):
if not host:
raise LocationValueError("No host specified.")
- self.host = _normalize_host(host, scheme=self.scheme)
+ self.host = _normalize_host(host, scheme=self.scheme)
self._proxy_host = host.lower()
self.port = port
def __str__(self):
- return "%s(host=%r, port=%r)" % (type(self).__name__, self.host, self.port)
+ return "%s(host=%r, port=%r)" % (type(self).__name__, self.host, self.port)
def __enter__(self):
return self
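The context-manager support shown above pairs __enter__ with a close() call on exit, so a pool can be scoped to a block:

    from urllib3 import HTTPConnectionPool

    with HTTPConnectionPool("localhost", 80, maxsize=2) as pool:
        pass  # pool.request(...) / pool.urlopen(...) go here
    # leaving the block closed all pooled sockets and disabled the pool
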
@@ -99,7 +99,7 @@ class ConnectionPool(object):
# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
-_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}
+_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}
class HTTPConnectionPool(ConnectionPool, RequestMethods):
@@ -108,16 +108,16 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
:param host:
Host used for this HTTP Connection (e.g. "localhost"), passed into
- :class:`http.client.HTTPConnection`.
+ :class:`http.client.HTTPConnection`.
:param port:
Port used for this HTTP Connection (None is equivalent to 80), passed
- into :class:`http.client.HTTPConnection`.
+ into :class:`http.client.HTTPConnection`.
:param strict:
Causes BadStatusLine to be raised if the status line can't be parsed
as a valid HTTP/1.0 or 1.1 status line, passed into
- :class:`http.client.HTTPConnection`.
+ :class:`http.client.HTTPConnection`.
.. note::
Only works in Python 2. This parameter is ignored in Python 3.
@@ -151,36 +151,36 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
:param _proxy:
Parsed proxy URL, should not be used directly, instead, see
- :class:`urllib3.ProxyManager`
+ :class:`urllib3.ProxyManager`
:param _proxy_headers:
A dictionary with proxy headers, should not be used directly,
- instead, see :class:`urllib3.ProxyManager`
+ instead, see :class:`urllib3.ProxyManager`
:param \\**conn_kw:
Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
:class:`urllib3.connection.HTTPSConnection` instances.
"""
- scheme = "http"
+ scheme = "http"
ConnectionCls = HTTPConnection
ResponseCls = HTTPResponse
- def __init__(
- self,
- host,
- port=None,
- strict=False,
- timeout=Timeout.DEFAULT_TIMEOUT,
- maxsize=1,
- block=False,
- headers=None,
- retries=None,
- _proxy=None,
- _proxy_headers=None,
- _proxy_config=None,
- **conn_kw
- ):
+ def __init__(
+ self,
+ host,
+ port=None,
+ strict=False,
+ timeout=Timeout.DEFAULT_TIMEOUT,
+ maxsize=1,
+ block=False,
+ headers=None,
+ retries=None,
+ _proxy=None,
+ _proxy_headers=None,
+ _proxy_config=None,
+ **conn_kw
+ ):
ConnectionPool.__init__(self, host, port)
RequestMethods.__init__(self, headers)
@@ -200,7 +200,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
self.proxy = _proxy
self.proxy_headers = _proxy_headers or {}
- self.proxy_config = _proxy_config
+ self.proxy_config = _proxy_config
# Fill the queue up so that doing get() on it will block properly
for _ in xrange(maxsize):
@@ -215,30 +215,30 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
# We cannot know if the user has added default socket options, so we cannot replace the
# list.
- self.conn_kw.setdefault("socket_options", [])
-
- self.conn_kw["proxy"] = self.proxy
- self.conn_kw["proxy_config"] = self.proxy_config
+ self.conn_kw.setdefault("socket_options", [])
+ self.conn_kw["proxy"] = self.proxy
+ self.conn_kw["proxy_config"] = self.proxy_config
+
def _new_conn(self):
"""
Return a fresh :class:`HTTPConnection`.
"""
self.num_connections += 1
- log.debug(
- "Starting new HTTP connection (%d): %s:%s",
- self.num_connections,
- self.host,
- self.port or "80",
- )
-
- conn = self.ConnectionCls(
- host=self.host,
- port=self.port,
- timeout=self.timeout.connect_timeout,
- strict=self.strict,
- **self.conn_kw
- )
+ log.debug(
+ "Starting new HTTP connection (%d): %s:%s",
+ self.num_connections,
+ self.host,
+ self.port or "80",
+ )
+
+ conn = self.ConnectionCls(
+ host=self.host,
+ port=self.port,
+ timeout=self.timeout.connect_timeout,
+ strict=self.strict,
+ **self.conn_kw
+ )
return conn
def _get_conn(self, timeout=None):
@@ -262,19 +262,19 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
except queue.Empty:
if self.block:
- raise EmptyPoolError(
- self,
- "Pool reached maximum size and no more connections are allowed.",
- )
+ raise EmptyPoolError(
+ self,
+ "Pool reached maximum size and no more connections are allowed.",
+ )
pass # Oh well, we'll create a new connection then
# If this is a persistent connection, check if it got disconnected
if conn and is_connection_dropped(conn):
log.debug("Resetting dropped connection: %s", self.host)
conn.close()
- if getattr(conn, "auto_open", 1) == 0:
+ if getattr(conn, "auto_open", 1) == 0:
# This is a proxied connection that has been mutated by
- # http.client._tunnel() and cannot be reused (since it would
+ # http.client._tunnel() and cannot be reused (since it would
# attempt to bypass the proxy)
conn = None
@@ -302,11 +302,11 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
pass
except queue.Full:
# This should never happen if self.block == True
- log.warning(
- "Connection pool is full, discarding connection: %s. Connection pool size: %s",
- self.host,
- self.pool.qsize(),
- )
+ log.warning(
+ "Connection pool is full, discarding connection: %s. Connection pool size: %s",
+ self.host,
+ self.pool.qsize(),
+ )
# Connection never got put back into the pool, close it.
if conn:
conn.close()
@@ -322,7 +322,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
pass
def _get_timeout(self, timeout):
- """Helper that always returns a :class:`urllib3.util.Timeout`"""
+ """Helper that always returns a :class:`urllib3.util.Timeout`"""
if timeout is _Default:
return self.timeout.clone()
@@ -337,30 +337,30 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
"""Is the error actually a timeout? Will raise a ReadTimeout or pass"""
if isinstance(err, SocketTimeout):
- raise ReadTimeoutError(
- self, url, "Read timed out. (read timeout=%s)" % timeout_value
- )
+ raise ReadTimeoutError(
+ self, url, "Read timed out. (read timeout=%s)" % timeout_value
+ )
# See the above comment about EAGAIN in Python 3. In Python 2 we have
# to specifically catch it and throw the timeout error
- if hasattr(err, "errno") and err.errno in _blocking_errnos:
- raise ReadTimeoutError(
- self, url, "Read timed out. (read timeout=%s)" % timeout_value
- )
+ if hasattr(err, "errno") and err.errno in _blocking_errnos:
+ raise ReadTimeoutError(
+ self, url, "Read timed out. (read timeout=%s)" % timeout_value
+ )
# Catch possible read timeouts thrown as SSL errors. If not the
# case, rethrow the original. We need to do this because of:
# http://bugs.python.org/issue10272
- if "timed out" in str(err) or "did not complete (read)" in str(
- err
- ): # Python < 2.7.4
- raise ReadTimeoutError(
- self, url, "Read timed out. (read timeout=%s)" % timeout_value
- )
-
- def _make_request(
- self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
- ):
+ if "timed out" in str(err) or "did not complete (read)" in str(
+ err
+ ): # Python < 2.7.4
+ raise ReadTimeoutError(
+ self, url, "Read timed out. (read timeout=%s)" % timeout_value
+ )
+
+ def _make_request(
+ self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
+ ):
"""
Perform a request on a given urllib connection object taken from our
pool.
@@ -389,36 +389,36 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
raise
- # conn.request() calls http.client.*.request, not the method in
+ # conn.request() calls http.client.*.request, not the method in
# urllib3.request. It also calls makefile (recv) on the socket.
- try:
- if chunked:
- conn.request_chunked(method, url, **httplib_request_kw)
- else:
- conn.request(method, url, **httplib_request_kw)
-
- # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
- # legitimately able to close the connection after sending a valid response.
- # With this behaviour, the received response is still readable.
- except BrokenPipeError:
- # Python 3
- pass
- except IOError as e:
- # Python 2 and macOS/Linux
- # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
- # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
- if e.errno not in {
- errno.EPIPE,
- errno.ESHUTDOWN,
- errno.EPROTOTYPE,
- }:
- raise
-
+ try:
+ if chunked:
+ conn.request_chunked(method, url, **httplib_request_kw)
+ else:
+ conn.request(method, url, **httplib_request_kw)
+
+ # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
+ # legitimately able to close the connection after sending a valid response.
+ # With this behaviour, the received response is still readable.
+ except BrokenPipeError:
+ # Python 3
+ pass
+ except IOError as e:
+ # Python 2 and macOS/Linux
+ # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
+ # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
+ if e.errno not in {
+ errno.EPIPE,
+ errno.ESHUTDOWN,
+ errno.EPROTOTYPE,
+ }:
+ raise
+
# Reset the timeout for the recv() on the socket
read_timeout = timeout_obj.read_timeout
# App Engine doesn't have a sock attr
- if getattr(conn, "sock", None):
+ if getattr(conn, "sock", None):
# In Python 3 socket.py will catch EAGAIN and return None when you
# try and read into the file pointer created by http.client, which
# instead raises a BadStatusLine exception. Instead of catching
@@ -426,8 +426,8 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# timeouts, check for a zero timeout before making the request.
if read_timeout == 0:
raise ReadTimeoutError(
- self, url, "Read timed out. (read timeout=%s)" % read_timeout
- )
+ self, url, "Read timed out. (read timeout=%s)" % read_timeout
+ )
if read_timeout is Timeout.DEFAULT_TIMEOUT:
conn.sock.settimeout(socket.getdefaulttimeout())
else: # None or a value
@@ -435,45 +435,45 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# Receive the response from the server
try:
- try:
- # Python 2.7, use buffering of HTTP responses
+ try:
+ # Python 2.7, use buffering of HTTP responses
httplib_response = conn.getresponse(buffering=True)
- except TypeError:
- # Python 3
+ except TypeError:
+ # Python 3
try:
httplib_response = conn.getresponse()
- except BaseException as e:
- # Remove the TypeError from the exception chain in
- # Python 3 (including for exceptions like SystemExit).
- # Otherwise it looks like a bug in the code.
+ except BaseException as e:
+ # Remove the TypeError from the exception chain in
+ # Python 3 (including for exceptions like SystemExit).
+ # Otherwise it looks like a bug in the code.
six.raise_from(e, None)
except (SocketTimeout, BaseSSLError, SocketError) as e:
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
raise
# AppEngine doesn't have a version attr.
- http_version = getattr(conn, "_http_vsn_str", "HTTP/?")
- log.debug(
- '%s://%s:%s "%s %s %s" %s %s',
- self.scheme,
- self.host,
- self.port,
- method,
- url,
- http_version,
- httplib_response.status,
- httplib_response.length,
- )
+ http_version = getattr(conn, "_http_vsn_str", "HTTP/?")
+ log.debug(
+ '%s://%s:%s "%s %s %s" %s %s',
+ self.scheme,
+ self.host,
+ self.port,
+ method,
+ url,
+ http_version,
+ httplib_response.status,
+ httplib_response.length,
+ )
try:
assert_header_parsing(httplib_response.msg)
except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3
log.warning(
- "Failed to parse headers (url=%s): %s",
- self._absolute_url(url),
- hpe,
- exc_info=True,
- )
+ "Failed to parse headers (url=%s): %s",
+ self._absolute_url(url),
+ hpe,
+ exc_info=True,
+ )
return httplib_response
@@ -484,8 +484,8 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
"""
Close all pooled connections and disable the pool.
"""
- if self.pool is None:
- return
+ if self.pool is None:
+ return
# Disable access to the pool
old_pool, self.pool = self.pool, None
@@ -503,13 +503,13 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
Check if the given ``url`` is a member of the same host as this
connection pool.
"""
- if url.startswith("/"):
+ if url.startswith("/"):
return True
# TODO: Add optional support for socket.gethostbyname checking.
scheme, host, port = get_host(url)
- if host is not None:
- host = _normalize_host(host, scheme=scheme)
+ if host is not None:
+ host = _normalize_host(host, scheme=scheme)
# Use explicit default port for comparison when none is given
if self.port and not port:
@@ -519,22 +519,22 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
return (scheme, host, port) == (self.scheme, self.host, self.port)
- def urlopen(
- self,
- method,
- url,
- body=None,
- headers=None,
- retries=None,
- redirect=True,
- assert_same_host=True,
- timeout=_Default,
- pool_timeout=None,
- release_conn=None,
- chunked=False,
- body_pos=None,
- **response_kw
- ):
+ def urlopen(
+ self,
+ method,
+ url,
+ body=None,
+ headers=None,
+ retries=None,
+ redirect=True,
+ assert_same_host=True,
+ timeout=_Default,
+ pool_timeout=None,
+ release_conn=None,
+ chunked=False,
+ body_pos=None,
+ **response_kw
+ ):
"""
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
@@ -555,12 +555,12 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
- :param url:
- The URL to perform the request on.
-
+ :param url:
+ The URL to perform the request on.
+
:param body:
- Data to send in the request body, either :class:`str`, :class:`bytes`,
- an iterable of :class:`str`/:class:`bytes`, or a file-like object.
+ Data to send in the request body, either :class:`str`, :class:`bytes`,
+ an iterable of :class:`str`/:class:`bytes`, or a file-like object.
:param headers:
Dictionary of custom headers to send, such as User-Agent,
@@ -590,7 +590,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
:param assert_same_host:
If ``True``, will make sure that the host of the pool requests is
- consistent else will raise HostChangedError. When ``False``, you can
+ consistent else will raise HostChangedError. When ``False``, you can
use the pool on an HTTP proxy and request foreign hosts.
:param timeout:
@@ -627,10 +627,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
Additional parameters are passed to
:meth:`urllib3.response.HTTPResponse.from_httplib`
"""
-
- parsed_url = parse_url(url)
- destination_scheme = parsed_url.scheme
-
+
+ parsed_url = parse_url(url)
+ destination_scheme = parsed_url.scheme
+
if headers is None:
headers = self.headers
@@ -638,18 +638,18 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if release_conn is None:
- release_conn = response_kw.get("preload_content", True)
+ release_conn = response_kw.get("preload_content", True)
# Check host
if assert_same_host and not self.is_same_host(url):
raise HostChangedError(self, url, retries)
- # Ensure that the URL we're connecting to is properly encoded
- if url.startswith("/"):
- url = six.ensure_str(_encode_target(url))
- else:
- url = six.ensure_str(parsed_url.url)
-
+ # Ensure that the URL we're connecting to is properly encoded
+ if url.startswith("/"):
+ url = six.ensure_str(_encode_target(url))
+ else:
+ url = six.ensure_str(parsed_url.url)
+
conn = None
# Track whether `conn` needs to be released before
@@ -660,17 +660,17 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
#
# See issue #651 [1] for details.
#
- # [1] <https://github.com/urllib3/urllib3/issues/651>
+ # [1] <https://github.com/urllib3/urllib3/issues/651>
release_this_conn = release_conn
- http_tunnel_required = connection_requires_http_tunnel(
- self.proxy, self.proxy_config, destination_scheme
- )
-
- # Merge the proxy headers. Only done when not using HTTP CONNECT. We
- # have to copy the headers dict so we can safely change it without those
- # changes being reflected in anyone else's copy.
- if not http_tunnel_required:
+ http_tunnel_required = connection_requires_http_tunnel(
+ self.proxy, self.proxy_config, destination_scheme
+ )
+
+ # Merge the proxy headers. Only done when not using HTTP CONNECT. We
+ # have to copy the headers dict so we can safely change it without those
+ # changes being reflected in anyone else's copy.
+ if not http_tunnel_required:
headers = headers.copy()
headers.update(self.proxy_headers)
@@ -693,22 +693,22 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
conn.timeout = timeout_obj.connect_timeout
- is_new_proxy_conn = self.proxy is not None and not getattr(
- conn, "sock", None
- )
- if is_new_proxy_conn and http_tunnel_required:
+ is_new_proxy_conn = self.proxy is not None and not getattr(
+ conn, "sock", None
+ )
+ if is_new_proxy_conn and http_tunnel_required:
self._prepare_proxy(conn)
# Make the request on the httplib connection object.
- httplib_response = self._make_request(
- conn,
- method,
- url,
- timeout=timeout_obj,
- body=body,
- headers=headers,
- chunked=chunked,
- )
+ httplib_response = self._make_request(
+ conn,
+ method,
+ url,
+ timeout=timeout_obj,
+ body=body,
+ headers=headers,
+ chunked=chunked,
+ )
# If we're going to release the connection in ``finally:``, then
# the response doesn't need to know about the connection. Otherwise
@@ -717,74 +717,74 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
response_conn = conn if not release_conn else None
# Pass method to Response for length checking
- response_kw["request_method"] = method
+ response_kw["request_method"] = method
# Import httplib's response into our own wrapper object
- response = self.ResponseCls.from_httplib(
- httplib_response,
- pool=self,
- connection=response_conn,
- retries=retries,
- **response_kw
- )
+ response = self.ResponseCls.from_httplib(
+ httplib_response,
+ pool=self,
+ connection=response_conn,
+ retries=retries,
+ **response_kw
+ )
# Everything went great!
clean_exit = True
- except EmptyPoolError:
- # Didn't get a connection from the pool, no need to clean up
- clean_exit = True
- release_this_conn = False
- raise
-
- except (
- TimeoutError,
- HTTPException,
- SocketError,
- ProtocolError,
- BaseSSLError,
- SSLError,
- CertificateError,
- ) as e:
+ except EmptyPoolError:
+ # Didn't get a connection from the pool, no need to clean up
+ clean_exit = True
+ release_this_conn = False
+ raise
+
+ except (
+ TimeoutError,
+ HTTPException,
+ SocketError,
+ ProtocolError,
+ BaseSSLError,
+ SSLError,
+ CertificateError,
+ ) as e:
# Discard the connection for these exceptions. It will be
# replaced during the next _get_conn() call.
clean_exit = False
-
- def _is_ssl_error_message_from_http_proxy(ssl_error):
- # We're trying to detect the message 'WRONG_VERSION_NUMBER' but
- # SSLErrors are kinda all over the place when it comes to the message,
- # so we try to cover our bases here!
- message = " ".join(re.split("[^a-z]", str(ssl_error).lower()))
- return (
- "wrong version number" in message or "unknown protocol" in message
- )
-
- # Try to detect a common user error with proxies which is to
- # set an HTTP proxy to be HTTPS when it should be 'http://'
- # (ie {'http': 'http://proxy', 'https': 'https://proxy'})
- # Instead we add a nice error message and point to a URL.
- if (
- isinstance(e, BaseSSLError)
- and self.proxy
- and _is_ssl_error_message_from_http_proxy(e)
- ):
- e = ProxyError(
- "Your proxy appears to only use HTTP and not HTTPS, "
- "try changing your proxy URL to be HTTP. See: "
- "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
- "#https-proxy-error-http-proxy",
- SSLError(e),
- )
- elif isinstance(e, (BaseSSLError, CertificateError)):
+
+ def _is_ssl_error_message_from_http_proxy(ssl_error):
+ # We're trying to detect the message 'WRONG_VERSION_NUMBER' but
+ # SSLErrors are kinda all over the place when it comes to the message,
+ # so we try to cover our bases here!
+ message = " ".join(re.split("[^a-z]", str(ssl_error).lower()))
+ return (
+ "wrong version number" in message or "unknown protocol" in message
+ )
+
+ # Try to detect a common user error with proxies which is to
+ # set an HTTP proxy to be HTTPS when it should be 'http://'
+ # (ie {'http': 'http://proxy', 'https': 'https://proxy'})
+ # Instead we add a nice error message and point to a URL.
+ if (
+ isinstance(e, BaseSSLError)
+ and self.proxy
+ and _is_ssl_error_message_from_http_proxy(e)
+ ):
+ e = ProxyError(
+ "Your proxy appears to only use HTTP and not HTTPS, "
+ "try changing your proxy URL to be HTTP. See: "
+ "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
+ "#https-proxy-error-http-proxy",
+ SSLError(e),
+ )
+ elif isinstance(e, (BaseSSLError, CertificateError)):
e = SSLError(e)
elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
- e = ProxyError("Cannot connect to proxy.", e)
+ e = ProxyError("Cannot connect to proxy.", e)
elif isinstance(e, (SocketError, HTTPException)):
- e = ProtocolError("Connection aborted.", e)
+ e = ProtocolError("Connection aborted.", e)
- retries = retries.increment(
- method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
- )
+ retries = retries.increment(
+ method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
+ )
retries.sleep()
# Keep track of the error for the retry warning.
@@ -807,87 +807,87 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
if not conn:
# Try again
- log.warning(
- "Retrying (%r) after connection broken by '%r': %s", retries, err, url
- )
- return self.urlopen(
- method,
- url,
- body,
- headers,
- retries,
- redirect,
- assert_same_host,
- timeout=timeout,
- pool_timeout=pool_timeout,
- release_conn=release_conn,
- chunked=chunked,
- body_pos=body_pos,
- **response_kw
- )
+ log.warning(
+ "Retrying (%r) after connection broken by '%r': %s", retries, err, url
+ )
+ return self.urlopen(
+ method,
+ url,
+ body,
+ headers,
+ retries,
+ redirect,
+ assert_same_host,
+ timeout=timeout,
+ pool_timeout=pool_timeout,
+ release_conn=release_conn,
+ chunked=chunked,
+ body_pos=body_pos,
+ **response_kw
+ )
# Handle redirect?
redirect_location = redirect and response.get_redirect_location()
if redirect_location:
if response.status == 303:
- method = "GET"
+ method = "GET"
try:
retries = retries.increment(method, url, response=response, _pool=self)
except MaxRetryError:
if retries.raise_on_redirect:
- response.drain_conn()
+ response.drain_conn()
raise
return response
- response.drain_conn()
+ response.drain_conn()
retries.sleep_for_retry(response)
log.debug("Redirecting %s -> %s", url, redirect_location)
return self.urlopen(
- method,
- redirect_location,
- body,
- headers,
- retries=retries,
- redirect=redirect,
+ method,
+ redirect_location,
+ body,
+ headers,
+ retries=retries,
+ redirect=redirect,
assert_same_host=assert_same_host,
- timeout=timeout,
- pool_timeout=pool_timeout,
- release_conn=release_conn,
- chunked=chunked,
- body_pos=body_pos,
- **response_kw
- )
+ timeout=timeout,
+ pool_timeout=pool_timeout,
+ release_conn=release_conn,
+ chunked=chunked,
+ body_pos=body_pos,
+ **response_kw
+ )
# Check if we should retry the HTTP response.
- has_retry_after = bool(response.getheader("Retry-After"))
+ has_retry_after = bool(response.getheader("Retry-After"))
if retries.is_retry(method, response.status, has_retry_after):
try:
retries = retries.increment(method, url, response=response, _pool=self)
except MaxRetryError:
if retries.raise_on_status:
- response.drain_conn()
+ response.drain_conn()
raise
return response
- response.drain_conn()
+ response.drain_conn()
retries.sleep(response)
log.debug("Retry: %s", url)
return self.urlopen(
- method,
- url,
- body,
- headers,
- retries=retries,
- redirect=redirect,
+ method,
+ url,
+ body,
+ headers,
+ retries=retries,
+ redirect=redirect,
assert_same_host=assert_same_host,
- timeout=timeout,
- pool_timeout=pool_timeout,
+ timeout=timeout,
+ pool_timeout=pool_timeout,
release_conn=release_conn,
- chunked=chunked,
- body_pos=body_pos,
- **response_kw
- )
+ chunked=chunked,
+ body_pos=body_pos,
+ **response_kw
+ )
return response
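
The hunk above covers the whole retry/redirect loop in urlopen. Below is a minimal sketch of driving it directly, assuming urllib3 1.26.x; the host, path, and retry counts are illustrative, not taken from the patch:

    from urllib3 import HTTPConnectionPool
    from urllib3.util.retry import Retry

    pool = HTTPConnectionPool("example.com", port=80, maxsize=2)

    # Retry.increment() is what the except-block above calls on every
    # failure; redirect=2 exercises the 303 -> GET rewrite path.
    retries = Retry(total=3, redirect=2, raise_on_redirect=False)

    resp = pool.urlopen(
        "GET",
        "/",
        retries=retries,
        redirect=True,      # follow redirects via the recursive call
        release_conn=None,  # defaults to preload_content, i.e. True
    )
    print(resp.status)
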
@@ -896,62 +896,62 @@ class HTTPSConnectionPool(HTTPConnectionPool):
"""
Same as :class:`.HTTPConnectionPool`, but HTTPS.
- :class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
+ :class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
``assert_hostname`` and ``host`` in this order to verify connections.
If ``assert_hostname`` is False, no verification is done.
The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
- ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
- is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
+ ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
+ is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
the connection socket into an SSL socket.
"""
- scheme = "https"
+ scheme = "https"
ConnectionCls = HTTPSConnection
- def __init__(
- self,
- host,
- port=None,
- strict=False,
- timeout=Timeout.DEFAULT_TIMEOUT,
- maxsize=1,
- block=False,
- headers=None,
- retries=None,
- _proxy=None,
- _proxy_headers=None,
- key_file=None,
- cert_file=None,
- cert_reqs=None,
- key_password=None,
- ca_certs=None,
- ssl_version=None,
- assert_hostname=None,
- assert_fingerprint=None,
- ca_cert_dir=None,
- **conn_kw
- ):
-
- HTTPConnectionPool.__init__(
- self,
- host,
- port,
- strict,
- timeout,
- maxsize,
- block,
- headers,
- retries,
- _proxy,
- _proxy_headers,
- **conn_kw
- )
+ def __init__(
+ self,
+ host,
+ port=None,
+ strict=False,
+ timeout=Timeout.DEFAULT_TIMEOUT,
+ maxsize=1,
+ block=False,
+ headers=None,
+ retries=None,
+ _proxy=None,
+ _proxy_headers=None,
+ key_file=None,
+ cert_file=None,
+ cert_reqs=None,
+ key_password=None,
+ ca_certs=None,
+ ssl_version=None,
+ assert_hostname=None,
+ assert_fingerprint=None,
+ ca_cert_dir=None,
+ **conn_kw
+ ):
+
+ HTTPConnectionPool.__init__(
+ self,
+ host,
+ port,
+ strict,
+ timeout,
+ maxsize,
+ block,
+ headers,
+ retries,
+ _proxy,
+ _proxy_headers,
+ **conn_kw
+ )
self.key_file = key_file
self.cert_file = cert_file
self.cert_reqs = cert_reqs
- self.key_password = key_password
+ self.key_password = key_password
self.ca_certs = ca_certs
self.ca_cert_dir = ca_cert_dir
self.ssl_version = ssl_version
@@ -965,50 +965,50 @@ class HTTPSConnectionPool(HTTPConnectionPool):
"""
if isinstance(conn, VerifiedHTTPSConnection):
- conn.set_cert(
- key_file=self.key_file,
- key_password=self.key_password,
- cert_file=self.cert_file,
- cert_reqs=self.cert_reqs,
- ca_certs=self.ca_certs,
- ca_cert_dir=self.ca_cert_dir,
- assert_hostname=self.assert_hostname,
- assert_fingerprint=self.assert_fingerprint,
- )
+ conn.set_cert(
+ key_file=self.key_file,
+ key_password=self.key_password,
+ cert_file=self.cert_file,
+ cert_reqs=self.cert_reqs,
+ ca_certs=self.ca_certs,
+ ca_cert_dir=self.ca_cert_dir,
+ assert_hostname=self.assert_hostname,
+ assert_fingerprint=self.assert_fingerprint,
+ )
conn.ssl_version = self.ssl_version
return conn
def _prepare_proxy(self, conn):
"""
- Establishes a tunnel connection through HTTP CONNECT.
-
- The tunnel connection is established early because otherwise httplib would
- improperly set the Host: header to the proxy's IP:port.
+ Establishes a tunnel connection through HTTP CONNECT.
+
+ The tunnel connection is established early because otherwise httplib would
+ improperly set the Host: header to the proxy's IP:port.
"""
-
- conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
-
- if self.proxy.scheme == "https":
- conn.tls_in_tls_required = True
-
+
+ conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
+
+ if self.proxy.scheme == "https":
+ conn.tls_in_tls_required = True
+
conn.connect()
def _new_conn(self):
"""
- Return a fresh :class:`http.client.HTTPSConnection`.
+ Return a fresh :class:`http.client.HTTPSConnection`.
"""
self.num_connections += 1
- log.debug(
- "Starting new HTTPS connection (%d): %s:%s",
- self.num_connections,
- self.host,
- self.port or "443",
- )
+ log.debug(
+ "Starting new HTTPS connection (%d): %s:%s",
+ self.num_connections,
+ self.host,
+ self.port or "443",
+ )
if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
- raise SSLError(
- "Can't connect to HTTPS URL because the SSL module is not available."
- )
+ raise SSLError(
+ "Can't connect to HTTPS URL because the SSL module is not available."
+ )
actual_host = self.host
actual_port = self.port
@@ -1016,16 +1016,16 @@ class HTTPSConnectionPool(HTTPConnectionPool):
actual_host = self.proxy.host
actual_port = self.proxy.port
- conn = self.ConnectionCls(
- host=actual_host,
- port=actual_port,
- timeout=self.timeout.connect_timeout,
- strict=self.strict,
- cert_file=self.cert_file,
- key_file=self.key_file,
- key_password=self.key_password,
- **self.conn_kw
- )
+ conn = self.ConnectionCls(
+ host=actual_host,
+ port=actual_port,
+ timeout=self.timeout.connect_timeout,
+ strict=self.strict,
+ cert_file=self.cert_file,
+ key_file=self.key_file,
+ key_password=self.key_password,
+ **self.conn_kw
+ )
return self._prepare_conn(conn)
@@ -1036,32 +1036,32 @@ class HTTPSConnectionPool(HTTPConnectionPool):
super(HTTPSConnectionPool, self)._validate_conn(conn)
# Force connect early to allow us to validate the connection.
- if not getattr(conn, "sock", None): # AppEngine might not have `.sock`
+ if not getattr(conn, "sock", None): # AppEngine might not have `.sock`
conn.connect()
if not conn.is_verified:
- warnings.warn(
- (
- "Unverified HTTPS request is being made to host '%s'. "
- "Adding certificate verification is strongly advised. See: "
- "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
- "#ssl-warnings" % conn.host
- ),
- InsecureRequestWarning,
- )
-
- if getattr(conn, "proxy_is_verified", None) is False:
- warnings.warn(
- (
- "Unverified HTTPS connection done to an HTTPS proxy. "
- "Adding certificate verification is strongly advised. See: "
- "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
- "#ssl-warnings"
- ),
- InsecureRequestWarning,
- )
-
-
+ warnings.warn(
+ (
+ "Unverified HTTPS request is being made to host '%s'. "
+ "Adding certificate verification is strongly advised. See: "
+ "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
+ "#ssl-warnings" % conn.host
+ ),
+ InsecureRequestWarning,
+ )
+
+ if getattr(conn, "proxy_is_verified", None) is False:
+ warnings.warn(
+ (
+ "Unverified HTTPS connection done to an HTTPS proxy. "
+ "Adding certificate verification is strongly advised. See: "
+ "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
+ "#ssl-warnings"
+ ),
+ InsecureRequestWarning,
+ )
+
+
def connection_from_url(url, **kw):
"""
Given a url, return an :class:`.ConnectionPool` instance of its host.
@@ -1084,25 +1084,25 @@ def connection_from_url(url, **kw):
"""
scheme, host, port = get_host(url)
port = port or port_by_scheme.get(scheme, 80)
- if scheme == "https":
+ if scheme == "https":
return HTTPSConnectionPool(host, port=port, **kw)
else:
return HTTPConnectionPool(host, port=port, **kw)
-def _normalize_host(host, scheme):
+def _normalize_host(host, scheme):
"""
- Normalize hosts for comparisons and use with sockets.
+ Normalize hosts for comparisons and use with sockets.
"""
- host = normalize_host(host, scheme)
-
+ host = normalize_host(host, scheme)
+
# httplib doesn't like it when we include brackets in IPv6 addresses
# Specifically, if we include brackets but also pass the port then
# httplib crazily doubles up the square brackets on the Host header.
# Instead, we need to make sure we never pass ``None`` as the port.
# However, for backward compatibility reasons we can't actually
# *assert* that. See http://bugs.python.org/issue28539
- if host.startswith("[") and host.endswith("]"):
- host = host[1:-1]
+ if host.startswith("[") and host.endswith("]"):
+ host = host[1:-1]
return host
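
The tail of this file shows connection_from_url dispatching on scheme and _normalize_host stripping IPv6 brackets before the host reaches httplib. A short sketch of both behaviors; the address is illustrative:

    from urllib3 import connection_from_url

    # Scheme dispatch from the hunk above: https -> HTTPSConnectionPool.
    pool = connection_from_url("https://example.com/")
    print(type(pool).__name__)  # HTTPSConnectionPool

    # The bracket stripping _normalize_host applies, so httplib never
    # doubles the brackets in the Host header (see bpo-28539 above).
    host = "[2001:db8::1]"
    if host.startswith("[") and host.endswith("]"):
        host = host[1:-1]
    print(host)  # 2001:db8::1
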
diff --git a/contrib/python/urllib3/urllib3/contrib/_appengine_environ.py b/contrib/python/urllib3/urllib3/contrib/_appengine_environ.py
index 8765b907d7..7967426929 100644
--- a/contrib/python/urllib3/urllib3/contrib/_appengine_environ.py
+++ b/contrib/python/urllib3/urllib3/contrib/_appengine_environ.py
@@ -1,36 +1,36 @@
-"""
-This module provides means to detect the App Engine environment.
-"""
-
-import os
-
-
-def is_appengine():
- return is_local_appengine() or is_prod_appengine()
-
-
-def is_appengine_sandbox():
- """Reports if the app is running in the first generation sandbox.
-
- The second generation runtimes are technically still in a sandbox, but it
- is much less restrictive, so generally you shouldn't need to check for it.
- see https://cloud.google.com/appengine/docs/standard/runtimes
- """
- return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27"
-
-
-def is_local_appengine():
- return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
- "SERVER_SOFTWARE", ""
- ).startswith("Development/")
-
-
-def is_prod_appengine():
- return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
- "SERVER_SOFTWARE", ""
- ).startswith("Google App Engine/")
-
-
-def is_prod_appengine_mvms():
- """Deprecated."""
- return False
+"""
+This module provides means to detect the App Engine environment.
+"""
+
+import os
+
+
+def is_appengine():
+ return is_local_appengine() or is_prod_appengine()
+
+
+def is_appengine_sandbox():
+ """Reports if the app is running in the first generation sandbox.
+
+ The second generation runtimes are technically still in a sandbox, but it
+ is much less restrictive, so generally you shouldn't need to check for it.
+ see https://cloud.google.com/appengine/docs/standard/runtimes
+ """
+ return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27"
+
+
+def is_local_appengine():
+ return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
+ "SERVER_SOFTWARE", ""
+ ).startswith("Development/")
+
+
+def is_prod_appengine():
+ return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
+ "SERVER_SOFTWARE", ""
+ ).startswith("Google App Engine/")
+
+
+def is_prod_appengine_mvms():
+ """Deprecated."""
+ return False
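
Since the whole detection module fits in this hunk, its behavior can be exercised by faking the two environment variables it reads; the values below are illustrative:

    import os
    from unittest import mock

    from urllib3.contrib import _appengine_environ as env

    fake = {"APPENGINE_RUNTIME": "python27",
            "SERVER_SOFTWARE": "Development/2.0"}
    with mock.patch.dict(os.environ, fake, clear=True):
        print(env.is_local_appengine())    # True: SERVER_SOFTWARE matches
        print(env.is_appengine_sandbox())  # True: runtime is python27

    with mock.patch.dict(os.environ, {}, clear=True):
        print(env.is_appengine())          # False outside App Engine
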
diff --git a/contrib/python/urllib3/urllib3/contrib/_securetransport/bindings.py b/contrib/python/urllib3/urllib3/contrib/_securetransport/bindings.py
index 264d564dbd..fc5a89a038 100644
--- a/contrib/python/urllib3/urllib3/contrib/_securetransport/bindings.py
+++ b/contrib/python/urllib3/urllib3/contrib/_securetransport/bindings.py
@@ -33,60 +33,60 @@ from __future__ import absolute_import
import platform
from ctypes import (
- CDLL,
- CFUNCTYPE,
- POINTER,
- c_bool,
- c_byte,
- c_char_p,
- c_int32,
- c_long,
- c_size_t,
- c_uint32,
- c_ulong,
- c_void_p,
+ CDLL,
+ CFUNCTYPE,
+ POINTER,
+ c_bool,
+ c_byte,
+ c_char_p,
+ c_int32,
+ c_long,
+ c_size_t,
+ c_uint32,
+ c_ulong,
+ c_void_p,
)
-from ctypes.util import find_library
+from ctypes.util import find_library
+
+from ...packages.six import raise_from
-from ...packages.six import raise_from
-
-if platform.system() != "Darwin":
- raise ImportError("Only macOS is supported")
+if platform.system() != "Darwin":
+ raise ImportError("Only macOS is supported")
version = platform.mac_ver()[0]
-version_info = tuple(map(int, version.split(".")))
+version_info = tuple(map(int, version.split(".")))
if version_info < (10, 8):
raise OSError(
- "Only OS X 10.8 and newer are supported, not %s.%s"
- % (version_info[0], version_info[1])
+ "Only OS X 10.8 and newer are supported, not %s.%s"
+ % (version_info[0], version_info[1])
)
-def load_cdll(name, macos10_16_path):
- """Loads a CDLL by name, falling back to known path on 10.16+"""
- try:
- # Big Sur is technically 11 but we use 10.16 due to the Big Sur
- # beta being labeled as 10.16.
- if version_info >= (10, 16):
- path = macos10_16_path
- else:
- path = find_library(name)
- if not path:
- raise OSError # Caught and reraised as 'ImportError'
- return CDLL(path, use_errno=True)
- except OSError:
- raise_from(ImportError("The library %s failed to load" % name), None)
-
-
-Security = load_cdll(
- "Security", "/System/Library/Frameworks/Security.framework/Security"
-)
-CoreFoundation = load_cdll(
- "CoreFoundation",
- "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
-)
-
-
+def load_cdll(name, macos10_16_path):
+ """Loads a CDLL by name, falling back to known path on 10.16+"""
+ try:
+ # Big Sur is technically 11 but we use 10.16 due to the Big Sur
+ # beta being labeled as 10.16.
+ if version_info >= (10, 16):
+ path = macos10_16_path
+ else:
+ path = find_library(name)
+ if not path:
+ raise OSError # Caught and reraised as 'ImportError'
+ return CDLL(path, use_errno=True)
+ except OSError:
+ raise_from(ImportError("The library %s failed to load" % name), None)
+
+
+Security = load_cdll(
+ "Security", "/System/Library/Frameworks/Security.framework/Security"
+)
+CoreFoundation = load_cdll(
+ "CoreFoundation",
+ "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
+)
+
+
Boolean = c_bool
CFIndex = c_long
CFStringEncoding = c_uint32
@@ -154,19 +154,19 @@ try:
Security.SecKeyGetTypeID.argtypes = []
Security.SecKeyGetTypeID.restype = CFTypeID
- Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef]
+ Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef]
Security.SecCertificateCreateWithData.restype = SecCertificateRef
- Security.SecCertificateCopyData.argtypes = [SecCertificateRef]
+ Security.SecCertificateCopyData.argtypes = [SecCertificateRef]
Security.SecCertificateCopyData.restype = CFDataRef
- Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
+ Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
Security.SecCopyErrorMessageString.restype = CFStringRef
Security.SecIdentityCreateWithCertificate.argtypes = [
CFTypeRef,
SecCertificateRef,
- POINTER(SecIdentityRef),
+ POINTER(SecIdentityRef),
]
Security.SecIdentityCreateWithCertificate.restype = OSStatus
@@ -176,133 +176,133 @@ try:
c_void_p,
Boolean,
c_void_p,
- POINTER(SecKeychainRef),
+ POINTER(SecKeychainRef),
]
Security.SecKeychainCreate.restype = OSStatus
- Security.SecKeychainDelete.argtypes = [SecKeychainRef]
+ Security.SecKeychainDelete.argtypes = [SecKeychainRef]
Security.SecKeychainDelete.restype = OSStatus
Security.SecPKCS12Import.argtypes = [
CFDataRef,
CFDictionaryRef,
- POINTER(CFArrayRef),
+ POINTER(CFArrayRef),
]
Security.SecPKCS12Import.restype = OSStatus
SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t))
- SSLWriteFunc = CFUNCTYPE(
- OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)
- )
+ SSLWriteFunc = CFUNCTYPE(
+ OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)
+ )
- Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc]
+ Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc]
Security.SSLSetIOFuncs.restype = OSStatus
- Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t]
+ Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t]
Security.SSLSetPeerID.restype = OSStatus
- Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef]
+ Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef]
Security.SSLSetCertificate.restype = OSStatus
- Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean]
+ Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean]
Security.SSLSetCertificateAuthorities.restype = OSStatus
- Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef]
+ Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef]
Security.SSLSetConnection.restype = OSStatus
- Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t]
+ Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t]
Security.SSLSetPeerDomainName.restype = OSStatus
- Security.SSLHandshake.argtypes = [SSLContextRef]
+ Security.SSLHandshake.argtypes = [SSLContextRef]
Security.SSLHandshake.restype = OSStatus
- Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
+ Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
Security.SSLRead.restype = OSStatus
- Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
+ Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
Security.SSLWrite.restype = OSStatus
- Security.SSLClose.argtypes = [SSLContextRef]
+ Security.SSLClose.argtypes = [SSLContextRef]
Security.SSLClose.restype = OSStatus
- Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
+ Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
Security.SSLGetNumberSupportedCiphers.restype = OSStatus
Security.SSLGetSupportedCiphers.argtypes = [
SSLContextRef,
POINTER(SSLCipherSuite),
- POINTER(c_size_t),
+ POINTER(c_size_t),
]
Security.SSLGetSupportedCiphers.restype = OSStatus
Security.SSLSetEnabledCiphers.argtypes = [
SSLContextRef,
POINTER(SSLCipherSuite),
- c_size_t,
+ c_size_t,
]
Security.SSLSetEnabledCiphers.restype = OSStatus
- Security.SSLGetNumberEnabledCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
+ Security.SSLGetNumberEnabledCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
Security.SSLGetNumberEnabledCiphers.restype = OSStatus
Security.SSLGetEnabledCiphers.argtypes = [
SSLContextRef,
POINTER(SSLCipherSuite),
- POINTER(c_size_t),
+ POINTER(c_size_t),
]
Security.SSLGetEnabledCiphers.restype = OSStatus
- Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)]
+ Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)]
Security.SSLGetNegotiatedCipher.restype = OSStatus
Security.SSLGetNegotiatedProtocolVersion.argtypes = [
SSLContextRef,
- POINTER(SSLProtocol),
+ POINTER(SSLProtocol),
]
Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus
- Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)]
+ Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)]
Security.SSLCopyPeerTrust.restype = OSStatus
- Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef]
+ Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef]
Security.SecTrustSetAnchorCertificates.restype = OSStatus
- Security.SecTrustSetAnchorCertificatesOnly.argtypes = [SecTrustRef, Boolean]
+ Security.SecTrustSetAnchorCertificatesOnly.argtypes = [SecTrustRef, Boolean]
Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus
- Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)]
+ Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)]
Security.SecTrustEvaluate.restype = OSStatus
- Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef]
+ Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef]
Security.SecTrustGetCertificateCount.restype = CFIndex
- Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex]
+ Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex]
Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef
Security.SSLCreateContext.argtypes = [
CFAllocatorRef,
SSLProtocolSide,
- SSLConnectionType,
+ SSLConnectionType,
]
Security.SSLCreateContext.restype = SSLContextRef
- Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean]
+ Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean]
Security.SSLSetSessionOption.restype = OSStatus
- Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol]
+ Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol]
Security.SSLSetProtocolVersionMin.restype = OSStatus
- Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
+ Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
Security.SSLSetProtocolVersionMax.restype = OSStatus
- try:
- Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
- Security.SSLSetALPNProtocols.restype = OSStatus
- except AttributeError:
- # Supported only in 10.12+
- pass
-
- Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
+ try:
+ Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
+ Security.SSLSetALPNProtocols.restype = OSStatus
+ except AttributeError:
+ # Supported only in 10.12+
+ pass
+
+ Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
Security.SecCopyErrorMessageString.restype = CFStringRef
Security.SSLReadFunc = SSLReadFunc
@@ -318,47 +318,47 @@ try:
Security.OSStatus = OSStatus
Security.kSecImportExportPassphrase = CFStringRef.in_dll(
- Security, "kSecImportExportPassphrase"
+ Security, "kSecImportExportPassphrase"
)
Security.kSecImportItemIdentity = CFStringRef.in_dll(
- Security, "kSecImportItemIdentity"
+ Security, "kSecImportItemIdentity"
)
# CoreFoundation time!
- CoreFoundation.CFRetain.argtypes = [CFTypeRef]
+ CoreFoundation.CFRetain.argtypes = [CFTypeRef]
CoreFoundation.CFRetain.restype = CFTypeRef
- CoreFoundation.CFRelease.argtypes = [CFTypeRef]
+ CoreFoundation.CFRelease.argtypes = [CFTypeRef]
CoreFoundation.CFRelease.restype = None
- CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef]
+ CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef]
CoreFoundation.CFGetTypeID.restype = CFTypeID
CoreFoundation.CFStringCreateWithCString.argtypes = [
CFAllocatorRef,
c_char_p,
- CFStringEncoding,
+ CFStringEncoding,
]
CoreFoundation.CFStringCreateWithCString.restype = CFStringRef
- CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding]
+ CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding]
CoreFoundation.CFStringGetCStringPtr.restype = c_char_p
CoreFoundation.CFStringGetCString.argtypes = [
CFStringRef,
c_char_p,
CFIndex,
- CFStringEncoding,
+ CFStringEncoding,
]
CoreFoundation.CFStringGetCString.restype = c_bool
- CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex]
+ CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex]
CoreFoundation.CFDataCreate.restype = CFDataRef
- CoreFoundation.CFDataGetLength.argtypes = [CFDataRef]
+ CoreFoundation.CFDataGetLength.argtypes = [CFDataRef]
CoreFoundation.CFDataGetLength.restype = CFIndex
- CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef]
+ CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef]
CoreFoundation.CFDataGetBytePtr.restype = c_void_p
CoreFoundation.CFDictionaryCreate.argtypes = [
@@ -367,11 +367,11 @@ try:
POINTER(CFTypeRef),
CFIndex,
CFDictionaryKeyCallBacks,
- CFDictionaryValueCallBacks,
+ CFDictionaryValueCallBacks,
]
CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef
- CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef]
+ CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef]
CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef
CoreFoundation.CFArrayCreate.argtypes = [
@@ -385,30 +385,30 @@ try:
CoreFoundation.CFArrayCreateMutable.argtypes = [
CFAllocatorRef,
CFIndex,
- CFArrayCallBacks,
+ CFArrayCallBacks,
]
CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef
- CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p]
+ CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p]
CoreFoundation.CFArrayAppendValue.restype = None
- CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef]
+ CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef]
CoreFoundation.CFArrayGetCount.restype = CFIndex
- CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex]
+ CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex]
CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p
CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll(
- CoreFoundation, "kCFAllocatorDefault"
- )
- CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(
- CoreFoundation, "kCFTypeArrayCallBacks"
+ CoreFoundation, "kCFAllocatorDefault"
)
+ CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(
+ CoreFoundation, "kCFTypeArrayCallBacks"
+ )
CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll(
- CoreFoundation, "kCFTypeDictionaryKeyCallBacks"
+ CoreFoundation, "kCFTypeDictionaryKeyCallBacks"
)
CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll(
- CoreFoundation, "kCFTypeDictionaryValueCallBacks"
+ CoreFoundation, "kCFTypeDictionaryValueCallBacks"
)
CoreFoundation.CFTypeRef = CFTypeRef
@@ -417,7 +417,7 @@ try:
CoreFoundation.CFDictionaryRef = CFDictionaryRef
except (AttributeError):
- raise ImportError("Error initializing ctypes")
+ raise ImportError("Error initializing ctypes")
class CFConst(object):
@@ -425,7 +425,7 @@ class CFConst(object):
A class object that acts as essentially a namespace for CoreFoundation
constants.
"""
-
+
kCFStringEncodingUTF8 = CFStringEncoding(0x08000100)
@@ -433,7 +433,7 @@ class SecurityConst(object):
"""
A class object that acts as essentially a namespace for Security constants.
"""
-
+
kSSLSessionOptionBreakOnServerAuth = 0
kSSLProtocol2 = 1
@@ -441,9 +441,9 @@ class SecurityConst(object):
kTLSProtocol1 = 4
kTLSProtocol11 = 7
kTLSProtocol12 = 8
- # SecureTransport does not support TLS 1.3 even if there's a constant for it
- kTLSProtocol13 = 10
- kTLSProtocolMaxSupported = 999
+ # SecureTransport does not support TLS 1.3 even if there's a constant for it
+ kTLSProtocol13 = 10
+ kTLSProtocolMaxSupported = 999
kSSLClientSide = 1
kSSLStreamType = 0
@@ -486,13 +486,13 @@ class SecurityConst(object):
errSecInvalidTrustSettings = -25262
# Cipher suites. We only pick the ones our default cipher string allows.
- # Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values
+ # Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values
TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C
TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030
TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B
TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F
- TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9
- TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8
+ TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9
+ TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8
TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F
TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E
TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024
@@ -515,5 +515,5 @@ class SecurityConst(object):
TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F
TLS_AES_128_GCM_SHA256 = 0x1301
TLS_AES_256_GCM_SHA384 = 0x1302
- TLS_AES_128_CCM_8_SHA256 = 0x1305
- TLS_AES_128_CCM_SHA256 = 0x1304
+ TLS_AES_128_CCM_8_SHA256 = 0x1305
+ TLS_AES_128_CCM_SHA256 = 0x1304
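
Every symbol touched in this hunk follows the same ctypes pattern: assign argtypes/restype so calls marshal correctly. A self-contained sketch of the pattern against libc's strlen, assuming a Unix-like system rather than the Security framework:

    import ctypes
    import ctypes.util

    libc = ctypes.CDLL(ctypes.util.find_library("c"), use_errno=True)

    # Without these declarations ctypes assumes int-sized arguments and
    # return values, which truncates pointers and size_t on 64-bit
    # platforms -- the reason every symbol above gets a prototype.
    libc.strlen.argtypes = [ctypes.c_char_p]
    libc.strlen.restype = ctypes.c_size_t

    print(libc.strlen(b"secure transport"))  # 16
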
diff --git a/contrib/python/urllib3/urllib3/contrib/_securetransport/low_level.py b/contrib/python/urllib3/urllib3/contrib/_securetransport/low_level.py
index fa0b245d27..bc85fbb959 100644
--- a/contrib/python/urllib3/urllib3/contrib/_securetransport/low_level.py
+++ b/contrib/python/urllib3/urllib3/contrib/_securetransport/low_level.py
@@ -10,13 +10,13 @@ appropriate and useful assistance to the higher-level code.
import base64
import ctypes
import itertools
-import os
-import re
+import os
+import re
import ssl
-import struct
+import struct
import tempfile
-from .bindings import CFConst, CoreFoundation, Security
+from .bindings import CFConst, CoreFoundation, Security
# This regular expression is used to grab PEM data out of a PEM bundle.
_PEM_CERTS_RE = re.compile(
@@ -56,51 +56,51 @@ def _cf_dictionary_from_tuples(tuples):
)
-def _cfstr(py_bstr):
- """
- Given Python binary data, create a CFString.
- The string must be CFReleased by the caller.
- """
- c_str = ctypes.c_char_p(py_bstr)
- cf_str = CoreFoundation.CFStringCreateWithCString(
- CoreFoundation.kCFAllocatorDefault,
- c_str,
- CFConst.kCFStringEncodingUTF8,
- )
- return cf_str
-
-
-def _create_cfstring_array(lst):
- """
- Given a list of Python binary data, create an associated CFMutableArray.
- The array must be CFReleased by the caller.
-
- Raises an ssl.SSLError on failure.
- """
- cf_arr = None
- try:
- cf_arr = CoreFoundation.CFArrayCreateMutable(
- CoreFoundation.kCFAllocatorDefault,
- 0,
- ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
- )
- if not cf_arr:
- raise MemoryError("Unable to allocate memory!")
- for item in lst:
- cf_str = _cfstr(item)
- if not cf_str:
- raise MemoryError("Unable to allocate memory!")
- try:
- CoreFoundation.CFArrayAppendValue(cf_arr, cf_str)
- finally:
- CoreFoundation.CFRelease(cf_str)
- except BaseException as e:
- if cf_arr:
- CoreFoundation.CFRelease(cf_arr)
- raise ssl.SSLError("Unable to allocate array: %s" % (e,))
- return cf_arr
-
-
+def _cfstr(py_bstr):
+ """
+ Given Python binary data, create a CFString.
+ The string must be CFReleased by the caller.
+ """
+ c_str = ctypes.c_char_p(py_bstr)
+ cf_str = CoreFoundation.CFStringCreateWithCString(
+ CoreFoundation.kCFAllocatorDefault,
+ c_str,
+ CFConst.kCFStringEncodingUTF8,
+ )
+ return cf_str
+
+
+def _create_cfstring_array(lst):
+ """
+ Given a list of Python binary data, create an associated CFMutableArray.
+ The array must be CFReleased by the caller.
+
+ Raises an ssl.SSLError on failure.
+ """
+ cf_arr = None
+ try:
+ cf_arr = CoreFoundation.CFArrayCreateMutable(
+ CoreFoundation.kCFAllocatorDefault,
+ 0,
+ ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
+ )
+ if not cf_arr:
+ raise MemoryError("Unable to allocate memory!")
+ for item in lst:
+ cf_str = _cfstr(item)
+ if not cf_str:
+ raise MemoryError("Unable to allocate memory!")
+ try:
+ CoreFoundation.CFArrayAppendValue(cf_arr, cf_str)
+ finally:
+ CoreFoundation.CFRelease(cf_str)
+ except BaseException as e:
+ if cf_arr:
+ CoreFoundation.CFRelease(cf_arr)
+ raise ssl.SSLError("Unable to allocate array: %s" % (e,))
+ return cf_arr
+
+
def _cf_string_to_unicode(value):
"""
Creates a Unicode string from a CFString object. Used entirely for error
@@ -111,18 +111,18 @@ def _cf_string_to_unicode(value):
value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p))
string = CoreFoundation.CFStringGetCStringPtr(
- value_as_void_p, CFConst.kCFStringEncodingUTF8
+ value_as_void_p, CFConst.kCFStringEncodingUTF8
)
if string is None:
buffer = ctypes.create_string_buffer(1024)
result = CoreFoundation.CFStringGetCString(
- value_as_void_p, buffer, 1024, CFConst.kCFStringEncodingUTF8
+ value_as_void_p, buffer, 1024, CFConst.kCFStringEncodingUTF8
)
if not result:
- raise OSError("Error copying C string from CFStringRef")
+ raise OSError("Error copying C string from CFStringRef")
string = buffer.value
if string is not None:
- string = string.decode("utf-8")
+ string = string.decode("utf-8")
return string
@@ -138,8 +138,8 @@ def _assert_no_error(error, exception_class=None):
output = _cf_string_to_unicode(cf_error_string)
CoreFoundation.CFRelease(cf_error_string)
- if output is None or output == u"":
- output = u"OSStatus %s" % error
+ if output is None or output == u"":
+ output = u"OSStatus %s" % error
if exception_class is None:
exception_class = ssl.SSLError
@@ -152,11 +152,11 @@ def _cert_array_from_pem(pem_bundle):
Given a bundle of certs in PEM format, turns them into a CFArray of certs
that can be used to validate a cert chain.
"""
- # Normalize the PEM bundle's line endings.
- pem_bundle = pem_bundle.replace(b"\r\n", b"\n")
-
+ # Normalize the PEM bundle's line endings.
+ pem_bundle = pem_bundle.replace(b"\r\n", b"\n")
+
der_certs = [
- base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle)
+ base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle)
]
if not der_certs:
raise ssl.SSLError("No root certificates specified")
@@ -164,7 +164,7 @@ def _cert_array_from_pem(pem_bundle):
cert_array = CoreFoundation.CFArrayCreateMutable(
CoreFoundation.kCFAllocatorDefault,
0,
- ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
+ ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
)
if not cert_array:
raise ssl.SSLError("Unable to allocate memory!")
@@ -188,7 +188,7 @@ def _cert_array_from_pem(pem_bundle):
# We only want to do that if an error occurs: otherwise, the caller
# should free.
CoreFoundation.CFRelease(cert_array)
- raise
+ raise
return cert_array
@@ -227,16 +227,16 @@ def _temporary_keychain():
# some random bytes to password-protect the keychain we're creating, so we
# ask for 40 random bytes.
random_bytes = os.urandom(40)
- filename = base64.b16encode(random_bytes[:8]).decode("utf-8")
- password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8
+ filename = base64.b16encode(random_bytes[:8]).decode("utf-8")
+ password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8
tempdirectory = tempfile.mkdtemp()
- keychain_path = os.path.join(tempdirectory, filename).encode("utf-8")
+ keychain_path = os.path.join(tempdirectory, filename).encode("utf-8")
# We now want to create the keychain itself.
keychain = Security.SecKeychainRef()
status = Security.SecKeychainCreate(
- keychain_path, len(password), password, False, None, ctypes.byref(keychain)
+ keychain_path, len(password), password, False, None, ctypes.byref(keychain)
)
_assert_no_error(status)
@@ -255,12 +255,12 @@ def _load_items_from_file(keychain, path):
identities = []
result_array = None
- with open(path, "rb") as f:
+ with open(path, "rb") as f:
raw_filedata = f.read()
try:
filedata = CoreFoundation.CFDataCreate(
- CoreFoundation.kCFAllocatorDefault, raw_filedata, len(raw_filedata)
+ CoreFoundation.kCFAllocatorDefault, raw_filedata, len(raw_filedata)
)
result_array = CoreFoundation.CFArrayRef()
result = Security.SecItemImport(
@@ -271,7 +271,7 @@ def _load_items_from_file(keychain, path):
0, # import flags
None, # key params, can include passphrase in the future
keychain, # The keychain to insert into
- ctypes.byref(result_array), # Results
+ ctypes.byref(result_array), # Results
)
_assert_no_error(result)
@@ -281,7 +281,7 @@ def _load_items_from_file(keychain, path):
# keychain already has them!
result_count = CoreFoundation.CFArrayGetCount(result_array)
for index in range(result_count):
- item = CoreFoundation.CFArrayGetValueAtIndex(result_array, index)
+ item = CoreFoundation.CFArrayGetValueAtIndex(result_array, index)
item = ctypes.cast(item, CoreFoundation.CFTypeRef)
if _is_cert(item):
@@ -339,7 +339,7 @@ def _load_client_cert_chain(keychain, *paths):
try:
for file_path in paths:
- new_identities, new_certs = _load_items_from_file(keychain, file_path)
+ new_identities, new_certs = _load_items_from_file(keychain, file_path)
identities.extend(new_identities)
certificates.extend(new_certs)
@@ -348,7 +348,7 @@ def _load_client_cert_chain(keychain, *paths):
if not identities:
new_identity = Security.SecIdentityRef()
status = Security.SecIdentityCreateWithCertificate(
- keychain, certificates[0], ctypes.byref(new_identity)
+ keychain, certificates[0], ctypes.byref(new_identity)
)
_assert_no_error(status)
identities.append(new_identity)
@@ -372,26 +372,26 @@ def _load_client_cert_chain(keychain, *paths):
finally:
for obj in itertools.chain(identities, certificates):
CoreFoundation.CFRelease(obj)
-
-
-TLS_PROTOCOL_VERSIONS = {
- "SSLv2": (0, 2),
- "SSLv3": (3, 0),
- "TLSv1": (3, 1),
- "TLSv1.1": (3, 2),
- "TLSv1.2": (3, 3),
-}
-
-
-def _build_tls_unknown_ca_alert(version):
- """
- Builds a TLS alert record for an unknown CA.
- """
- ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version]
- severity_fatal = 0x02
- description_unknown_ca = 0x30
- msg = struct.pack(">BB", severity_fatal, description_unknown_ca)
- msg_len = len(msg)
- record_type_alert = 0x15
- record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg
- return record
+
+
+TLS_PROTOCOL_VERSIONS = {
+ "SSLv2": (0, 2),
+ "SSLv3": (3, 0),
+ "TLSv1": (3, 1),
+ "TLSv1.1": (3, 2),
+ "TLSv1.2": (3, 3),
+}
+
+
+def _build_tls_unknown_ca_alert(version):
+ """
+ Builds a TLS alert record for an unknown CA.
+ """
+ ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version]
+ severity_fatal = 0x02
+ description_unknown_ca = 0x30
+ msg = struct.pack(">BB", severity_fatal, description_unknown_ca)
+ msg_len = len(msg)
+ record_type_alert = 0x15
+ record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg
+ return record
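
On macOS, where the bindings above allow the module to import, the alert builder's output can be unpacked to show the TLS record framing it produces; a small sketch:

    import struct

    from urllib3.contrib._securetransport.low_level import (
        _build_tls_unknown_ca_alert,
    )

    record = _build_tls_unknown_ca_alert("TLSv1.2")
    rec_type, ver_maj, ver_min, length = struct.unpack(">BBBH", record[:5])
    severity, description = struct.unpack(">BB", record[5:])

    print(hex(rec_type))       # 0x15: alert record
    print((ver_maj, ver_min))  # (3, 3): TLSv1.2 on the wire
    print(hex(severity))       # 0x2: fatal
    print(hex(description))    # 0x30: unknown CA
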
diff --git a/contrib/python/urllib3/urllib3/contrib/appengine.py b/contrib/python/urllib3/urllib3/contrib/appengine.py
index f91bdd6e77..f17b84c6d6 100644
--- a/contrib/python/urllib3/urllib3/contrib/appengine.py
+++ b/contrib/python/urllib3/urllib3/contrib/appengine.py
@@ -39,8 +39,8 @@ urllib3 on Google App Engine:
"""
from __future__ import absolute_import
-
-import io
+
+import io
import logging
import warnings
@@ -49,15 +49,15 @@ from ..exceptions import (
HTTPWarning,
MaxRetryError,
ProtocolError,
- SSLError,
- TimeoutError,
+ SSLError,
+ TimeoutError,
)
-from ..packages.six.moves.urllib.parse import urljoin
+from ..packages.six.moves.urllib.parse import urljoin
from ..request import RequestMethods
from ..response import HTTPResponse
-from ..util.retry import Retry
-from ..util.timeout import Timeout
-from . import _appengine_environ
+from ..util.retry import Retry
+from ..util.timeout import Timeout
+from . import _appengine_environ
try:
from google.appengine.api import urlfetch
@@ -90,30 +90,30 @@ class AppEngineManager(RequestMethods):
* If you attempt to use this on App Engine Flexible, as full socket
support is available.
* If a request size is more than 10 megabytes.
- * If a response size is more than 32 megabytes.
+ * If a response size is more than 32 megabytes.
* If you use an unsupported request method such as OPTIONS.
Beyond those cases, it will raise normal urllib3 errors.
"""
- def __init__(
- self,
- headers=None,
- retries=None,
- validate_certificate=True,
- urlfetch_retries=True,
- ):
+ def __init__(
+ self,
+ headers=None,
+ retries=None,
+ validate_certificate=True,
+ urlfetch_retries=True,
+ ):
if not urlfetch:
raise AppEnginePlatformError(
- "URLFetch is not available in this environment."
- )
+ "URLFetch is not available in this environment."
+ )
warnings.warn(
"urllib3 is using URLFetch on Google App Engine sandbox instead "
"of sockets. To use sockets directly instead of URLFetch see "
- "https://urllib3.readthedocs.io/en/1.26.x/reference/urllib3.contrib.html.",
- AppEnginePlatformWarning,
- )
+ "https://urllib3.readthedocs.io/en/1.26.x/reference/urllib3.contrib.html.",
+ AppEnginePlatformWarning,
+ )
RequestMethods.__init__(self, headers)
self.validate_certificate = validate_certificate
@@ -128,22 +128,22 @@ class AppEngineManager(RequestMethods):
# Return False to re-raise any potential exceptions
return False
- def urlopen(
- self,
- method,
- url,
- body=None,
- headers=None,
- retries=None,
- redirect=True,
- timeout=Timeout.DEFAULT_TIMEOUT,
- **response_kw
- ):
+ def urlopen(
+ self,
+ method,
+ url,
+ body=None,
+ headers=None,
+ retries=None,
+ redirect=True,
+ timeout=Timeout.DEFAULT_TIMEOUT,
+ **response_kw
+ ):
retries = self._get_retries(retries, redirect)
try:
- follow_redirects = redirect and retries.redirect != 0 and retries.total
+ follow_redirects = redirect and retries.redirect != 0 and retries.total
response = urlfetch.fetch(
url,
payload=body,
@@ -158,52 +158,52 @@ class AppEngineManager(RequestMethods):
raise TimeoutError(self, e)
except urlfetch.InvalidURLError as e:
- if "too large" in str(e):
+ if "too large" in str(e):
raise AppEnginePlatformError(
"URLFetch request too large, URLFetch only "
- "supports requests up to 10mb in size.",
- e,
- )
+ "supports requests up to 10mb in size.",
+ e,
+ )
raise ProtocolError(e)
except urlfetch.DownloadError as e:
- if "Too many redirects" in str(e):
+ if "Too many redirects" in str(e):
raise MaxRetryError(self, url, reason=e)
raise ProtocolError(e)
except urlfetch.ResponseTooLargeError as e:
raise AppEnginePlatformError(
"URLFetch response too large, URLFetch only supports"
- "responses up to 32mb in size.",
- e,
- )
+ "responses up to 32mb in size.",
+ e,
+ )
except urlfetch.SSLCertificateError as e:
raise SSLError(e)
except urlfetch.InvalidMethodError as e:
raise AppEnginePlatformError(
- "URLFetch does not support method: %s" % method, e
- )
+ "URLFetch does not support method: %s" % method, e
+ )
http_response = self._urlfetch_response_to_http_response(
- response, retries=retries, **response_kw
- )
+ response, retries=retries, **response_kw
+ )
# Handle redirect?
redirect_location = redirect and http_response.get_redirect_location()
if redirect_location:
# Check for redirect response
- if self.urlfetch_retries and retries.raise_on_redirect:
+ if self.urlfetch_retries and retries.raise_on_redirect:
raise MaxRetryError(self, url, "too many redirects")
else:
if http_response.status == 303:
- method = "GET"
+ method = "GET"
try:
- retries = retries.increment(
- method, url, response=http_response, _pool=self
- )
+ retries = retries.increment(
+ method, url, response=http_response, _pool=self
+ )
except MaxRetryError:
if retries.raise_on_redirect:
raise MaxRetryError(self, url, "too many redirects")
@@ -213,32 +213,32 @@ class AppEngineManager(RequestMethods):
log.debug("Redirecting %s -> %s", url, redirect_location)
redirect_url = urljoin(url, redirect_location)
return self.urlopen(
- method,
- redirect_url,
- body,
- headers,
- retries=retries,
- redirect=redirect,
- timeout=timeout,
- **response_kw
- )
+ method,
+ redirect_url,
+ body,
+ headers,
+ retries=retries,
+ redirect=redirect,
+ timeout=timeout,
+ **response_kw
+ )
# Check if we should retry the HTTP response.
- has_retry_after = bool(http_response.getheader("Retry-After"))
+ has_retry_after = bool(http_response.getheader("Retry-After"))
if retries.is_retry(method, http_response.status, has_retry_after):
- retries = retries.increment(method, url, response=http_response, _pool=self)
+ retries = retries.increment(method, url, response=http_response, _pool=self)
log.debug("Retry: %s", url)
retries.sleep(http_response)
return self.urlopen(
- method,
- url,
- body=body,
- headers=headers,
- retries=retries,
- redirect=redirect,
- timeout=timeout,
- **response_kw
- )
+ method,
+ url,
+ body=body,
+ headers=headers,
+ retries=retries,
+ redirect=redirect,
+ timeout=timeout,
+ **response_kw
+ )
return http_response
@@ -247,37 +247,37 @@ class AppEngineManager(RequestMethods):
if is_prod_appengine():
# Production GAE handles deflate encoding automatically, but does
# not remove the encoding header.
- content_encoding = urlfetch_resp.headers.get("content-encoding")
+ content_encoding = urlfetch_resp.headers.get("content-encoding")
- if content_encoding == "deflate":
- del urlfetch_resp.headers["content-encoding"]
+ if content_encoding == "deflate":
+ del urlfetch_resp.headers["content-encoding"]
- transfer_encoding = urlfetch_resp.headers.get("transfer-encoding")
+ transfer_encoding = urlfetch_resp.headers.get("transfer-encoding")
# We have a full response's content,
# so let's make sure we don't report ourselves as chunked data.
- if transfer_encoding == "chunked":
+ if transfer_encoding == "chunked":
encodings = transfer_encoding.split(",")
- encodings.remove("chunked")
- urlfetch_resp.headers["transfer-encoding"] = ",".join(encodings)
+ encodings.remove("chunked")
+ urlfetch_resp.headers["transfer-encoding"] = ",".join(encodings)
- original_response = HTTPResponse(
+ original_response = HTTPResponse(
# In order for decoding to work, we must present the content as
# a file-like object.
- body=io.BytesIO(urlfetch_resp.content),
- msg=urlfetch_resp.header_msg,
+ body=io.BytesIO(urlfetch_resp.content),
+ msg=urlfetch_resp.header_msg,
headers=urlfetch_resp.headers,
status=urlfetch_resp.status_code,
**response_kw
)
- return HTTPResponse(
- body=io.BytesIO(urlfetch_resp.content),
- headers=urlfetch_resp.headers,
- status=urlfetch_resp.status_code,
- original_response=original_response,
- **response_kw
- )
-
+ return HTTPResponse(
+ body=io.BytesIO(urlfetch_resp.content),
+ headers=urlfetch_resp.headers,
+ status=urlfetch_resp.status_code,
+ original_response=original_response,
+ **response_kw
+ )
+
def _get_absolute_timeout(self, timeout):
if timeout is Timeout.DEFAULT_TIMEOUT:
return None # Defer to URLFetch's default.
@@ -286,29 +286,29 @@ class AppEngineManager(RequestMethods):
warnings.warn(
"URLFetch does not support granular timeout settings, "
"reverting to total or default URLFetch timeout.",
- AppEnginePlatformWarning,
- )
+ AppEnginePlatformWarning,
+ )
return timeout.total
return timeout
def _get_retries(self, retries, redirect):
if not isinstance(retries, Retry):
- retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
+ retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if retries.connect or retries.read or retries.redirect:
warnings.warn(
"URLFetch only supports total retries and does not "
"recognize connect, read, or redirect retry parameters.",
- AppEnginePlatformWarning,
- )
+ AppEnginePlatformWarning,
+ )
return retries
-# Alias methods from _appengine_environ to maintain public API interface.
+# Alias methods from _appengine_environ to maintain public API interface.
-is_appengine = _appengine_environ.is_appengine
-is_appengine_sandbox = _appengine_environ.is_appengine_sandbox
-is_local_appengine = _appengine_environ.is_local_appengine
-is_prod_appengine = _appengine_environ.is_prod_appengine
-is_prod_appengine_mvms = _appengine_environ.is_prod_appengine_mvms
+is_appengine = _appengine_environ.is_appengine
+is_appengine_sandbox = _appengine_environ.is_appengine_sandbox
+is_local_appengine = _appengine_environ.is_local_appengine
+is_prod_appengine = _appengine_environ.is_prod_appengine
+is_prod_appengine_mvms = _appengine_environ.is_prod_appengine_mvms
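
The io.BytesIO wrapping in _urlfetch_response_to_http_response above is the key adaptation step: HTTPResponse expects a file-like body, so the fully buffered urlfetch content is re-wrapped. A standalone sketch with illustrative data:

    import io

    from urllib3.response import HTTPResponse

    raw = b'{"ok": true}'  # stands in for urlfetch_resp.content
    resp = HTTPResponse(
        body=io.BytesIO(raw),  # file-like, so decoding can stream
        headers={"Content-Type": "application/json"},
        status=200,
        preload_content=False,
    )
    print(resp.read())  # b'{"ok": true}'
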
diff --git a/contrib/python/urllib3/urllib3/contrib/ntlmpool.py b/contrib/python/urllib3/urllib3/contrib/ntlmpool.py
index 41a8fd174c..7135a83008 100644
--- a/contrib/python/urllib3/urllib3/contrib/ntlmpool.py
+++ b/contrib/python/urllib3/urllib3/contrib/ntlmpool.py
@@ -5,22 +5,22 @@ Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
"""
from __future__ import absolute_import
-import warnings
+import warnings
from logging import getLogger
-
+
from ntlm import ntlm
from .. import HTTPSConnectionPool
from ..packages.six.moves.http_client import HTTPSConnection
-warnings.warn(
- "The 'urllib3.contrib.ntlmpool' module is deprecated and will be removed "
- "in urllib3 v2.0 release, urllib3 is not able to support it properly due "
- "to reasons listed in issue: https://github.com/urllib3/urllib3/issues/2282. "
- "If you are a user of this module please comment in the mentioned issue.",
- DeprecationWarning,
-)
-
+warnings.warn(
+ "The 'urllib3.contrib.ntlmpool' module is deprecated and will be removed "
+ "in urllib3 v2.0 release, urllib3 is not able to support it properly due "
+ "to reasons listed in issue: https://github.com/urllib3/urllib3/issues/2282. "
+ "If you are a user of this module please comment in the mentioned issue.",
+ DeprecationWarning,
+)
+
log = getLogger(__name__)
@@ -29,7 +29,7 @@ class NTLMConnectionPool(HTTPSConnectionPool):
Implements an NTLM authentication version of an urllib3 connection pool
"""
- scheme = "https"
+ scheme = "https"
def __init__(self, user, pw, authurl, *args, **kwargs):
"""
@@ -40,7 +40,7 @@ class NTLMConnectionPool(HTTPSConnectionPool):
super(NTLMConnectionPool, self).__init__(*args, **kwargs)
self.authurl = authurl
self.rawuser = user
- user_parts = user.split("\\", 1)
+ user_parts = user.split("\\", 1)
self.domain = user_parts[0].upper()
self.user = user_parts[1]
self.pw = pw
@@ -49,82 +49,82 @@ class NTLMConnectionPool(HTTPSConnectionPool):
# Performs the NTLM handshake that secures the connection. The socket
# must be kept open while requests are performed.
self.num_connections += 1
- log.debug(
- "Starting NTLM HTTPS connection no. %d: https://%s%s",
- self.num_connections,
- self.host,
- self.authurl,
- )
+ log.debug(
+ "Starting NTLM HTTPS connection no. %d: https://%s%s",
+ self.num_connections,
+ self.host,
+ self.authurl,
+ )
- headers = {"Connection": "Keep-Alive"}
- req_header = "Authorization"
- resp_header = "www-authenticate"
+ headers = {"Connection": "Keep-Alive"}
+ req_header = "Authorization"
+ resp_header = "www-authenticate"
conn = HTTPSConnection(host=self.host, port=self.port)
# Send negotiation message
- headers[req_header] = "NTLM %s" % ntlm.create_NTLM_NEGOTIATE_MESSAGE(
- self.rawuser
- )
- log.debug("Request headers: %s", headers)
- conn.request("GET", self.authurl, None, headers)
+ headers[req_header] = "NTLM %s" % ntlm.create_NTLM_NEGOTIATE_MESSAGE(
+ self.rawuser
+ )
+ log.debug("Request headers: %s", headers)
+ conn.request("GET", self.authurl, None, headers)
res = conn.getresponse()
reshdr = dict(res.getheaders())
- log.debug("Response status: %s %s", res.status, res.reason)
- log.debug("Response headers: %s", reshdr)
- log.debug("Response data: %s [...]", res.read(100))
+ log.debug("Response status: %s %s", res.status, res.reason)
+ log.debug("Response headers: %s", reshdr)
+ log.debug("Response data: %s [...]", res.read(100))
# Remove the reference to the socket, so that it can not be closed by
# the response object (we want to keep the socket open)
res.fp = None
# Server should respond with a challenge message
- auth_header_values = reshdr[resp_header].split(", ")
+ auth_header_values = reshdr[resp_header].split(", ")
auth_header_value = None
for s in auth_header_values:
- if s[:5] == "NTLM ":
+ if s[:5] == "NTLM ":
auth_header_value = s[5:]
if auth_header_value is None:
- raise Exception(
- "Unexpected %s response header: %s" % (resp_header, reshdr[resp_header])
- )
+ raise Exception(
+ "Unexpected %s response header: %s" % (resp_header, reshdr[resp_header])
+ )
# Send authentication message
- ServerChallenge, NegotiateFlags = ntlm.parse_NTLM_CHALLENGE_MESSAGE(
- auth_header_value
- )
- auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(
- ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags
- )
- headers[req_header] = "NTLM %s" % auth_msg
- log.debug("Request headers: %s", headers)
- conn.request("GET", self.authurl, None, headers)
+ ServerChallenge, NegotiateFlags = ntlm.parse_NTLM_CHALLENGE_MESSAGE(
+ auth_header_value
+ )
+ auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(
+ ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags
+ )
+ headers[req_header] = "NTLM %s" % auth_msg
+ log.debug("Request headers: %s", headers)
+ conn.request("GET", self.authurl, None, headers)
res = conn.getresponse()
- log.debug("Response status: %s %s", res.status, res.reason)
- log.debug("Response headers: %s", dict(res.getheaders()))
- log.debug("Response data: %s [...]", res.read()[:100])
+ log.debug("Response status: %s %s", res.status, res.reason)
+ log.debug("Response headers: %s", dict(res.getheaders()))
+ log.debug("Response data: %s [...]", res.read()[:100])
if res.status != 200:
if res.status == 401:
- raise Exception("Server rejected request: wrong username or password")
- raise Exception("Wrong server response: %s %s" % (res.status, res.reason))
+ raise Exception("Server rejected request: wrong username or password")
+ raise Exception("Wrong server response: %s %s" % (res.status, res.reason))
res.fp = None
- log.debug("Connection established")
+ log.debug("Connection established")
return conn
- def urlopen(
- self,
- method,
- url,
- body=None,
- headers=None,
- retries=3,
- redirect=True,
- assert_same_host=True,
- ):
+ def urlopen(
+ self,
+ method,
+ url,
+ body=None,
+ headers=None,
+ retries=3,
+ redirect=True,
+ assert_same_host=True,
+ ):
if headers is None:
headers = {}
- headers["Connection"] = "Keep-Alive"
- return super(NTLMConnectionPool, self).urlopen(
- method, url, body, headers, retries, redirect, assert_same_host
- )
+ headers["Connection"] = "Keep-Alive"
+ return super(NTLMConnectionPool, self).urlopen(
+ method, url, body, headers, retries, redirect, assert_same_host
+ )
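[editor's note: the pool above runs the three-step NTLM handshake (negotiate, challenge, authenticate) once per connection and then forces Keep-Alive so the authenticated socket is reused. A hedged usage sketch; the host, credentials, and auth URL are hypothetical:

    from urllib3.contrib.ntlmpool import NTLMConnectionPool

    pool = NTLMConnectionPool(
        user="EXAMPLE\\alice",        # DOMAIN\\user, split on the first backslash
        pw="secret",
        authurl="/",                  # URL that issues the NTLM challenge
        host="intranet.example.com",
    )
    response = pool.urlopen("GET", "/")
]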
diff --git a/contrib/python/urllib3/urllib3/contrib/pyopenssl.py b/contrib/python/urllib3/urllib3/contrib/pyopenssl.py
index def83afdb2..e46e9b0a9f 100644
--- a/contrib/python/urllib3/urllib3/contrib/pyopenssl.py
+++ b/contrib/python/urllib3/urllib3/contrib/pyopenssl.py
@@ -1,32 +1,32 @@
"""
-TLS with SNI_-support for Python 2. Follow these instructions if you would
-like to verify TLS certificates in Python 2. Note, the default libraries do
+TLS with SNI_-support for Python 2. Follow these instructions if you would
+like to verify TLS certificates in Python 2. Note, the default libraries do
*not* do certificate checking; you need to do additional work to validate
certificates yourself.
This needs the following packages installed:
-* `pyOpenSSL`_ (tested with 16.0.0)
-* `cryptography`_ (minimum 1.3.4, from pyopenssl)
-* `idna`_ (minimum 2.0, from cryptography)
+* `pyOpenSSL`_ (tested with 16.0.0)
+* `cryptography`_ (minimum 1.3.4, from pyopenssl)
+* `idna`_ (minimum 2.0, from cryptography)
However, pyopenssl depends on cryptography, which depends on idna, so while we
use all three directly here we end up having relatively few packages required.
You can install them with the following command:
-.. code-block:: bash
-
- $ python -m pip install pyopenssl cryptography idna
+.. code-block:: bash
+ $ python -m pip install pyopenssl cryptography idna
+
To activate certificate checking, call
:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
before you begin making HTTP requests. This can be done in a ``sitecustomize``
module, or at any other time before your application begins using ``urllib3``,
-like this:
-
-.. code-block:: python
+like this:
+.. code-block:: python
+
try:
import urllib3.contrib.pyopenssl
urllib3.contrib.pyopenssl.inject_into_urllib3()
@@ -41,9 +41,9 @@ compression in Python 2 (see `CRIME attack`_).
.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
-.. _pyopenssl: https://www.pyopenssl.org
-.. _cryptography: https://cryptography.io
-.. _idna: https://github.com/kjd/idna
+.. _pyopenssl: https://www.pyopenssl.org
+.. _cryptography: https://cryptography.io
+.. _idna: https://github.com/kjd/idna
"""
from __future__ import absolute_import
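[editor's note: injection is symmetric with extraction, which restores the stdlib ssl backend. A short sketch:

    import urllib3.contrib.pyopenssl

    urllib3.contrib.pyopenssl.inject_into_urllib3()   # PyOpenSSL now backs TLS
    # ... make requests with urllib3 as usual ...
    urllib3.contrib.pyopenssl.extract_from_urllib3()  # undo the monkey-patch
]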
@@ -51,18 +51,18 @@ import OpenSSL.SSL
from cryptography import x509
from cryptography.hazmat.backends.openssl import backend as openssl_backend
from cryptography.hazmat.backends.openssl.x509 import _Certificate
-
-try:
- from cryptography.x509 import UnsupportedExtension
-except ImportError:
- # UnsupportedExtension is gone in cryptography >= 2.1.0
- class UnsupportedExtension(Exception):
- pass
-
-
+
+try:
+ from cryptography.x509 import UnsupportedExtension
+except ImportError:
+ # UnsupportedExtension is gone in cryptography >= 2.1.0
+ class UnsupportedExtension(Exception):
+ pass
+
+
from io import BytesIO
-from socket import error as SocketError
-from socket import timeout
+from socket import error as SocketError
+from socket import timeout
try: # Platform-specific: Python 2
from socket import _fileobject
@@ -75,38 +75,38 @@ import ssl
import sys
from .. import util
-from ..packages import six
-from ..util.ssl_ import PROTOCOL_TLS_CLIENT
-
-__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
+from ..packages import six
+from ..util.ssl_ import PROTOCOL_TLS_CLIENT
+__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
+
# SNI always works.
HAS_SNI = True
# Map from urllib3 to PyOpenSSL compatible parameter-values.
_openssl_versions = {
- util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,
- PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD,
+ util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,
+ PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD,
ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}
-if hasattr(ssl, "PROTOCOL_SSLv3") and hasattr(OpenSSL.SSL, "SSLv3_METHOD"):
- _openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD
-
-if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"):
+if hasattr(ssl, "PROTOCOL_SSLv3") and hasattr(OpenSSL.SSL, "SSLv3_METHOD"):
+ _openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD
+
+if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"):
_openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
-if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"):
+if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"):
_openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
_stdlib_to_openssl_verify = {
ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
- ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER
- + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
+ ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER
+ + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}
-_openssl_to_stdlib_verify = dict((v, k) for k, v in _stdlib_to_openssl_verify.items())
+_openssl_to_stdlib_verify = dict((v, k) for k, v in _stdlib_to_openssl_verify.items())
# OpenSSL will only write 16K at a time
SSL_WRITE_BLOCKSIZE = 16384
@@ -119,11 +119,11 @@ log = logging.getLogger(__name__)
def inject_into_urllib3():
- "Monkey-patch urllib3 with PyOpenSSL-backed SSL-support."
+ "Monkey-patch urllib3 with PyOpenSSL-backed SSL-support."
_validate_dependencies_met()
- util.SSLContext = PyOpenSSLContext
+ util.SSLContext = PyOpenSSLContext
util.ssl_.SSLContext = PyOpenSSLContext
util.HAS_SNI = HAS_SNI
util.ssl_.HAS_SNI = HAS_SNI
@@ -132,9 +132,9 @@ def inject_into_urllib3():
def extract_from_urllib3():
- "Undo monkey-patching by :func:`inject_into_urllib3`."
+ "Undo monkey-patching by :func:`inject_into_urllib3`."
- util.SSLContext = orig_util_SSLContext
+ util.SSLContext = orig_util_SSLContext
util.ssl_.SSLContext = orig_util_SSLContext
util.HAS_SNI = orig_util_HAS_SNI
util.ssl_.HAS_SNI = orig_util_HAS_SNI
@@ -149,23 +149,23 @@ def _validate_dependencies_met():
"""
# Method added in `cryptography==1.1`; not available in older versions
from cryptography.x509.extensions import Extensions
-
+
if getattr(Extensions, "get_extension_for_class", None) is None:
- raise ImportError(
- "'cryptography' module missing required functionality. "
- "Try upgrading to v1.3.4 or newer."
- )
+ raise ImportError(
+ "'cryptography' module missing required functionality. "
+ "Try upgrading to v1.3.4 or newer."
+ )
# pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509
# attribute is only present on those versions.
from OpenSSL.crypto import X509
-
+
x509 = X509()
if getattr(x509, "_x509", None) is None:
- raise ImportError(
- "'pyOpenSSL' module missing required functionality. "
- "Try upgrading to v0.14 or newer."
- )
+ raise ImportError(
+ "'pyOpenSSL' module missing required functionality. "
+ "Try upgrading to v0.14 or newer."
+ )
def _dnsname_to_stdlib(name):
@@ -177,11 +177,11 @@ def _dnsname_to_stdlib(name):
from ASCII bytes. We need to idna-encode that string to get it back, and
then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib
uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8).
-
- If the name cannot be idna-encoded then we return None signalling that
- the name given should be skipped.
+
+ If the name cannot be idna-encoded then we return None signalling that
+ the name given should be skipped.
"""
-
+
def idna_encode(name):
"""
Borrowed wholesale from the Python Cryptography Project. It turns out
@@ -190,24 +190,24 @@ def _dnsname_to_stdlib(name):
"""
import idna
- try:
- for prefix in [u"*.", u"."]:
- if name.startswith(prefix):
- name = name[len(prefix) :]
- return prefix.encode("ascii") + idna.encode(name)
- return idna.encode(name)
- except idna.core.IDNAError:
- return None
-
- # Don't send IPv6 addresses through the IDNA encoder.
- if ":" in name:
- return name
-
+ try:
+ for prefix in [u"*.", u"."]:
+ if name.startswith(prefix):
+ name = name[len(prefix) :]
+ return prefix.encode("ascii") + idna.encode(name)
+ return idna.encode(name)
+ except idna.core.IDNAError:
+ return None
+
+ # Don't send IPv6 addresses through the IDNA encoder.
+ if ":" in name:
+ return name
+
name = idna_encode(name)
- if name is None:
- return None
- elif sys.version_info >= (3, 0):
- name = name.decode("utf-8")
+ if name is None:
+ return None
+ elif sys.version_info >= (3, 0):
+ name = name.decode("utf-8")
return name
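[editor's note: the helper's contract: IPv6 literals pass through untouched, names that cannot be IDNA-encoded map to None (callers skip them), and everything else comes back encoded (and UTF-8 decoded on Python 3). Illustrative, hypothetical inputs:

    _dnsname_to_stdlib(u"*.b\u00fccher.example")  # wildcard prefix kept, rest IDNA-encoded
    _dnsname_to_stdlib(u"2001:db8::1")            # returned as-is; IPv6 skips the encoder
    _dnsname_to_stdlib(u"bad..label")             # None; idna.core.IDNAError is swallowed
]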
@@ -226,16 +226,16 @@ def get_subj_alt_name(peer_cert):
# We want to find the SAN extension. Ask Cryptography to locate it (it's
# faster than looping in Python)
try:
- ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
+ ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
except x509.ExtensionNotFound:
# No such extension, return the empty list.
return []
- except (
- x509.DuplicateExtension,
- UnsupportedExtension,
- x509.UnsupportedGeneralNameType,
- UnicodeError,
- ) as e:
+ except (
+ x509.DuplicateExtension,
+ UnsupportedExtension,
+ x509.UnsupportedGeneralNameType,
+ UnicodeError,
+ ) as e:
# A problem has been found with the quality of the certificate. Assume
# no SAN field is present.
log.warning(
@@ -252,25 +252,25 @@ def get_subj_alt_name(peer_cert):
# Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8
# decoded. This is pretty frustrating, but that's what the standard library
# does with certificates, and so we need to attempt to do the same.
- # We also want to skip over names which cannot be idna encoded.
+ # We also want to skip over names which cannot be idna encoded.
names = [
- ("DNS", name)
- for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName))
- if name is not None
+ ("DNS", name)
+ for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName))
+ if name is not None
]
names.extend(
- ("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress)
+ ("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress)
)
return names
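[editor's note: the return value mimics the subjectAltName shape that ssl.SSLSocket.getpeercert() produces, e.g. (illustrative):

    [("DNS", "example.com"), ("DNS", "www.example.com"), ("IP Address", "203.0.113.7")]
]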
class WrappedSocket(object):
- """API-compatibility wrapper for Python OpenSSL's Connection-class.
+ """API-compatibility wrapper for Python OpenSSL's Connection-class.
Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
collector of pypy.
- """
+ """
def __init__(self, connection, socket, suppress_ragged_eofs=True):
self.connection = connection
@@ -293,24 +293,24 @@ class WrappedSocket(object):
try:
data = self.connection.recv(*args, **kwargs)
except OpenSSL.SSL.SysCallError as e:
- if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
- return b""
+ if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
+ return b""
else:
raise SocketError(str(e))
- except OpenSSL.SSL.ZeroReturnError:
+ except OpenSSL.SSL.ZeroReturnError:
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
- return b""
+ return b""
else:
raise
except OpenSSL.SSL.WantReadError:
- if not util.wait_for_read(self.socket, self.socket.gettimeout()):
- raise timeout("The read operation timed out")
+ if not util.wait_for_read(self.socket, self.socket.gettimeout()):
+ raise timeout("The read operation timed out")
else:
return self.recv(*args, **kwargs)
-
- # TLS 1.3 post-handshake authentication
- except OpenSSL.SSL.Error as e:
- raise ssl.SSLError("read error: %r" % e)
+
+ # TLS 1.3 post-handshake authentication
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError("read error: %r" % e)
else:
return data
@@ -318,25 +318,25 @@ class WrappedSocket(object):
try:
return self.connection.recv_into(*args, **kwargs)
except OpenSSL.SSL.SysCallError as e:
- if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
+ if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
return 0
else:
raise SocketError(str(e))
- except OpenSSL.SSL.ZeroReturnError:
+ except OpenSSL.SSL.ZeroReturnError:
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
return 0
else:
raise
except OpenSSL.SSL.WantReadError:
- if not util.wait_for_read(self.socket, self.socket.gettimeout()):
- raise timeout("The read operation timed out")
+ if not util.wait_for_read(self.socket, self.socket.gettimeout()):
+ raise timeout("The read operation timed out")
else:
return self.recv_into(*args, **kwargs)
- # TLS 1.3 post-handshake authentication
- except OpenSSL.SSL.Error as e:
- raise ssl.SSLError("read error: %r" % e)
-
+ # TLS 1.3 post-handshake authentication
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError("read error: %r" % e)
+
def settimeout(self, timeout):
return self.socket.settimeout(timeout)
@@ -345,7 +345,7 @@ class WrappedSocket(object):
try:
return self.connection.send(data)
except OpenSSL.SSL.WantWriteError:
- if not util.wait_for_write(self.socket, self.socket.gettimeout()):
+ if not util.wait_for_write(self.socket, self.socket.gettimeout()):
raise timeout()
continue
except OpenSSL.SSL.SysCallError as e:
@@ -354,9 +354,9 @@ class WrappedSocket(object):
def sendall(self, data):
total_sent = 0
while total_sent < len(data):
- sent = self._send_until_done(
- data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE]
- )
+ sent = self._send_until_done(
+ data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE]
+ )
total_sent += sent
def shutdown(self):
@@ -380,16 +380,16 @@ class WrappedSocket(object):
return x509
if binary_form:
- return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509)
+ return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509)
return {
- "subject": ((("commonName", x509.get_subject().CN),),),
- "subjectAltName": get_subj_alt_name(x509),
+ "subject": ((("commonName", x509.get_subject().CN),),),
+ "subjectAltName": get_subj_alt_name(x509),
}
- def version(self):
- return self.connection.get_protocol_version_name()
-
+ def version(self):
+ return self.connection.get_protocol_version_name()
+
def _reuse(self):
self._makefile_refs += 1
@@ -401,12 +401,12 @@ class WrappedSocket(object):
if _fileobject: # Platform-specific: Python 2
-
+
def makefile(self, mode, bufsize=-1):
self._makefile_refs += 1
return _fileobject(self, mode, bufsize, close=True)
-
-
+
+
else: # Platform-specific: Python 3
makefile = backport_makefile
@@ -419,7 +419,7 @@ class PyOpenSSLContext(object):
for translating the interface of the standard library ``SSLContext`` object
to calls into PyOpenSSL.
"""
-
+
def __init__(self, protocol):
self.protocol = _openssl_versions[protocol]
self._ctx = OpenSSL.SSL.Context(self.protocol)
@@ -441,52 +441,52 @@ class PyOpenSSLContext(object):
@verify_mode.setter
def verify_mode(self, value):
- self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback)
+ self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback)
def set_default_verify_paths(self):
self._ctx.set_default_verify_paths()
def set_ciphers(self, ciphers):
if isinstance(ciphers, six.text_type):
- ciphers = ciphers.encode("utf-8")
+ ciphers = ciphers.encode("utf-8")
self._ctx.set_cipher_list(ciphers)
def load_verify_locations(self, cafile=None, capath=None, cadata=None):
if cafile is not None:
- cafile = cafile.encode("utf-8")
+ cafile = cafile.encode("utf-8")
if capath is not None:
- capath = capath.encode("utf-8")
- try:
- self._ctx.load_verify_locations(cafile, capath)
- if cadata is not None:
- self._ctx.load_verify_locations(BytesIO(cadata))
- except OpenSSL.SSL.Error as e:
- raise ssl.SSLError("unable to load trusted certificates: %r" % e)
+ capath = capath.encode("utf-8")
+ try:
+ self._ctx.load_verify_locations(cafile, capath)
+ if cadata is not None:
+ self._ctx.load_verify_locations(BytesIO(cadata))
+ except OpenSSL.SSL.Error as e:
+ raise ssl.SSLError("unable to load trusted certificates: %r" % e)
def load_cert_chain(self, certfile, keyfile=None, password=None):
- self._ctx.use_certificate_chain_file(certfile)
+ self._ctx.use_certificate_chain_file(certfile)
if password is not None:
- if not isinstance(password, six.binary_type):
- password = password.encode("utf-8")
- self._ctx.set_passwd_cb(lambda *_: password)
+ if not isinstance(password, six.binary_type):
+ password = password.encode("utf-8")
+ self._ctx.set_passwd_cb(lambda *_: password)
self._ctx.use_privatekey_file(keyfile or certfile)
- def set_alpn_protocols(self, protocols):
- protocols = [six.ensure_binary(p) for p in protocols]
- return self._ctx.set_alpn_protos(protocols)
-
- def wrap_socket(
- self,
- sock,
- server_side=False,
- do_handshake_on_connect=True,
- suppress_ragged_eofs=True,
- server_hostname=None,
- ):
+ def set_alpn_protocols(self, protocols):
+ protocols = [six.ensure_binary(p) for p in protocols]
+ return self._ctx.set_alpn_protos(protocols)
+
+ def wrap_socket(
+ self,
+ sock,
+ server_side=False,
+ do_handshake_on_connect=True,
+ suppress_ragged_eofs=True,
+ server_hostname=None,
+ ):
cnx = OpenSSL.SSL.Connection(self._ctx, sock)
if isinstance(server_hostname, six.text_type): # Platform-specific: Python 3
- server_hostname = server_hostname.encode("utf-8")
+ server_hostname = server_hostname.encode("utf-8")
if server_hostname is not None:
cnx.set_tlsext_host_name(server_hostname)
@@ -497,11 +497,11 @@ class PyOpenSSLContext(object):
try:
cnx.do_handshake()
except OpenSSL.SSL.WantReadError:
- if not util.wait_for_read(sock, sock.gettimeout()):
- raise timeout("select timed out")
+ if not util.wait_for_read(sock, sock.gettimeout()):
+ raise timeout("select timed out")
continue
except OpenSSL.SSL.Error as e:
- raise ssl.SSLError("bad handshake: %r" % e)
+ raise ssl.SSLError("bad handshake: %r" % e)
break
return WrappedSocket(cnx, sock)
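[editor's note: an end-to-end sketch of the context class; the CA bundle path and host are hypothetical, and ssl.PROTOCOL_TLS is assumed to be available as a key of _openssl_versions:

    import socket
    import ssl

    ctx = PyOpenSSLContext(ssl.PROTOCOL_TLS)
    ctx.verify_mode = ssl.CERT_REQUIRED
    ctx.load_verify_locations(cafile="/etc/ssl/certs/ca-bundle.pem")  # hypothetical path
    raw = socket.create_connection(("example.com", 443))
    tls = ctx.wrap_socket(raw, server_hostname="example.com")
]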
diff --git a/contrib/python/urllib3/urllib3/contrib/securetransport.py b/contrib/python/urllib3/urllib3/contrib/securetransport.py
index 554c015fed..1221378b81 100644
--- a/contrib/python/urllib3/urllib3/contrib/securetransport.py
+++ b/contrib/python/urllib3/urllib3/contrib/securetransport.py
@@ -23,33 +23,33 @@ To use this module, simply import and inject it::
urllib3.contrib.securetransport.inject_into_urllib3()
Happy TLSing!
-
-This code is a bastardised version of the code found in Will Bond's oscrypto
-library. An enormous debt is owed to him for blazing this trail for us. For
-that reason, this code should be considered to be covered both by urllib3's
-license and by oscrypto's:
-
-.. code-block::
-
- Copyright (c) 2015-2016 Will Bond <will@wbond.net>
-
- Permission is hereby granted, free of charge, to any person obtaining a
- copy of this software and associated documentation files (the "Software"),
- to deal in the Software without restriction, including without limitation
- the rights to use, copy, modify, merge, publish, distribute, sublicense,
- and/or sell copies of the Software, and to permit persons to whom the
- Software is furnished to do so, subject to the following conditions:
-
- The above copyright notice and this permission notice shall be included in
- all copies or substantial portions of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
- DEALINGS IN THE SOFTWARE.
+
+This code is a bastardised version of the code found in Will Bond's oscrypto
+library. An enormous debt is owed to him for blazing this trail for us. For
+that reason, this code should be considered to be covered both by urllib3's
+license and by oscrypto's:
+
+.. code-block::
+
+ Copyright (c) 2015-2016 Will Bond <will@wbond.net>
+
+ Permission is hereby granted, free of charge, to any person obtaining a
+ copy of this software and associated documentation files (the "Software"),
+ to deal in the Software without restriction, including without limitation
+ the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ and/or sell copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in
+ all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ DEALINGS IN THE SOFTWARE.
"""
from __future__ import absolute_import
@@ -60,22 +60,22 @@ import os.path
import shutil
import socket
import ssl
-import struct
+import struct
import threading
import weakref
-import six
-
+import six
+
from .. import util
-from ..util.ssl_ import PROTOCOL_TLS_CLIENT
-from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
+from ..util.ssl_ import PROTOCOL_TLS_CLIENT
+from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
from ._securetransport.low_level import (
- _assert_no_error,
- _build_tls_unknown_ca_alert,
- _cert_array_from_pem,
- _create_cfstring_array,
- _load_client_cert_chain,
- _temporary_keychain,
+ _assert_no_error,
+ _build_tls_unknown_ca_alert,
+ _cert_array_from_pem,
+ _create_cfstring_array,
+ _load_client_cert_chain,
+ _temporary_keychain,
)
try: # Platform-specific: Python 2
@@ -84,7 +84,7 @@ except ImportError: # Platform-specific: Python 3
_fileobject = None
from ..packages.backports.makefile import backport_makefile
-__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
+__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
# SNI always works
HAS_SNI = True
@@ -116,35 +116,35 @@ _connection_ref_lock = threading.Lock()
SSL_WRITE_BLOCKSIZE = 16384
# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to
-# individual cipher suites. We need to do this because this is how
+# individual cipher suites. We need to do this because this is how
# SecureTransport wants them.
CIPHER_SUITES = [
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
- SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
- SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
- SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
+ SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,
- SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
- SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
- SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
- SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
- SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
- SecurityConst.TLS_AES_256_GCM_SHA384,
- SecurityConst.TLS_AES_128_GCM_SHA256,
+ SecurityConst.TLS_AES_256_GCM_SHA384,
+ SecurityConst.TLS_AES_128_GCM_SHA256,
SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384,
SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256,
- SecurityConst.TLS_AES_128_CCM_8_SHA256,
- SecurityConst.TLS_AES_128_CCM_SHA256,
+ SecurityConst.TLS_AES_128_CCM_8_SHA256,
+ SecurityConst.TLS_AES_128_CCM_SHA256,
SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256,
SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256,
SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA,
@@ -153,36 +153,36 @@ CIPHER_SUITES = [
# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of
# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version.
-# TLSv1 to 1.2 are supported on macOS 10.8+
+# TLSv1 to 1.2 are supported on macOS 10.8+
_protocol_to_min_max = {
- util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
- PROTOCOL_TLS_CLIENT: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
+ util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
+ PROTOCOL_TLS_CLIENT: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
}
if hasattr(ssl, "PROTOCOL_SSLv2"):
_protocol_to_min_max[ssl.PROTOCOL_SSLv2] = (
- SecurityConst.kSSLProtocol2,
- SecurityConst.kSSLProtocol2,
+ SecurityConst.kSSLProtocol2,
+ SecurityConst.kSSLProtocol2,
)
if hasattr(ssl, "PROTOCOL_SSLv3"):
_protocol_to_min_max[ssl.PROTOCOL_SSLv3] = (
- SecurityConst.kSSLProtocol3,
- SecurityConst.kSSLProtocol3,
+ SecurityConst.kSSLProtocol3,
+ SecurityConst.kSSLProtocol3,
)
if hasattr(ssl, "PROTOCOL_TLSv1"):
_protocol_to_min_max[ssl.PROTOCOL_TLSv1] = (
- SecurityConst.kTLSProtocol1,
- SecurityConst.kTLSProtocol1,
+ SecurityConst.kTLSProtocol1,
+ SecurityConst.kTLSProtocol1,
)
if hasattr(ssl, "PROTOCOL_TLSv1_1"):
_protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = (
- SecurityConst.kTLSProtocol11,
- SecurityConst.kTLSProtocol11,
+ SecurityConst.kTLSProtocol11,
+ SecurityConst.kTLSProtocol11,
)
if hasattr(ssl, "PROTOCOL_TLSv1_2"):
_protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = (
- SecurityConst.kTLSProtocol12,
- SecurityConst.kTLSProtocol12,
+ SecurityConst.kTLSProtocol12,
+ SecurityConst.kTLSProtocol12,
)
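[editor's note: in other words, the generic PROTOCOL_TLS entries negotiate anywhere in the TLSv1-TLSv1.2 window, while the version-specific constants pin min and max to the same value:

    min_v, max_v = _protocol_to_min_max[util.PROTOCOL_TLS]
    # -> (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12)
]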
@@ -190,7 +190,7 @@ def inject_into_urllib3():
"""
Monkey-patch urllib3 with SecureTransport-backed SSL-support.
"""
- util.SSLContext = SecureTransportContext
+ util.SSLContext = SecureTransportContext
util.ssl_.SSLContext = SecureTransportContext
util.HAS_SNI = HAS_SNI
util.ssl_.HAS_SNI = HAS_SNI
@@ -202,7 +202,7 @@ def extract_from_urllib3():
"""
Undo monkey-patching by :func:`inject_into_urllib3`.
"""
- util.SSLContext = orig_util_SSLContext
+ util.SSLContext = orig_util_SSLContext
util.ssl_.SSLContext = orig_util_SSLContext
util.HAS_SNI = orig_util_HAS_SNI
util.ssl_.HAS_SNI = orig_util_HAS_SNI
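[editor's note: as with the PyOpenSSL shim, injection and extraction are symmetric:

    import urllib3.contrib.securetransport

    urllib3.contrib.securetransport.inject_into_urllib3()
    # ... TLS is now backed by SecureTransport (macOS) ...
    urllib3.contrib.securetransport.extract_from_urllib3()
]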
@@ -231,14 +231,14 @@ def _read_callback(connection_id, data_buffer, data_length_pointer):
try:
while read_count < requested_length:
if timeout is None or timeout >= 0:
- if not util.wait_for_read(base_socket, timeout):
- raise socket.error(errno.EAGAIN, "timed out")
+ if not util.wait_for_read(base_socket, timeout):
+ raise socket.error(errno.EAGAIN, "timed out")
- remaining = requested_length - read_count
- buffer = (ctypes.c_char * remaining).from_address(
- data_buffer + read_count
+ remaining = requested_length - read_count
+ buffer = (ctypes.c_char * remaining).from_address(
+ data_buffer + read_count
)
- chunk_size = base_socket.recv_into(buffer, remaining)
+ chunk_size = base_socket.recv_into(buffer, remaining)
read_count += chunk_size
if not chunk_size:
if not read_count:
@@ -248,8 +248,8 @@ def _read_callback(connection_id, data_buffer, data_length_pointer):
error = e.errno
if error is not None and error != errno.EAGAIN:
- data_length_pointer[0] = read_count
- if error == errno.ECONNRESET or error == errno.EPIPE:
+ data_length_pointer[0] = read_count
+ if error == errno.ECONNRESET or error == errno.EPIPE:
return SecurityConst.errSSLClosedAbort
raise
@@ -287,8 +287,8 @@ def _write_callback(connection_id, data_buffer, data_length_pointer):
try:
while sent < bytes_to_write:
if timeout is None or timeout >= 0:
- if not util.wait_for_write(base_socket, timeout):
- raise socket.error(errno.EAGAIN, "timed out")
+ if not util.wait_for_write(base_socket, timeout):
+ raise socket.error(errno.EAGAIN, "timed out")
chunk_sent = base_socket.send(data)
sent += chunk_sent
@@ -299,13 +299,13 @@ def _write_callback(connection_id, data_buffer, data_length_pointer):
error = e.errno
if error is not None and error != errno.EAGAIN:
- data_length_pointer[0] = sent
- if error == errno.ECONNRESET or error == errno.EPIPE:
+ data_length_pointer[0] = sent
+ if error == errno.ECONNRESET or error == errno.EPIPE:
return SecurityConst.errSSLClosedAbort
raise
data_length_pointer[0] = sent
-
+
if sent != bytes_to_write:
return SecurityConst.errSSLWouldBlock
@@ -330,7 +330,7 @@ class WrappedSocket(object):
Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage
collector of PyPy.
"""
-
+
def __init__(self, socket):
self.socket = socket
self.context = None
@@ -383,58 +383,58 @@ class WrappedSocket(object):
)
_assert_no_error(result)
- def _set_alpn_protocols(self, protocols):
- """
- Sets up the ALPN protocols on the context.
- """
- if not protocols:
- return
- protocols_arr = _create_cfstring_array(protocols)
- try:
- result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
- _assert_no_error(result)
- finally:
- CoreFoundation.CFRelease(protocols_arr)
-
+ def _set_alpn_protocols(self, protocols):
+ """
+ Sets up the ALPN protocols on the context.
+ """
+ if not protocols:
+ return
+ protocols_arr = _create_cfstring_array(protocols)
+ try:
+ result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
+ _assert_no_error(result)
+ finally:
+ CoreFoundation.CFRelease(protocols_arr)
+
def _custom_validate(self, verify, trust_bundle):
"""
Called when we have set custom validation. We do this in two cases:
first, when cert validation is entirely disabled; and second, when
using a custom trust DB.
- Raises an SSLError if the connection is not trusted.
+ Raises an SSLError if the connection is not trusted.
"""
# If we disabled cert validation, just say: cool.
if not verify:
return
- successes = (
- SecurityConst.kSecTrustResultUnspecified,
- SecurityConst.kSecTrustResultProceed,
- )
- try:
- trust_result = self._evaluate_trust(trust_bundle)
- if trust_result in successes:
- return
- reason = "error code: %d" % (trust_result,)
- except Exception as e:
- # Do not trust on error
- reason = "exception: %r" % (e,)
-
- # SecureTransport neither sends an alert nor shuts down the connection.
- rec = _build_tls_unknown_ca_alert(self.version())
- self.socket.sendall(rec)
- # close the connection immediately
- # l_onoff = 1, activate linger
- # l_linger = 0, linger for 0 seconds
- opts = struct.pack("ii", 1, 0)
- self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
- self.close()
- raise ssl.SSLError("certificate verify failed, %s" % reason)
-
- def _evaluate_trust(self, trust_bundle):
+ successes = (
+ SecurityConst.kSecTrustResultUnspecified,
+ SecurityConst.kSecTrustResultProceed,
+ )
+ try:
+ trust_result = self._evaluate_trust(trust_bundle)
+ if trust_result in successes:
+ return
+ reason = "error code: %d" % (trust_result,)
+ except Exception as e:
+ # Do not trust on error
+ reason = "exception: %r" % (e,)
+
+ # SecureTransport neither sends an alert nor shuts down the connection.
+ rec = _build_tls_unknown_ca_alert(self.version())
+ self.socket.sendall(rec)
+ # close the connection immediately
+ # l_onoff = 1, activate linger
+ # l_linger = 0, linger for 0 seconds
+ opts = struct.pack("ii", 1, 0)
+ self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
+ self.close()
+ raise ssl.SSLError("certificate verify failed, %s" % reason)
+
+ def _evaluate_trust(self, trust_bundle):
# We want data in memory, so load it up.
if os.path.isfile(trust_bundle):
- with open(trust_bundle, "rb") as f:
+ with open(trust_bundle, "rb") as f:
trust_bundle = f.read()
cert_array = None
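[editor's note: on the SO_LINGER trick in _custom_validate above: packing l_onoff=1, l_linger=0 makes the following close() drop the TCP connection with a RST instead of a graceful FIN. A standalone sketch:

    import socket
    import struct

    opts = struct.pack("ii", 1, 0)  # l_onoff=1 (linger on), l_linger=0 (RST on close)
    sock = socket.socket()
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
]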
@@ -448,7 +448,7 @@ class WrappedSocket(object):
# created for this connection, shove our CAs into it, tell ST to
# ignore everything else it knows, and then ask if it can build a
# chain. This is a buuuunch of code.
- result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
+ result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
_assert_no_error(result)
if not trust:
raise ssl.SSLError("Failed to copy trust reference")
@@ -460,29 +460,29 @@ class WrappedSocket(object):
_assert_no_error(result)
trust_result = Security.SecTrustResultType()
- result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result))
+ result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result))
_assert_no_error(result)
finally:
if trust:
CoreFoundation.CFRelease(trust)
- if cert_array is not None:
+ if cert_array is not None:
CoreFoundation.CFRelease(cert_array)
- return trust_result.value
-
- def handshake(
- self,
- server_hostname,
- verify,
- trust_bundle,
- min_version,
- max_version,
- client_cert,
- client_key,
- client_key_passphrase,
- alpn_protocols,
- ):
+ return trust_result.value
+
+ def handshake(
+ self,
+ server_hostname,
+ verify,
+ trust_bundle,
+ min_version,
+ max_version,
+ client_cert,
+ client_key,
+ client_key_passphrase,
+ alpn_protocols,
+ ):
"""
Actually performs the TLS handshake. This is run automatically by
wrapped socket, and shouldn't be needed in user code.
@@ -512,7 +512,7 @@ class WrappedSocket(object):
# If we have a server hostname, we should set that too.
if server_hostname:
if not isinstance(server_hostname, bytes):
- server_hostname = server_hostname.encode("utf-8")
+ server_hostname = server_hostname.encode("utf-8")
result = Security.SSLSetPeerDomainName(
self.context, server_hostname, len(server_hostname)
@@ -522,13 +522,13 @@ class WrappedSocket(object):
# Setup the ciphers.
self._set_ciphers()
- # Setup the ALPN protocols.
- self._set_alpn_protocols(alpn_protocols)
-
+ # Setup the ALPN protocols.
+ self._set_alpn_protocols(alpn_protocols)
+
# Set the minimum and maximum TLS versions.
result = Security.SSLSetProtocolVersionMin(self.context, min_version)
_assert_no_error(result)
-
+
result = Security.SSLSetProtocolVersionMax(self.context, max_version)
_assert_no_error(result)
@@ -538,7 +538,7 @@ class WrappedSocket(object):
# authing in that case.
if not verify or trust_bundle is not None:
result = Security.SSLSetSessionOption(
- self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True
+ self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True
)
_assert_no_error(result)
@@ -548,7 +548,7 @@ class WrappedSocket(object):
self._client_cert_chain = _load_client_cert_chain(
self._keychain, client_cert, client_key
)
- result = Security.SSLSetCertificate(self.context, self._client_cert_chain)
+ result = Security.SSLSetCertificate(self.context, self._client_cert_chain)
_assert_no_error(result)
while True:
@@ -599,7 +599,7 @@ class WrappedSocket(object):
# There are some result codes that we want to treat as "not always
# errors". Specifically, those are errSSLWouldBlock,
# errSSLClosedGraceful, and errSSLClosedNoNotify.
- if result == SecurityConst.errSSLWouldBlock:
+ if result == SecurityConst.errSSLWouldBlock:
# If we didn't process any bytes, then this was just a time out.
# However, we can get errSSLWouldBlock in situations when we *did*
# read some data, and in those cases we should just read "short"
@@ -607,10 +607,10 @@ class WrappedSocket(object):
if processed_bytes.value == 0:
# Timed out, no data read.
raise socket.timeout("recv timed out")
- elif result in (
- SecurityConst.errSSLClosedGraceful,
- SecurityConst.errSSLClosedNoNotify,
- ):
+ elif result in (
+ SecurityConst.errSSLClosedGraceful,
+ SecurityConst.errSSLClosedNoNotify,
+ ):
# The remote peer has closed this connection. We should do so as
# well. Note that we don't actually return here because in
# principle this could actually be fired along with return data.
@@ -649,7 +649,7 @@ class WrappedSocket(object):
def sendall(self, data):
total_sent = 0
while total_sent < len(data):
- sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE])
+ sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE])
total_sent += sent
def shutdown(self):
@@ -696,14 +696,14 @@ class WrappedSocket(object):
# instead to just flag to urllib3 that it shouldn't do its own hostname
# validation when using SecureTransport.
if not binary_form:
- raise ValueError("SecureTransport only supports dumping binary certs")
+ raise ValueError("SecureTransport only supports dumping binary certs")
trust = Security.SecTrustRef()
certdata = None
der_bytes = None
try:
# Grab the trust store.
- result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
+ result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
_assert_no_error(result)
if not trust:
# Probably we haven't done the handshake yet. No biggie.
@@ -733,27 +733,27 @@ class WrappedSocket(object):
return der_bytes
- def version(self):
- protocol = Security.SSLProtocol()
- result = Security.SSLGetNegotiatedProtocolVersion(
- self.context, ctypes.byref(protocol)
- )
- _assert_no_error(result)
- if protocol.value == SecurityConst.kTLSProtocol13:
- raise ssl.SSLError("SecureTransport does not support TLS 1.3")
- elif protocol.value == SecurityConst.kTLSProtocol12:
- return "TLSv1.2"
- elif protocol.value == SecurityConst.kTLSProtocol11:
- return "TLSv1.1"
- elif protocol.value == SecurityConst.kTLSProtocol1:
- return "TLSv1"
- elif protocol.value == SecurityConst.kSSLProtocol3:
- return "SSLv3"
- elif protocol.value == SecurityConst.kSSLProtocol2:
- return "SSLv2"
- else:
- raise ssl.SSLError("Unknown TLS version: %r" % protocol)
-
+ def version(self):
+ protocol = Security.SSLProtocol()
+ result = Security.SSLGetNegotiatedProtocolVersion(
+ self.context, ctypes.byref(protocol)
+ )
+ _assert_no_error(result)
+ if protocol.value == SecurityConst.kTLSProtocol13:
+ raise ssl.SSLError("SecureTransport does not support TLS 1.3")
+ elif protocol.value == SecurityConst.kTLSProtocol12:
+ return "TLSv1.2"
+ elif protocol.value == SecurityConst.kTLSProtocol11:
+ return "TLSv1.1"
+ elif protocol.value == SecurityConst.kTLSProtocol1:
+ return "TLSv1"
+ elif protocol.value == SecurityConst.kSSLProtocol3:
+ return "SSLv3"
+ elif protocol.value == SecurityConst.kSSLProtocol2:
+ return "SSLv2"
+ else:
+ raise ssl.SSLError("Unknown TLS version: %r" % protocol)
+
def _reuse(self):
self._makefile_refs += 1
@@ -765,21 +765,21 @@ class WrappedSocket(object):
if _fileobject: # Platform-specific: Python 2
-
+
def makefile(self, mode, bufsize=-1):
self._makefile_refs += 1
return _fileobject(self, mode, bufsize, close=True)
-
-
+
+
else: # Platform-specific: Python 3
-
+
def makefile(self, mode="r", buffering=None, *args, **kwargs):
# We disable buffering with SecureTransport because it conflicts with
# the buffering that ST does internally (see issue #1153 for more).
buffering = 0
return backport_makefile(self, mode, buffering, *args, **kwargs)
-
+
WrappedSocket.makefile = makefile
@@ -789,7 +789,7 @@ class SecureTransportContext(object):
interface of the standard library ``SSLContext`` object to calls into
SecureTransport.
"""
-
+
def __init__(self, protocol):
self._min_version, self._max_version = _protocol_to_min_max[protocol]
self._options = 0
@@ -798,7 +798,7 @@ class SecureTransportContext(object):
self._client_cert = None
self._client_key = None
self._client_key_passphrase = None
- self._alpn_protocols = None
+ self._alpn_protocols = None
@property
def check_hostname(self):
@@ -857,18 +857,18 @@ class SecureTransportContext(object):
def set_ciphers(self, ciphers):
# For now, we just require the default cipher string.
if ciphers != util.ssl_.DEFAULT_CIPHERS:
- raise ValueError("SecureTransport doesn't support custom cipher strings")
+ raise ValueError("SecureTransport doesn't support custom cipher strings")
def load_verify_locations(self, cafile=None, capath=None, cadata=None):
# OK, we only really support cadata and cafile.
if capath is not None:
- raise ValueError("SecureTransport does not support cert directories")
-
- # Raise if cafile does not exist.
- if cafile is not None:
- with open(cafile):
- pass
+ raise ValueError("SecureTransport does not support cert directories")
+ # Raise if cafile does not exist.
+ if cafile is not None:
+ with open(cafile):
+ pass
+
self._trust_bundle = cafile or cadata
def load_cert_chain(self, certfile, keyfile=None, password=None):
@@ -876,26 +876,26 @@ class SecureTransportContext(object):
self._client_key = keyfile
self._client_cert_passphrase = password
- def set_alpn_protocols(self, protocols):
- """
- Sets the ALPN protocols that will later be set on the context.
-
- Raises a NotImplementedError if ALPN is not supported.
- """
- if not hasattr(Security, "SSLSetALPNProtocols"):
- raise NotImplementedError(
- "SecureTransport supports ALPN only in macOS 10.12+"
- )
- self._alpn_protocols = [six.ensure_binary(p) for p in protocols]
-
- def wrap_socket(
- self,
- sock,
- server_side=False,
- do_handshake_on_connect=True,
- suppress_ragged_eofs=True,
- server_hostname=None,
- ):
+ def set_alpn_protocols(self, protocols):
+ """
+ Sets the ALPN protocols that will later be set on the context.
+
+ Raises a NotImplementedError if ALPN is not supported.
+ """
+ if not hasattr(Security, "SSLSetALPNProtocols"):
+ raise NotImplementedError(
+ "SecureTransport supports ALPN only in macOS 10.12+"
+ )
+ self._alpn_protocols = [six.ensure_binary(p) for p in protocols]
+
+ def wrap_socket(
+ self,
+ sock,
+ server_side=False,
+ do_handshake_on_connect=True,
+ suppress_ragged_eofs=True,
+ server_hostname=None,
+ ):
# So, what do we do here? Firstly, we assert some properties. This is a
# stripped down shim, so there is some functionality we don't support.
# See PEP 543 for the real deal.
@@ -909,14 +909,14 @@ class SecureTransportContext(object):
# Now we can handshake
wrapped_socket.handshake(
- server_hostname,
- self._verify,
- self._trust_bundle,
- self._min_version,
- self._max_version,
- self._client_cert,
- self._client_key,
- self._client_key_passphrase,
- self._alpn_protocols,
+ server_hostname,
+ self._verify,
+ self._trust_bundle,
+ self._min_version,
+ self._max_version,
+ self._client_cert,
+ self._client_key,
+ self._client_key_passphrase,
+ self._alpn_protocols,
)
return wrapped_socket
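[editor's note: ALPN here is gated on the OS: set_alpn_protocols raises NotImplementedError unless the Security framework exports SSLSetALPNProtocols (macOS 10.12+). A hedged sketch:

    import ssl

    ctx = SecureTransportContext(ssl.PROTOCOL_TLS)  # assumes ssl.PROTOCOL_TLS is a valid key
    try:
        ctx.set_alpn_protocols(["h2", "http/1.1"])
    except NotImplementedError:
        pass  # older macOS: proceed without ALPN
]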
diff --git a/contrib/python/urllib3/urllib3/contrib/socks.py b/contrib/python/urllib3/urllib3/contrib/socks.py
index c326e80dd1..d9e2b49473 100644
--- a/contrib/python/urllib3/urllib3/contrib/socks.py
+++ b/contrib/python/urllib3/urllib3/contrib/socks.py
@@ -1,42 +1,42 @@
# -*- coding: utf-8 -*-
"""
This module contains provisional support for SOCKS proxies from within
-urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and
+urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and
SOCKS5. To enable its functionality, either install PySocks or install this
module with the ``socks`` extra.
The SOCKS implementation supports the full range of urllib3 features. It also
supports the following SOCKS features:
-- SOCKS4A (``proxy_url='socks4a://...``)
-- SOCKS4 (``proxy_url='socks4://...``)
-- SOCKS5 with remote DNS (``proxy_url='socks5h://...``)
-- SOCKS5 with local DNS (``proxy_url='socks5://...``)
+- SOCKS4A (``proxy_url='socks4a://...``)
+- SOCKS4 (``proxy_url='socks4://...``)
+- SOCKS5 with remote DNS (``proxy_url='socks5h://...``)
+- SOCKS5 with local DNS (``proxy_url='socks5://...``)
- Usernames and passwords for the SOCKS proxy
-.. note::
- It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
- your ``proxy_url`` to ensure that DNS resolution is done from the remote
- server instead of client-side when connecting to a domain name.
-
-SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
-supports IPv4, IPv6, and domain names.
-
-When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
-will be sent as the ``userid`` section of the SOCKS request:
-
-.. code-block:: python
-
- proxy_url="socks4a://<userid>@proxy-host"
-
-When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
-of the ``proxy_url`` will be sent as the username/password to authenticate
-with the proxy:
-
-.. code-block:: python
-
- proxy_url="socks5h://<username>:<password>@proxy-host"
-
+.. note::
+ It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
+ your ``proxy_url`` to ensure that DNS resolution is done from the remote
+ server instead of client-side when connecting to a domain name.
+
+SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
+supports IPv4, IPv6, and domain names.
+
+When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
+will be sent as the ``userid`` section of the SOCKS request:
+
+.. code-block:: python
+
+ proxy_url="socks4a://<userid>@proxy-host"
+
+When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
+of the ``proxy_url`` will be sent as the username/password to authenticate
+with the proxy:
+
+.. code-block:: python
+
+ proxy_url="socks5h://<username>:<password>@proxy-host"
+
"""
from __future__ import absolute_import
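[editor's note: putting the schemes together, a hedged usage sketch; the proxy address and credentials are hypothetical:

    from urllib3.contrib.socks import SOCKSProxyManager

    # socks5h -> DNS is resolved by the proxy; auth may also be embedded in the URL
    proxy = SOCKSProxyManager("socks5h://alice:secret@127.0.0.1:1080/")
    response = proxy.request("GET", "https://example.com/")
]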
@@ -44,24 +44,24 @@ try:
import socks
except ImportError:
import warnings
-
+
from ..exceptions import DependencyWarning
- warnings.warn(
- (
- "SOCKS support in urllib3 requires the installation of optional "
- "dependencies: specifically, PySocks. For more information, see "
- "https://urllib3.readthedocs.io/en/1.26.x/contrib.html#socks-proxies"
+ warnings.warn(
+ (
+ "SOCKS support in urllib3 requires the installation of optional "
+ "dependencies: specifically, PySocks. For more information, see "
+ "https://urllib3.readthedocs.io/en/1.26.x/contrib.html#socks-proxies"
),
- DependencyWarning,
+ DependencyWarning,
)
raise
-from socket import error as SocketError
-from socket import timeout as SocketTimeout
+from socket import error as SocketError
+from socket import timeout as SocketTimeout
-from ..connection import HTTPConnection, HTTPSConnection
-from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+from ..connection import HTTPConnection, HTTPSConnection
+from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from ..exceptions import ConnectTimeoutError, NewConnectionError
from ..poolmanager import PoolManager
from ..util.url import parse_url
@@ -76,9 +76,9 @@ class SOCKSConnection(HTTPConnection):
"""
A plain-text HTTP connection that connects via a SOCKS proxy.
"""
-
+
def __init__(self, *args, **kwargs):
- self._socks_options = kwargs.pop("_socks_options")
+ self._socks_options = kwargs.pop("_socks_options")
super(SOCKSConnection, self).__init__(*args, **kwargs)
def _new_conn(self):
@@ -87,30 +87,30 @@ class SOCKSConnection(HTTPConnection):
"""
extra_kw = {}
if self.source_address:
- extra_kw["source_address"] = self.source_address
+ extra_kw["source_address"] = self.source_address
if self.socket_options:
- extra_kw["socket_options"] = self.socket_options
+ extra_kw["socket_options"] = self.socket_options
try:
conn = socks.create_connection(
(self.host, self.port),
- proxy_type=self._socks_options["socks_version"],
- proxy_addr=self._socks_options["proxy_host"],
- proxy_port=self._socks_options["proxy_port"],
- proxy_username=self._socks_options["username"],
- proxy_password=self._socks_options["password"],
- proxy_rdns=self._socks_options["rdns"],
+ proxy_type=self._socks_options["socks_version"],
+ proxy_addr=self._socks_options["proxy_host"],
+ proxy_port=self._socks_options["proxy_port"],
+ proxy_username=self._socks_options["username"],
+ proxy_password=self._socks_options["password"],
+ proxy_rdns=self._socks_options["rdns"],
timeout=self.timeout,
**extra_kw
)
- except SocketTimeout:
+ except SocketTimeout:
raise ConnectTimeoutError(
- self,
- "Connection to %s timed out. (connect timeout=%s)"
- % (self.host, self.timeout),
- )
+ self,
+ "Connection to %s timed out. (connect timeout=%s)"
+ % (self.host, self.timeout),
+ )
except socks.ProxyError as e:
# This is fragile as hell, but it seems to be the only way to raise
@@ -120,22 +120,22 @@ class SOCKSConnection(HTTPConnection):
if isinstance(error, SocketTimeout):
raise ConnectTimeoutError(
self,
- "Connection to %s timed out. (connect timeout=%s)"
- % (self.host, self.timeout),
+ "Connection to %s timed out. (connect timeout=%s)"
+ % (self.host, self.timeout),
)
else:
raise NewConnectionError(
- self, "Failed to establish a new connection: %s" % error
+ self, "Failed to establish a new connection: %s" % error
)
else:
raise NewConnectionError(
- self, "Failed to establish a new connection: %s" % e
+ self, "Failed to establish a new connection: %s" % e
)
except SocketError as e: # Defensive: PySocks should catch all these.
raise NewConnectionError(
- self, "Failed to establish a new connection: %s" % e
- )
+ self, "Failed to establish a new connection: %s" % e
+ )
return conn
@@ -161,53 +161,53 @@ class SOCKSProxyManager(PoolManager):
A version of the urllib3 ProxyManager that routes connections via the
defined SOCKS proxy.
"""
-
+
pool_classes_by_scheme = {
- "http": SOCKSHTTPConnectionPool,
- "https": SOCKSHTTPSConnectionPool,
+ "http": SOCKSHTTPConnectionPool,
+ "https": SOCKSHTTPSConnectionPool,
}
- def __init__(
- self,
- proxy_url,
- username=None,
- password=None,
- num_pools=10,
- headers=None,
- **connection_pool_kw
- ):
+ def __init__(
+ self,
+ proxy_url,
+ username=None,
+ password=None,
+ num_pools=10,
+ headers=None,
+ **connection_pool_kw
+ ):
parsed = parse_url(proxy_url)
- if username is None and password is None and parsed.auth is not None:
- split = parsed.auth.split(":")
- if len(split) == 2:
- username, password = split
- if parsed.scheme == "socks5":
+ if username is None and password is None and parsed.auth is not None:
+ split = parsed.auth.split(":")
+ if len(split) == 2:
+ username, password = split
+ if parsed.scheme == "socks5":
socks_version = socks.PROXY_TYPE_SOCKS5
rdns = False
- elif parsed.scheme == "socks5h":
+ elif parsed.scheme == "socks5h":
socks_version = socks.PROXY_TYPE_SOCKS5
rdns = True
- elif parsed.scheme == "socks4":
+ elif parsed.scheme == "socks4":
socks_version = socks.PROXY_TYPE_SOCKS4
rdns = False
- elif parsed.scheme == "socks4a":
+ elif parsed.scheme == "socks4a":
socks_version = socks.PROXY_TYPE_SOCKS4
rdns = True
else:
- raise ValueError("Unable to determine SOCKS version from %s" % proxy_url)
+ raise ValueError("Unable to determine SOCKS version from %s" % proxy_url)
self.proxy_url = proxy_url
socks_options = {
- "socks_version": socks_version,
- "proxy_host": parsed.host,
- "proxy_port": parsed.port,
- "username": username,
- "password": password,
- "rdns": rdns,
+ "socks_version": socks_version,
+ "proxy_host": parsed.host,
+ "proxy_port": parsed.port,
+ "username": username,
+ "password": password,
+ "rdns": rdns,
}
- connection_pool_kw["_socks_options"] = socks_options
+ connection_pool_kw["_socks_options"] = socks_options
super(SOCKSProxyManager, self).__init__(
num_pools, headers, **connection_pool_kw
diff --git a/contrib/python/urllib3/urllib3/exceptions.py b/contrib/python/urllib3/urllib3/exceptions.py
index cba6f3f560..f3d99191a4 100644
--- a/contrib/python/urllib3/urllib3/exceptions.py
+++ b/contrib/python/urllib3/urllib3/exceptions.py
@@ -1,25 +1,25 @@
from __future__ import absolute_import
-
-from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead
-
+
+from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead
+
# Base Exceptions
class HTTPError(Exception):
- """Base exception used by this module."""
-
+ """Base exception used by this module."""
+
pass
class HTTPWarning(Warning):
- """Base warning used by this module."""
-
+ """Base warning used by this module."""
+
pass
class PoolError(HTTPError):
- """Base exception for errors caused within a pool."""
-
+ """Base exception for errors caused within a pool."""
+
def __init__(self, pool, message):
self.pool = pool
HTTPError.__init__(self, "%s: %s" % (pool, message))
@@ -30,8 +30,8 @@ class PoolError(HTTPError):
class RequestError(PoolError):
- """Base exception for PoolErrors that have associated URLs."""
-
+ """Base exception for PoolErrors that have associated URLs."""
+
def __init__(self, pool, url, message):
self.url = url
PoolError.__init__(self, pool, message)
@@ -42,28 +42,28 @@ class RequestError(PoolError):
class SSLError(HTTPError):
- """Raised when SSL certificate fails in an HTTPS connection."""
-
+ """Raised when SSL certificate fails in an HTTPS connection."""
+
pass
class ProxyError(HTTPError):
- """Raised when the connection to a proxy fails."""
-
- def __init__(self, message, error, *args):
- super(ProxyError, self).__init__(message, error, *args)
- self.original_error = error
+ """Raised when the connection to a proxy fails."""
+ def __init__(self, message, error, *args):
+ super(ProxyError, self).__init__(message, error, *args)
+ self.original_error = error
+
class DecodeError(HTTPError):
- """Raised when automatic decoding based on Content-Type fails."""
-
+ """Raised when automatic decoding based on Content-Type fails."""
+
pass
class ProtocolError(HTTPError):
- """Raised when something unexpected happens mid-request/response."""
-
+ """Raised when something unexpected happens mid-request/response."""
+
pass
@@ -73,7 +73,7 @@ ConnectionError = ProtocolError
# Leaf Exceptions
-
+
class MaxRetryError(RequestError):
"""Raised when the maximum number of retries is exceeded.
@@ -87,13 +87,13 @@ class MaxRetryError(RequestError):
def __init__(self, pool, url, reason=None):
self.reason = reason
- message = "Max retries exceeded with url: %s (Caused by %r)" % (url, reason)
+ message = "Max retries exceeded with url: %s (Caused by %r)" % (url, reason)
RequestError.__init__(self, pool, url, message)
class HostChangedError(RequestError):
- """Raised when an existing pool gets a request for a foreign host."""
+ """Raised when an existing pool gets a request for a foreign host."""
def __init__(self, pool, url, retries=3):
message = "Tried to open a foreign host with url: %s" % url
@@ -102,61 +102,61 @@ class HostChangedError(RequestError):
class TimeoutStateError(HTTPError):
- """Raised when passing an invalid state to a timeout"""
-
+ """Raised when passing an invalid state to a timeout"""
+
pass
class TimeoutError(HTTPError):
- """Raised when a socket timeout error occurs.
+ """Raised when a socket timeout error occurs.
Catching this error will catch both :exc:`ReadTimeoutErrors
<ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
"""
-
+
pass
class ReadTimeoutError(TimeoutError, RequestError):
- """Raised when a socket timeout occurs while receiving data from a server"""
-
+ """Raised when a socket timeout occurs while receiving data from a server"""
+
pass
# This timeout error does not have a URL attached and needs to inherit from the
# base HTTPError
class ConnectTimeoutError(TimeoutError):
- """Raised when a socket timeout occurs while connecting to a server"""
-
+ """Raised when a socket timeout occurs while connecting to a server"""
+
pass
class NewConnectionError(ConnectTimeoutError, PoolError):
- """Raised when we fail to establish a new connection. Usually ECONNREFUSED."""
-
+ """Raised when we fail to establish a new connection. Usually ECONNREFUSED."""
+
pass
class EmptyPoolError(PoolError):
- """Raised when a pool runs out of connections and no more are allowed."""
-
+ """Raised when a pool runs out of connections and no more are allowed."""
+
pass
class ClosedPoolError(PoolError):
- """Raised when a request enters a pool after the pool has been closed."""
-
+ """Raised when a request enters a pool after the pool has been closed."""
+
pass
class LocationValueError(ValueError, HTTPError):
- """Raised when there is something wrong with a given URL input."""
-
+ """Raised when there is something wrong with a given URL input."""
+
pass
class LocationParseError(LocationValueError):
- """Raised when get_host or similar fails to parse the URL input."""
+ """Raised when get_host or similar fails to parse the URL input."""
def __init__(self, location):
message = "Failed to parse: %s" % location
@@ -165,56 +165,56 @@ class LocationParseError(LocationValueError):
self.location = location
-class URLSchemeUnknown(LocationValueError):
- """Raised when a URL input has an unsupported scheme."""
-
- def __init__(self, scheme):
- message = "Not supported URL scheme %s" % scheme
- super(URLSchemeUnknown, self).__init__(message)
-
- self.scheme = scheme
-
-
+class URLSchemeUnknown(LocationValueError):
+ """Raised when a URL input has an unsupported scheme."""
+
+ def __init__(self, scheme):
+ message = "Not supported URL scheme %s" % scheme
+ super(URLSchemeUnknown, self).__init__(message)
+
+ self.scheme = scheme
+
+
class ResponseError(HTTPError):
- """Used as a container for an error reason supplied in a MaxRetryError."""
-
- GENERIC_ERROR = "too many error responses"
- SPECIFIC_ERROR = "too many {status_code} error responses"
+ """Used as a container for an error reason supplied in a MaxRetryError."""
+
+ GENERIC_ERROR = "too many error responses"
+ SPECIFIC_ERROR = "too many {status_code} error responses"
class SecurityWarning(HTTPWarning):
- """Warned when performing security reducing actions"""
-
+ """Warned when performing security reducing actions"""
+
pass
class SubjectAltNameWarning(SecurityWarning):
- """Warned when connecting to a host with a certificate missing a SAN."""
-
+ """Warned when connecting to a host with a certificate missing a SAN."""
+
pass
class InsecureRequestWarning(SecurityWarning):
- """Warned when making an unverified HTTPS request."""
-
+ """Warned when making an unverified HTTPS request."""
+
pass
class SystemTimeWarning(SecurityWarning):
- """Warned when system time is suspected to be wrong"""
-
+ """Warned when system time is suspected to be wrong"""
+
pass
class InsecurePlatformWarning(SecurityWarning):
- """Warned when certain TLS/SSL configuration is not available on a platform."""
-
+ """Warned when certain TLS/SSL configuration is not available on a platform."""
+
pass
class SNIMissingWarning(HTTPWarning):
- """Warned when making a HTTPS request without SNI available."""
-
+ """Warned when making a HTTPS request without SNI available."""
+
pass
@@ -223,22 +223,22 @@ class DependencyWarning(HTTPWarning):
Warned when an attempt is made to import a module with missing optional
dependencies.
"""
-
- pass
-
-
-class ResponseNotChunked(ProtocolError, ValueError):
- """Response needs to be chunked in order to read it as chunks."""
-
+
pass
+class ResponseNotChunked(ProtocolError, ValueError):
+ """Response needs to be chunked in order to read it as chunks."""
+
+ pass
+
+
class BodyNotHttplibCompatible(HTTPError):
"""
- Body should be :class:`http.client.HTTPResponse` like
- (have an fp attribute which returns raw chunks) for read_chunked().
+ Body should be :class:`http.client.HTTPResponse` like
+ (have an fp attribute which returns raw chunks) for read_chunked().
"""
-
+
pass
@@ -246,78 +246,78 @@ class IncompleteRead(HTTPError, httplib_IncompleteRead):
"""
Response length doesn't match expected Content-Length
- Subclass of :class:`http.client.IncompleteRead` to allow int value
- for ``partial`` to avoid creating large objects on streamed reads.
+ Subclass of :class:`http.client.IncompleteRead` to allow int value
+ for ``partial`` to avoid creating large objects on streamed reads.
"""
-
+
def __init__(self, partial, expected):
super(IncompleteRead, self).__init__(partial, expected)
def __repr__(self):
- return "IncompleteRead(%i bytes read, %i more expected)" % (
- self.partial,
- self.expected,
- )
-
-
-class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
- """Invalid chunk length in a chunked response."""
-
- def __init__(self, response, length):
- super(InvalidChunkLength, self).__init__(
- response.tell(), response.length_remaining
- )
- self.response = response
- self.length = length
-
- def __repr__(self):
- return "InvalidChunkLength(got length %r, %i bytes read)" % (
- self.length,
- self.partial,
- )
-
-
+ return "IncompleteRead(%i bytes read, %i more expected)" % (
+ self.partial,
+ self.expected,
+ )
+
+
+class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
+ """Invalid chunk length in a chunked response."""
+
+ def __init__(self, response, length):
+ super(InvalidChunkLength, self).__init__(
+ response.tell(), response.length_remaining
+ )
+ self.response = response
+ self.length = length
+
+ def __repr__(self):
+ return "InvalidChunkLength(got length %r, %i bytes read)" % (
+ self.length,
+ self.partial,
+ )
+
+
class InvalidHeader(HTTPError):
- """The header provided was somehow invalid."""
-
+ """The header provided was somehow invalid."""
+
pass
-class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
- """ProxyManager does not support the supplied scheme"""
-
+class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
+ """ProxyManager does not support the supplied scheme"""
+
# TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
def __init__(self, scheme):
- # 'localhost' is here because our URL parser parses
- # localhost:8080 -> scheme=localhost, remove if we fix this.
- if scheme == "localhost":
- scheme = None
- if scheme is None:
- message = "Proxy URL had no scheme, should start with http:// or https://"
- else:
- message = (
- "Proxy URL had unsupported scheme %s, should use http:// or https://"
- % scheme
- )
+ # 'localhost' is here because our URL parser parses
+ # localhost:8080 -> scheme=localhost, remove if we fix this.
+ if scheme == "localhost":
+ scheme = None
+ if scheme is None:
+ message = "Proxy URL had no scheme, should start with http:// or https://"
+ else:
+ message = (
+ "Proxy URL had unsupported scheme %s, should use http:// or https://"
+ % scheme
+ )
super(ProxySchemeUnknown, self).__init__(message)
-class ProxySchemeUnsupported(ValueError):
- """Fetching HTTPS resources through HTTPS proxies is unsupported"""
-
- pass
-
-
+class ProxySchemeUnsupported(ValueError):
+ """Fetching HTTPS resources through HTTPS proxies is unsupported"""
+
+ pass
+
+
class HeaderParsingError(HTTPError):
- """Raised by assert_header_parsing, but we convert it to a log.warning statement."""
-
+ """Raised by assert_header_parsing, but we convert it to a log.warning statement."""
+
def __init__(self, defects, unparsed_data):
- message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data)
+ message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data)
super(HeaderParsingError, self).__init__(message)
class UnrewindableBodyError(HTTPError):
- """urllib3 encountered an error when trying to rewind a body"""
-
+ """urllib3 encountered an error when trying to rewind a body"""
+
pass
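
The module above defines a shallow hierarchy: everything derives from HTTPError (or HTTPWarning), PoolError/RequestError add pool and URL context, and MaxRetryError wraps the terminal failure in its ``reason`` attribute. A sketch of how callers typically consume it; the host, port, and retry/timeout values are throwaway examples chosen to fail fast:

import urllib3
from urllib3.exceptions import HTTPError, MaxRetryError

http = urllib3.PoolManager()
try:
    # Port 9 is almost certainly closed, so retries are exhausted quickly.
    http.request("GET", "http://127.0.0.1:9/", retries=1, timeout=0.5)
except MaxRetryError as e:
    # RequestError supplies .pool and .url; MaxRetryError adds .reason,
    # here typically a NewConnectionError instance.
    print("gave up on %s: %r" % (e.url, e.reason))
except HTTPError as e:
    # Any other error raised by urllib3 itself lands here.
    print("other urllib3 failure: %r" % e)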
diff --git a/contrib/python/urllib3/urllib3/fields.py b/contrib/python/urllib3/urllib3/fields.py
index 9d630f491d..649e0070d9 100644
--- a/contrib/python/urllib3/urllib3/fields.py
+++ b/contrib/python/urllib3/urllib3/fields.py
@@ -1,13 +1,13 @@
from __future__ import absolute_import
-
+
import email.utils
import mimetypes
-import re
+import re
from .packages import six
-def guess_content_type(filename, default="application/octet-stream"):
+def guess_content_type(filename, default="application/octet-stream"):
"""
Guess the "Content-Type" of a file.
@@ -21,143 +21,143 @@ def guess_content_type(filename, default="application/octet-stream"):
return default
-def format_header_param_rfc2231(name, value):
+def format_header_param_rfc2231(name, value):
"""
- Helper function to format and quote a single header parameter using the
- strategy defined in RFC 2231.
+ Helper function to format and quote a single header parameter using the
+ strategy defined in RFC 2231.
Particularly useful for header parameters which might contain
- non-ASCII values, like file names. This follows
- `RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.
+ non-ASCII values, like file names. This follows
+ `RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.
:param name:
The name of the parameter, a string expected to be ASCII only.
:param value:
- The value of the parameter, provided as ``bytes`` or ``str``.
- :ret:
- An RFC-2231-formatted unicode string.
+ The value of the parameter, provided as ``bytes`` or ``str``.
+ :ret:
+ An RFC-2231-formatted unicode string.
"""
- if isinstance(value, six.binary_type):
- value = value.decode("utf-8")
-
+ if isinstance(value, six.binary_type):
+ value = value.decode("utf-8")
+
if not any(ch in value for ch in '"\\\r\n'):
- result = u'%s="%s"' % (name, value)
+ result = u'%s="%s"' % (name, value)
try:
- result.encode("ascii")
+ result.encode("ascii")
except (UnicodeEncodeError, UnicodeDecodeError):
pass
else:
return result
-
- if six.PY2: # Python 2:
- value = value.encode("utf-8")
-
- # encode_rfc2231 accepts an encoded string and returns an ascii-encoded
- # string in Python 2 but accepts and returns unicode strings in Python 3
- value = email.utils.encode_rfc2231(value, "utf-8")
- value = "%s*=%s" % (name, value)
-
- if six.PY2: # Python 2:
- value = value.decode("utf-8")
-
+
+ if six.PY2: # Python 2:
+ value = value.encode("utf-8")
+
+ # encode_rfc2231 accepts an encoded string and returns an ascii-encoded
+ # string in Python 2 but accepts and returns unicode strings in Python 3
+ value = email.utils.encode_rfc2231(value, "utf-8")
+ value = "%s*=%s" % (name, value)
+
+ if six.PY2: # Python 2:
+ value = value.decode("utf-8")
+
return value
-_HTML5_REPLACEMENTS = {
- u"\u0022": u"%22",
- # Replace "\" with "\\".
- u"\u005C": u"\u005C\u005C",
-}
-
-# All control characters from 0x00 to 0x1F *except* 0x1B.
-_HTML5_REPLACEMENTS.update(
- {
- six.unichr(cc): u"%{:02X}".format(cc)
- for cc in range(0x00, 0x1F + 1)
- if cc not in (0x1B,)
- }
-)
-
-
-def _replace_multiple(value, needles_and_replacements):
- def replacer(match):
- return needles_and_replacements[match.group(0)]
-
- pattern = re.compile(
- r"|".join([re.escape(needle) for needle in needles_and_replacements.keys()])
- )
-
- result = pattern.sub(replacer, value)
-
- return result
-
-
-def format_header_param_html5(name, value):
- """
- Helper function to format and quote a single header parameter using the
- HTML5 strategy.
-
- Particularly useful for header parameters which might contain
- non-ASCII values, like file names. This follows the `HTML5 Working Draft
- Section 4.10.22.7`_ and matches the behavior of curl and modern browsers.
-
- .. _HTML5 Working Draft Section 4.10.22.7:
- https://w3c.github.io/html/sec-forms.html#multipart-form-data
-
- :param name:
- The name of the parameter, a string expected to be ASCII only.
- :param value:
- The value of the parameter, provided as ``bytes`` or ``str``.
- :ret:
- A unicode string, stripped of troublesome characters.
- """
- if isinstance(value, six.binary_type):
- value = value.decode("utf-8")
-
- value = _replace_multiple(value, _HTML5_REPLACEMENTS)
-
- return u'%s="%s"' % (name, value)
-
-
-# For backwards-compatibility.
-format_header_param = format_header_param_html5
-
-
+_HTML5_REPLACEMENTS = {
+ u"\u0022": u"%22",
+ # Replace "\" with "\\".
+ u"\u005C": u"\u005C\u005C",
+}
+
+# All control characters from 0x00 to 0x1F *except* 0x1B.
+_HTML5_REPLACEMENTS.update(
+ {
+ six.unichr(cc): u"%{:02X}".format(cc)
+ for cc in range(0x00, 0x1F + 1)
+ if cc not in (0x1B,)
+ }
+)
+
+
+def _replace_multiple(value, needles_and_replacements):
+ def replacer(match):
+ return needles_and_replacements[match.group(0)]
+
+ pattern = re.compile(
+ r"|".join([re.escape(needle) for needle in needles_and_replacements.keys()])
+ )
+
+ result = pattern.sub(replacer, value)
+
+ return result
+
+
+def format_header_param_html5(name, value):
+ """
+ Helper function to format and quote a single header parameter using the
+ HTML5 strategy.
+
+ Particularly useful for header parameters which might contain
+ non-ASCII values, like file names. This follows the `HTML5 Working Draft
+ Section 4.10.22.7`_ and matches the behavior of curl and modern browsers.
+
+ .. _HTML5 Working Draft Section 4.10.22.7:
+ https://w3c.github.io/html/sec-forms.html#multipart-form-data
+
+ :param name:
+ The name of the parameter, a string expected to be ASCII only.
+ :param value:
+ The value of the parameter, provided as ``bytes`` or ``str``.
+ :ret:
+ A unicode string, stripped of troublesome characters.
+ """
+ if isinstance(value, six.binary_type):
+ value = value.decode("utf-8")
+
+ value = _replace_multiple(value, _HTML5_REPLACEMENTS)
+
+ return u'%s="%s"' % (name, value)
+
+
+# For backwards-compatibility.
+format_header_param = format_header_param_html5
+
+
class RequestField(object):
"""
A data container for request body parameters.
:param name:
- The name of this request field. Must be unicode.
+ The name of this request field. Must be unicode.
:param data:
The data/value body.
:param filename:
- An optional filename of the request field. Must be unicode.
+ An optional filename of the request field. Must be unicode.
:param headers:
An optional dict-like object of headers to initially use for the field.
- :param header_formatter:
- An optional callable that is used to encode and format the headers. By
- default, this is :func:`format_header_param_html5`.
+ :param header_formatter:
+ An optional callable that is used to encode and format the headers. By
+ default, this is :func:`format_header_param_html5`.
"""
-
- def __init__(
- self,
- name,
- data,
- filename=None,
- headers=None,
- header_formatter=format_header_param_html5,
- ):
+
+ def __init__(
+ self,
+ name,
+ data,
+ filename=None,
+ headers=None,
+ header_formatter=format_header_param_html5,
+ ):
self._name = name
self._filename = filename
self.data = data
self.headers = {}
if headers:
self.headers = dict(headers)
- self.header_formatter = header_formatter
+ self.header_formatter = header_formatter
@classmethod
- def from_tuples(cls, fieldname, value, header_formatter=format_header_param_html5):
+ def from_tuples(cls, fieldname, value, header_formatter=format_header_param_html5):
"""
A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
@@ -185,17 +185,17 @@ class RequestField(object):
content_type = None
data = value
- request_param = cls(
- fieldname, data, filename=filename, header_formatter=header_formatter
- )
+ request_param = cls(
+ fieldname, data, filename=filename, header_formatter=header_formatter
+ )
request_param.make_multipart(content_type=content_type)
return request_param
def _render_part(self, name, value):
"""
- Overridable helper function to format a single header parameter. By
- default, this calls ``self.header_formatter``.
+ Overridable helper function to format a single header parameter. By
+ default, this calls ``self.header_formatter``.
:param name:
The name of the parameter, a string expected to be ASCII only.
@@ -203,8 +203,8 @@ class RequestField(object):
The value of the parameter, provided as a unicode string.
"""
- return self.header_formatter(name, value)
-
+ return self.header_formatter(name, value)
+
def _render_parts(self, header_parts):
"""
Helper function to format and quote a single header.
@@ -213,7 +213,7 @@ class RequestField(object):
'Content-Disposition' fields.
:param header_parts:
- A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
+ A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
as `k1="v1"; k2="v2"; ...`.
"""
parts = []
@@ -225,7 +225,7 @@ class RequestField(object):
if value is not None:
parts.append(self._render_part(name, value))
- return u"; ".join(parts)
+ return u"; ".join(parts)
def render_headers(self):
"""
@@ -233,22 +233,22 @@ class RequestField(object):
"""
lines = []
- sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"]
+ sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"]
for sort_key in sort_keys:
if self.headers.get(sort_key, False):
- lines.append(u"%s: %s" % (sort_key, self.headers[sort_key]))
+ lines.append(u"%s: %s" % (sort_key, self.headers[sort_key]))
for header_name, header_value in self.headers.items():
if header_name not in sort_keys:
if header_value:
- lines.append(u"%s: %s" % (header_name, header_value))
+ lines.append(u"%s: %s" % (header_name, header_value))
- lines.append(u"\r\n")
- return u"\r\n".join(lines)
+ lines.append(u"\r\n")
+ return u"\r\n".join(lines)
- def make_multipart(
- self, content_disposition=None, content_type=None, content_location=None
- ):
+ def make_multipart(
+ self, content_disposition=None, content_type=None, content_location=None
+ ):
"""
Makes this request field into a multipart request field.
@@ -261,14 +261,14 @@ class RequestField(object):
The 'Content-Location' of the request body.
"""
- self.headers["Content-Disposition"] = content_disposition or u"form-data"
- self.headers["Content-Disposition"] += u"; ".join(
- [
- u"",
- self._render_parts(
- ((u"name", self._name), (u"filename", self._filename))
- ),
- ]
- )
- self.headers["Content-Type"] = content_type
- self.headers["Content-Location"] = content_location
+ self.headers["Content-Disposition"] = content_disposition or u"form-data"
+ self.headers["Content-Disposition"] += u"; ".join(
+ [
+ u"",
+ self._render_parts(
+ ((u"name", self._name), (u"filename", self._filename))
+ ),
+ ]
+ )
+ self.headers["Content-Type"] = content_type
+ self.headers["Content-Location"] = content_location
diff --git a/contrib/python/urllib3/urllib3/filepost.py b/contrib/python/urllib3/urllib3/filepost.py
index 36c9252c64..a38c1f0176 100644
--- a/contrib/python/urllib3/urllib3/filepost.py
+++ b/contrib/python/urllib3/urllib3/filepost.py
@@ -1,25 +1,25 @@
from __future__ import absolute_import
-
-import binascii
+
+import binascii
import codecs
-import os
+import os
from io import BytesIO
-from .fields import RequestField
+from .fields import RequestField
from .packages import six
from .packages.six import b
-writer = codecs.lookup("utf-8")[3]
+writer = codecs.lookup("utf-8")[3]
def choose_boundary():
"""
Our embarrassingly-simple replacement for mimetools.choose_boundary.
"""
- boundary = binascii.hexlify(os.urandom(16))
- if not six.PY2:
- boundary = boundary.decode("ascii")
- return boundary
+ boundary = binascii.hexlify(os.urandom(16))
+ if not six.PY2:
+ boundary = boundary.decode("ascii")
+ return boundary
def iter_field_objects(fields):
@@ -69,14 +69,14 @@ def encode_multipart_formdata(fields, boundary=None):
:param boundary:
If not specified, then a random boundary will be generated using
- :func:`urllib3.filepost.choose_boundary`.
+ :func:`urllib3.filepost.choose_boundary`.
"""
body = BytesIO()
if boundary is None:
boundary = choose_boundary()
for field in iter_field_objects(fields):
- body.write(b("--%s\r\n" % (boundary)))
+ body.write(b("--%s\r\n" % (boundary)))
writer(body).write(field.render_headers())
data = field.data
@@ -89,10 +89,10 @@ def encode_multipart_formdata(fields, boundary=None):
else:
body.write(data)
- body.write(b"\r\n")
+ body.write(b"\r\n")
- body.write(b("--%s--\r\n" % (boundary)))
+ body.write(b("--%s--\r\n" % (boundary)))
- content_type = str("multipart/form-data; boundary=%s" % boundary)
+ content_type = str("multipart/form-data; boundary=%s" % boundary)
return body.getvalue(), content_type
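
encode_multipart_formdata above writes each field between "--boundary" lines and returns the finished body plus a matching Content-Type. A sketch, with a fixed boundary only so the output is stable (normally choose_boundary() picks 16 random bytes); the field names and payloads are arbitrary examples:

from urllib3.filepost import encode_multipart_formdata

fields = {
    "token": "abc123",                                   # plain value
    "file": ("report.csv", b"a,b\n1,2\n", "text/csv"),   # (filename, data, type)
}
body, content_type = encode_multipart_formdata(fields, boundary="xBOUNDARYx")
print(content_type)          # multipart/form-data; boundary=xBOUNDARYx
print(body.decode("utf-8"))  # the rendered multipart body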
diff --git a/contrib/python/urllib3/urllib3/packages/backports/makefile.py b/contrib/python/urllib3/urllib3/packages/backports/makefile.py
index b8fb2154b6..91ef3daf4a 100644
--- a/contrib/python/urllib3/urllib3/packages/backports/makefile.py
+++ b/contrib/python/urllib3/urllib3/packages/backports/makefile.py
@@ -10,14 +10,14 @@ import io
from socket import SocketIO
-def backport_makefile(
- self, mode="r", buffering=None, encoding=None, errors=None, newline=None
-):
+def backport_makefile(
+ self, mode="r", buffering=None, encoding=None, errors=None, newline=None
+):
"""
Backport of ``socket.makefile`` from Python 3.5.
"""
- if not set(mode) <= {"r", "w", "b"}:
- raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
+ if not set(mode) <= {"r", "w", "b"}:
+ raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
writing = "w" in mode
reading = "r" in mode or not writing
assert reading or writing
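
backport_makefile above brings the Python 3.5 socket.makefile signature to Python 2 sockets; urllib3's pyopenssl contrib attaches it as a method on its wrapped socket class. Note the up-front guard: only combinations of "r", "w" and "b" are accepted. A tiny standalone illustration of that guard, with arbitrary example modes:

for mode in ("rb", "wb", "rwb", "r", "rt"):
    if not set(mode) <= {"r", "w", "b"}:
        print("invalid mode %r (only r, w, b allowed)" % (mode,))
    else:
        print("%r accepted" % (mode,))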
diff --git a/contrib/python/urllib3/urllib3/packages/six.py b/contrib/python/urllib3/urllib3/packages/six.py
index ba50acb062..3cb1e9b396 100644
--- a/contrib/python/urllib3/urllib3/packages/six.py
+++ b/contrib/python/urllib3/urllib3/packages/six.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2010-2020 Benjamin Peterson
+# Copyright (c) 2010-2020 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@@ -18,8 +18,8 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
-"""Utilities for writing code that runs on Python 2 and 3"""
-
+"""Utilities for writing code that runs on Python 2 and 3"""
+
from __future__ import absolute_import
import functools
@@ -29,7 +29,7 @@ import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
-__version__ = "1.16.0"
+__version__ = "1.16.0"
# Useful for very coarse version differentiation.
@@ -38,15 +38,15 @@ PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)
if PY3:
- string_types = (str,)
- integer_types = (int,)
- class_types = (type,)
+ string_types = (str,)
+ integer_types = (int,)
+ class_types = (type,)
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
- string_types = (basestring,)
+ string_types = (basestring,)
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
@@ -60,7 +60,7 @@ else:
class X(object):
def __len__(self):
return 1 << 31
-
+
try:
len(X())
except OverflowError:
@@ -71,12 +71,12 @@ else:
MAXSIZE = int((1 << 63) - 1)
del X
-if PY34:
- from importlib.util import spec_from_loader
-else:
- spec_from_loader = None
-
+if PY34:
+ from importlib.util import spec_from_loader
+else:
+ spec_from_loader = None
+
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
@@ -187,11 +187,11 @@ class _SixMetaPathImporter(object):
return self
return None
- def find_spec(self, fullname, path, target=None):
- if fullname in self.known_modules:
- return spec_from_loader(fullname, self)
- return None
-
+ def find_spec(self, fullname, path, target=None):
+ if fullname in self.known_modules:
+ return spec_from_loader(fullname, self)
+ return None
+
def __get_module(self, fullname):
try:
return self.known_modules[fullname]
@@ -227,42 +227,42 @@ class _SixMetaPathImporter(object):
Required, if is_package is implemented"""
self.__get_module(fullname) # eventually raises ImportError
return None
-
+
get_source = get_code # same as get_code
- def create_module(self, spec):
- return self.load_module(spec.name)
-
- def exec_module(self, module):
- pass
-
-
+ def create_module(self, spec):
+ return self.load_module(spec.name)
+
+ def exec_module(self, module):
+ pass
+
+
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
-
+
__path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
- MovedAttribute(
- "filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"
- ),
+ MovedAttribute(
+ "filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"
+ ),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
- MovedAttribute("getoutput", "commands", "subprocess"),
+ MovedAttribute("getoutput", "commands", "subprocess"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
- MovedAttribute(
- "reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"
- ),
+ MovedAttribute(
+ "reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"
+ ),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
@@ -271,35 +271,35 @@ _moved_attributes = [
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
- MovedAttribute(
- "zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"
- ),
+ MovedAttribute(
+ "zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"
+ ),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
- MovedModule(
- "collections_abc",
- "collections",
- "collections.abc" if sys.version_info >= (3, 3) else "collections",
- ),
+ MovedModule(
+ "collections_abc",
+ "collections",
+ "collections.abc" if sys.version_info >= (3, 3) else "collections",
+ ),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
- MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
- MovedModule(
- "_dummy_thread",
- "dummy_thread",
- "_dummy_thread" if sys.version_info < (3, 9) else "_thread",
- ),
+ MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
+ MovedModule(
+ "_dummy_thread",
+ "dummy_thread",
+ "_dummy_thread" if sys.version_info < (3, 9) else "_thread",
+ ),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
- MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
- MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
+ MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+ MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
- MovedModule(
- "email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"
- ),
+ MovedModule(
+ "email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"
+ ),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
@@ -318,12 +318,12 @@ _moved_attributes = [
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
- MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"),
- MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
- MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
@@ -333,9 +333,9 @@ _moved_attributes = [
]
# Add windows specific modules.
if sys.platform == "win32":
- _moved_attributes += [
- MovedModule("winreg", "_winreg"),
- ]
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
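
The MovedAttribute/MovedModule tables above (including the win32-only winreg entry) feed six's lazy import machinery, so a single six.moves name resolves to the right stdlib location on either major version. A quick sketch of consuming the vendored copy:

from urllib3.packages import six

http_client = six.moves.http_client  # httplib on Py2, http.client on Py3
print(http_client.responses[200])    # OK
print(six.moves.urllib.parse.urljoin("http://a/b", "c"))  # http://a/c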
@@ -369,14 +369,14 @@ _urllib_parse_moved_attributes = [
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
- MovedAttribute(
- "unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"
- ),
+ MovedAttribute(
+ "unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"
+ ),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
- MovedAttribute("splitvalue", "urllib", "urllib.parse"),
+ MovedAttribute("splitvalue", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
@@ -389,11 +389,11 @@ del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
-_importer._add_module(
- Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
- "moves.urllib_parse",
- "moves.urllib.parse",
-)
+_importer._add_module(
+ Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+ "moves.urllib_parse",
+ "moves.urllib.parse",
+)
class Module_six_moves_urllib_error(_LazyModule):
@@ -412,11 +412,11 @@ del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
-_importer._add_module(
- Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
- "moves.urllib_error",
- "moves.urllib.error",
-)
+_importer._add_module(
+ Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+ "moves.urllib_error",
+ "moves.urllib.error",
+)
class Module_six_moves_urllib_request(_LazyModule):
@@ -458,8 +458,8 @@ _urllib_request_moved_attributes = [
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
- MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
- MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
+ MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
+ MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
@@ -467,11 +467,11 @@ del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
-_importer._add_module(
- Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
- "moves.urllib_request",
- "moves.urllib.request",
-)
+_importer._add_module(
+ Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+ "moves.urllib_request",
+ "moves.urllib.request",
+)
class Module_six_moves_urllib_response(_LazyModule):
@@ -491,11 +491,11 @@ del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
-_importer._add_module(
- Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
- "moves.urllib_response",
- "moves.urllib.response",
-)
+_importer._add_module(
+ Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+ "moves.urllib_response",
+ "moves.urllib.response",
+)
class Module_six_moves_urllib_robotparser(_LazyModule):
@@ -504,27 +504,27 @@ class Module_six_moves_urllib_robotparser(_LazyModule):
_urllib_robotparser_moved_attributes = [
- MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
-Module_six_moves_urllib_robotparser._moved_attributes = (
- _urllib_robotparser_moved_attributes
-)
+Module_six_moves_urllib_robotparser._moved_attributes = (
+ _urllib_robotparser_moved_attributes
+)
-_importer._add_module(
- Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
- "moves.urllib_robotparser",
- "moves.urllib.robotparser",
-)
+_importer._add_module(
+ Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+ "moves.urllib_robotparser",
+ "moves.urllib.robotparser",
+)
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
-
+
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
@@ -533,14 +533,14 @@ class Module_six_moves_urllib(types.ModuleType):
robotparser = _importer._get_module("moves.urllib_robotparser")
def __dir__(self):
- return ["parse", "error", "request", "response", "robotparser"]
+ return ["parse", "error", "request", "response", "robotparser"]
-_importer._add_module(
- Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib"
-)
-
+_importer._add_module(
+ Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib"
+)
+
def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move)
@@ -578,24 +578,24 @@ else:
try:
advance_iterator = next
except NameError:
-
+
def advance_iterator(it):
return it.next()
-
-
+
+
next = advance_iterator
try:
callable = callable
except NameError:
-
+
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
-
+
def get_unbound_function(unbound):
return unbound
@@ -606,7 +606,7 @@ if PY3:
Iterator = object
else:
-
+
def get_unbound_function(unbound):
return unbound.im_func
@@ -621,9 +621,9 @@ else:
return type(self).__next__(self)
callable = callable
-_add_doc(
- get_unbound_function, """Get the function out of a possibly unbound function"""
-)
+_add_doc(
+ get_unbound_function, """Get the function out of a possibly unbound function"""
+)
get_method_function = operator.attrgetter(_meth_func)
@@ -635,7 +635,7 @@ get_function_globals = operator.attrgetter(_func_globals)
if PY3:
-
+
def iterkeys(d, **kw):
return iter(d.keys(**kw))
@@ -654,7 +654,7 @@ if PY3:
viewitems = operator.methodcaller("items")
else:
-
+
def iterkeys(d, **kw):
return d.iterkeys(**kw)
@@ -675,52 +675,52 @@ else:
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
-_add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.")
-_add_doc(
- iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary."
-)
+_add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(
+ iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary."
+)
if PY3:
-
+
def b(s):
return s.encode("latin-1")
def u(s):
return s
-
+
unichr = chr
import struct
-
+
int2byte = struct.Struct(">B").pack
del struct
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
-
+
StringIO = io.StringIO
BytesIO = io.BytesIO
- del io
+ del io
_assertCountEqual = "assertCountEqual"
if sys.version_info[1] <= 1:
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
- _assertNotRegex = "assertNotRegexpMatches"
+ _assertNotRegex = "assertNotRegexpMatches"
else:
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
- _assertNotRegex = "assertNotRegex"
+ _assertNotRegex = "assertNotRegex"
else:
-
+
def b(s):
return s
-
+
# Workaround for standalone backslash
def u(s):
- return unicode(s.replace(r"\\", r"\\\\"), "unicode_escape")
-
+ return unicode(s.replace(r"\\", r"\\\\"), "unicode_escape")
+
unichr = unichr
int2byte = chr
@@ -729,15 +729,15 @@ else:
def indexbytes(buf, i):
return ord(buf[i])
-
+
iterbytes = functools.partial(itertools.imap, ord)
import StringIO
-
+
StringIO = BytesIO = StringIO.StringIO
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
- _assertNotRegex = "assertNotRegexpMatches"
+ _assertNotRegex = "assertNotRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
@@ -754,27 +754,27 @@ def assertRegex(self, *args, **kwargs):
return getattr(self, _assertRegex)(*args, **kwargs)
-def assertNotRegex(self, *args, **kwargs):
- return getattr(self, _assertNotRegex)(*args, **kwargs)
-
-
+def assertNotRegex(self, *args, **kwargs):
+ return getattr(self, _assertNotRegex)(*args, **kwargs)
+
+
if PY3:
exec_ = getattr(moves.builtins, "exec")
def reraise(tp, value, tb=None):
- try:
- if value is None:
- value = tp()
- if value.__traceback__ is not tb:
- raise value.with_traceback(tb)
- raise value
- finally:
- value = None
- tb = None
-
-
+ try:
+ if value is None:
+ value = tp()
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+ finally:
+ value = None
+ tb = None
+
+
else:
-
+
def exec_(_code_, _globs_=None, _locs_=None):
"""Execute code in a namespace."""
if _globs_ is None:
@@ -785,36 +785,36 @@ else:
del frame
elif _locs_ is None:
_locs_ = _globs_
- exec ("""exec _code_ in _globs_, _locs_""")
-
- exec_(
- """def reraise(tp, value, tb=None):
- try:
- raise tp, value, tb
- finally:
- tb = None
-"""
- )
-
-
-if sys.version_info[:2] > (3,):
- exec_(
- """def raise_from(value, from_value):
- try:
- raise value from from_value
- finally:
- value = None
-"""
- )
+ exec ("""exec _code_ in _globs_, _locs_""")
+
+ exec_(
+ """def reraise(tp, value, tb=None):
+ try:
+ raise tp, value, tb
+ finally:
+ tb = None
+"""
+ )
+
+
+if sys.version_info[:2] > (3,):
+ exec_(
+ """def raise_from(value, from_value):
+ try:
+ raise value from from_value
+ finally:
+ value = None
+"""
+ )
else:
-
+
def raise_from(value, from_value):
raise value
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
-
+
def print_(*args, **kwargs):
"""The new-style print function for Python 2.4 and 2.5."""
fp = kwargs.pop("file", sys.stdout)
@@ -825,17 +825,17 @@ if print_ is None:
if not isinstance(data, basestring):
data = str(data)
# If the file has an encoding, encode unicode with it.
- if (
- isinstance(fp, file)
- and isinstance(data, unicode)
- and fp.encoding is not None
- ):
+ if (
+ isinstance(fp, file)
+ and isinstance(data, unicode)
+ and fp.encoding is not None
+ ):
errors = getattr(fp, "errors", None)
if errors is None:
errors = "strict"
data = data.encode(fp.encoding, errors)
fp.write(data)
-
+
want_unicode = False
sep = kwargs.pop("sep", None)
if sep is not None:
@@ -871,8 +871,8 @@ if print_ is None:
write(sep)
write(arg)
write(end)
-
-
+
+
if sys.version_info[:2] < (3, 3):
_print = print_
@@ -883,46 +883,46 @@ if sys.version_info[:2] < (3, 3):
if flush and fp is not None:
fp.flush()
-
+
_add_doc(reraise, """Reraise an exception.""")
if sys.version_info[0:2] < (3, 4):
- # This does exactly what the :func:`py3:functools.update_wrapper`
- # function does on Python versions after 3.2. It sets the ``__wrapped__``
- # attribute on ``wrapper`` object and it doesn't raise an error if any of
- # the attributes mentioned in ``assigned`` and ``updated`` are missing on
- # ``wrapped`` object.
- def _update_wrapper(
- wrapper,
- wrapped,
- assigned=functools.WRAPPER_ASSIGNMENTS,
- updated=functools.WRAPPER_UPDATES,
- ):
- for attr in assigned:
- try:
- value = getattr(wrapped, attr)
- except AttributeError:
- continue
- else:
- setattr(wrapper, attr, value)
- for attr in updated:
- getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
- wrapper.__wrapped__ = wrapped
- return wrapper
-
- _update_wrapper.__doc__ = functools.update_wrapper.__doc__
-
- def wraps(
- wrapped,
- assigned=functools.WRAPPER_ASSIGNMENTS,
- updated=functools.WRAPPER_UPDATES,
- ):
- return functools.partial(
- _update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated
- )
-
- wraps.__doc__ = functools.wraps.__doc__
-
+ # This does exactly what the :func:`py3:functools.update_wrapper`
+ # function does on Python versions after 3.2. It sets the ``__wrapped__``
+ # attribute on ``wrapper`` object and it doesn't raise an error if any of
+ # the attributes mentioned in ``assigned`` and ``updated`` are missing on
+ # ``wrapped`` object.
+ def _update_wrapper(
+ wrapper,
+ wrapped,
+ assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES,
+ ):
+ for attr in assigned:
+ try:
+ value = getattr(wrapped, attr)
+ except AttributeError:
+ continue
+ else:
+ setattr(wrapper, attr, value)
+ for attr in updated:
+ getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
+ wrapper.__wrapped__ = wrapped
+ return wrapper
+
+ _update_wrapper.__doc__ = functools.update_wrapper.__doc__
+
+ def wraps(
+ wrapped,
+ assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES,
+ ):
+ return functools.partial(
+ _update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated
+ )
+
+ wraps.__doc__ = functools.wraps.__doc__
+
else:
wraps = functools.wraps
@@ -932,121 +932,121 @@ def with_metaclass(meta, *bases):
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
- class metaclass(type):
+ class metaclass(type):
def __new__(cls, name, this_bases, d):
- if sys.version_info[:2] >= (3, 7):
- # This version introduced PEP 560 that requires a bit
- # of extra care (we mimic what is done by __build_class__).
- resolved_bases = types.resolve_bases(bases)
- if resolved_bases is not bases:
- d["__orig_bases__"] = bases
- else:
- resolved_bases = bases
- return meta(name, resolved_bases, d)
-
- @classmethod
- def __prepare__(cls, name, this_bases):
- return meta.__prepare__(name, bases)
-
- return type.__new__(metaclass, "temporary_class", (), {})
-
-
+ if sys.version_info[:2] >= (3, 7):
+ # This version introduced PEP 560 that requires a bit
+ # of extra care (we mimic what is done by __build_class__).
+ resolved_bases = types.resolve_bases(bases)
+ if resolved_bases is not bases:
+ d["__orig_bases__"] = bases
+ else:
+ resolved_bases = bases
+ return meta(name, resolved_bases, d)
+
+ @classmethod
+ def __prepare__(cls, name, this_bases):
+ return meta.__prepare__(name, bases)
+
+ return type.__new__(metaclass, "temporary_class", (), {})
+
+
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
-
+
def wrapper(cls):
orig_vars = cls.__dict__.copy()
- slots = orig_vars.get("__slots__")
+ slots = orig_vars.get("__slots__")
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
- orig_vars.pop("__dict__", None)
- orig_vars.pop("__weakref__", None)
- if hasattr(cls, "__qualname__"):
- orig_vars["__qualname__"] = cls.__qualname__
+ orig_vars.pop("__dict__", None)
+ orig_vars.pop("__weakref__", None)
+ if hasattr(cls, "__qualname__"):
+ orig_vars["__qualname__"] = cls.__qualname__
return metaclass(cls.__name__, cls.__bases__, orig_vars)
-
+
return wrapper
-def ensure_binary(s, encoding="utf-8", errors="strict"):
- """Coerce **s** to six.binary_type.
-
- For Python 2:
- - `unicode` -> encoded to `str`
- - `str` -> `str`
-
- For Python 3:
- - `str` -> encoded to `bytes`
- - `bytes` -> `bytes`
- """
- if isinstance(s, binary_type):
- return s
- if isinstance(s, text_type):
- return s.encode(encoding, errors)
- raise TypeError("not expecting type '%s'" % type(s))
-
-
-def ensure_str(s, encoding="utf-8", errors="strict"):
- """Coerce *s* to `str`.
-
- For Python 2:
- - `unicode` -> encoded to `str`
- - `str` -> `str`
-
- For Python 3:
- - `str` -> `str`
- - `bytes` -> decoded to `str`
- """
- # Optimization: Fast return for the common case.
- if type(s) is str:
- return s
- if PY2 and isinstance(s, text_type):
- return s.encode(encoding, errors)
- elif PY3 and isinstance(s, binary_type):
- return s.decode(encoding, errors)
- elif not isinstance(s, (text_type, binary_type)):
- raise TypeError("not expecting type '%s'" % type(s))
- return s
-
-
-def ensure_text(s, encoding="utf-8", errors="strict"):
- """Coerce *s* to six.text_type.
-
- For Python 2:
- - `unicode` -> `unicode`
- - `str` -> `unicode`
-
- For Python 3:
- - `str` -> `str`
- - `bytes` -> decoded to `str`
- """
- if isinstance(s, binary_type):
- return s.decode(encoding, errors)
- elif isinstance(s, text_type):
- return s
- else:
- raise TypeError("not expecting type '%s'" % type(s))
-
-
+def ensure_binary(s, encoding="utf-8", errors="strict"):
+ """Coerce **s** to six.binary_type.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> encoded to `bytes`
+ - `bytes` -> `bytes`
+ """
+ if isinstance(s, binary_type):
+ return s
+ if isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
+def ensure_str(s, encoding="utf-8", errors="strict"):
+ """Coerce *s* to `str`.
+
+ For Python 2:
+ - `unicode` -> encoded to `str`
+ - `str` -> `str`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ # Optimization: Fast return for the common case.
+ if type(s) is str:
+ return s
+ if PY2 and isinstance(s, text_type):
+ return s.encode(encoding, errors)
+ elif PY3 and isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif not isinstance(s, (text_type, binary_type)):
+ raise TypeError("not expecting type '%s'" % type(s))
+ return s
+
+
+def ensure_text(s, encoding="utf-8", errors="strict"):
+ """Coerce *s* to six.text_type.
+
+ For Python 2:
+ - `unicode` -> `unicode`
+ - `str` -> `unicode`
+
+ For Python 3:
+ - `str` -> `str`
+ - `bytes` -> decoded to `str`
+ """
+ if isinstance(s, binary_type):
+ return s.decode(encoding, errors)
+ elif isinstance(s, text_type):
+ return s
+ else:
+ raise TypeError("not expecting type '%s'" % type(s))
+
+
def python_2_unicode_compatible(klass):
"""
- A class decorator that defines __unicode__ and __str__ methods under Python 2.
+ A class decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if PY2:
- if "__str__" not in klass.__dict__:
- raise ValueError(
- "@python_2_unicode_compatible cannot be applied "
- "to %s because it doesn't define __str__()." % klass.__name__
- )
+ if "__str__" not in klass.__dict__:
+ raise ValueError(
+ "@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." % klass.__name__
+ )
klass.__unicode__ = klass.__str__
- klass.__str__ = lambda self: self.__unicode__().encode("utf-8")
+ klass.__str__ = lambda self: self.__unicode__().encode("utf-8")
return klass
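
The ensure_binary/ensure_str/ensure_text trio restored above gives one spelling for byte/text coercion across major versions: ensure_str targets the native str type, while the other two pin bytes and text respectively. A sketch of the Python 3 behavior using the vendored copy:

from urllib3.packages import six

print(six.ensure_binary(u"h\u00e9"))   # b'h\xc3\xa9' (text encoded to bytes)
print(six.ensure_text(b"h\xc3\xa9"))   # 'hé' (bytes decoded to text)
print(six.ensure_str(b"abc"))          # 'abc' (native str on Python 3)
# six.ensure_str(3) would raise TypeError("not expecting type '<class 'int'>'")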
@@ -1066,10 +1066,10 @@ if sys.meta_path:
# be floating around. Therefore, we can't use isinstance() to check for
# the six meta path importer, since the other six instance will have
# inserted an importer with different class.
- if (
- type(importer).__name__ == "_SixMetaPathImporter"
- and importer.name == __name__
- ):
+ if (
+ type(importer).__name__ == "_SixMetaPathImporter"
+ and importer.name == __name__
+ ):
del sys.meta_path[i]
break
del i, importer
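
with_metaclass above builds a throwaway metaclass whose __new__ swaps in the real one at class-creation time (with the PEP 560 resolve_bases dance on 3.7+), so one class statement works on both Python 2 and 3. A sketch with a toy metaclass; Meta and Base are example names, not library API:

from urllib3.packages import six

class Meta(type):
    def __new__(mcls, name, bases, namespace):
        # Record non-dunder attribute names before the class is built.
        namespace["declared"] = sorted(
            k for k in namespace if not k.startswith("__")
        )
        return super(Meta, mcls).__new__(mcls, name, bases, namespace)

class Base(six.with_metaclass(Meta, object)):
    x = 1

print(type(Base).__name__)  # Meta -- the real metaclass was applied
print(Base.declared)        # ['x']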
diff --git a/contrib/python/urllib3/urllib3/poolmanager.py b/contrib/python/urllib3/urllib3/poolmanager.py
index 3a31a285bf..784b431f1e 100644
--- a/contrib/python/urllib3/urllib3/poolmanager.py
+++ b/contrib/python/urllib3/urllib3/poolmanager.py
@@ -1,80 +1,80 @@
from __future__ import absolute_import
-
+
import collections
import functools
import logging
from ._collections import RecentlyUsedContainer
-from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
-from .exceptions import (
- LocationValueError,
- MaxRetryError,
- ProxySchemeUnknown,
- ProxySchemeUnsupported,
- URLSchemeUnknown,
-)
-from .packages import six
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
+from .exceptions import (
+ LocationValueError,
+ MaxRetryError,
+ ProxySchemeUnknown,
+ ProxySchemeUnsupported,
+ URLSchemeUnknown,
+)
+from .packages import six
from .packages.six.moves.urllib.parse import urljoin
from .request import RequestMethods
-from .util.proxy import connection_requires_http_tunnel
-from .util.retry import Retry
-from .util.url import parse_url
+from .util.proxy import connection_requires_http_tunnel
+from .util.retry import Retry
+from .util.url import parse_url
-__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]
+__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]
log = logging.getLogger(__name__)
-SSL_KEYWORDS = (
- "key_file",
- "cert_file",
- "cert_reqs",
- "ca_certs",
- "ssl_version",
- "ca_cert_dir",
- "ssl_context",
- "key_password",
-)
+SSL_KEYWORDS = (
+ "key_file",
+ "cert_file",
+ "cert_reqs",
+ "ca_certs",
+ "ssl_version",
+ "ca_cert_dir",
+ "ssl_context",
+ "key_password",
+)
# All known keyword arguments that could be provided to the pool manager, its
# pools, or the underlying connections. This is used to construct a pool key.
_key_fields = (
- "key_scheme", # str
- "key_host", # str
- "key_port", # int
- "key_timeout", # int or float or Timeout
- "key_retries", # int or Retry
- "key_strict", # bool
- "key_block", # bool
- "key_source_address", # str
- "key_key_file", # str
- "key_key_password", # str
- "key_cert_file", # str
- "key_cert_reqs", # str
- "key_ca_certs", # str
- "key_ssl_version", # str
- "key_ca_cert_dir", # str
- "key_ssl_context", # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext
- "key_maxsize", # int
- "key_headers", # dict
- "key__proxy", # parsed proxy url
- "key__proxy_headers", # dict
- "key__proxy_config", # class
- "key_socket_options", # list of (level (int), optname (int), value (int or str)) tuples
- "key__socks_options", # dict
- "key_assert_hostname", # bool or string
- "key_assert_fingerprint", # str
- "key_server_hostname", # str
+ "key_scheme", # str
+ "key_host", # str
+ "key_port", # int
+ "key_timeout", # int or float or Timeout
+ "key_retries", # int or Retry
+ "key_strict", # bool
+ "key_block", # bool
+ "key_source_address", # str
+ "key_key_file", # str
+ "key_key_password", # str
+ "key_cert_file", # str
+ "key_cert_reqs", # str
+ "key_ca_certs", # str
+ "key_ssl_version", # str
+ "key_ca_cert_dir", # str
+ "key_ssl_context", # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext
+ "key_maxsize", # int
+ "key_headers", # dict
+ "key__proxy", # parsed proxy url
+ "key__proxy_headers", # dict
+ "key__proxy_config", # class
+ "key_socket_options", # list of (level (int), optname (int), value (int or str)) tuples
+ "key__socks_options", # dict
+ "key_assert_hostname", # bool or string
+ "key_assert_fingerprint", # str
+ "key_server_hostname", # str
)
#: The namedtuple class used to construct keys for the connection pool.
#: All custom key schemes should include the fields in this key at a minimum.
-PoolKey = collections.namedtuple("PoolKey", _key_fields)
-
-_proxy_config_fields = ("ssl_context", "use_forwarding_for_https")
-ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields)
+PoolKey = collections.namedtuple("PoolKey", _key_fields)
+_proxy_config_fields = ("ssl_context", "use_forwarding_for_https")
+ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields)
+
def _default_key_normalizer(key_class, request_context):
"""
Create a pool key out of a request context dictionary.
@@ -97,24 +97,24 @@ def _default_key_normalizer(key_class, request_context):
"""
# Since we mutate the dictionary, make a copy first
context = request_context.copy()
- context["scheme"] = context["scheme"].lower()
- context["host"] = context["host"].lower()
+ context["scheme"] = context["scheme"].lower()
+ context["host"] = context["host"].lower()
# These are both dictionaries and need to be transformed into frozensets
- for key in ("headers", "_proxy_headers", "_socks_options"):
+ for key in ("headers", "_proxy_headers", "_socks_options"):
if key in context and context[key] is not None:
context[key] = frozenset(context[key].items())
# The socket_options key may be a list and needs to be transformed into a
# tuple.
- socket_opts = context.get("socket_options")
+ socket_opts = context.get("socket_options")
if socket_opts is not None:
- context["socket_options"] = tuple(socket_opts)
+ context["socket_options"] = tuple(socket_opts)
# Map the kwargs to the names in the namedtuple - this is necessary since
# namedtuples can't have fields starting with '_'.
for key in list(context.keys()):
- context["key_" + key] = context.pop(key)
+ context["key_" + key] = context.pop(key)
# Default to ``None`` for keys missing from the context
for field in key_class._fields:
@@ -129,11 +129,11 @@ def _default_key_normalizer(key_class, request_context):
#: Each PoolManager makes a copy of this dictionary so they can be configured
#: globally here, or individually on the instance.
key_fn_by_scheme = {
- "http": functools.partial(_default_key_normalizer, PoolKey),
- "https": functools.partial(_default_key_normalizer, PoolKey),
+ "http": functools.partial(_default_key_normalizer, PoolKey),
+ "https": functools.partial(_default_key_normalizer, PoolKey),
}
-pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool}
+pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool}
class PoolManager(RequestMethods):
@@ -165,12 +165,12 @@ class PoolManager(RequestMethods):
"""
proxy = None
- proxy_config = None
+ proxy_config = None
def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
RequestMethods.__init__(self, headers)
self.connection_pool_kw = connection_pool_kw
- self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close())
+ self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close())
# Locally set the pool classes and keys so other PoolManagers can
# override them.
@@ -187,7 +187,7 @@ class PoolManager(RequestMethods):
def _new_pool(self, scheme, host, port, request_context=None):
"""
- Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
+ Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
any additional pool keyword arguments.
If ``request_context`` is provided, it is provided as keyword arguments
@@ -203,10 +203,10 @@ class PoolManager(RequestMethods):
# this function has historically only used the scheme, host, and port
# in the positional args. When an API change is acceptable these can
# be removed.
- for key in ("scheme", "host", "port"):
+ for key in ("scheme", "host", "port"):
request_context.pop(key, None)
- if scheme == "http":
+ if scheme == "http":
for kw in SSL_KEYWORDS:
request_context.pop(kw, None)
@@ -221,9 +221,9 @@ class PoolManager(RequestMethods):
"""
self.pools.clear()
- def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
+ def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
"""
- Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.
+ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.
If ``port`` isn't given, it will be derived from the ``scheme`` using
``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
@@ -236,32 +236,32 @@ class PoolManager(RequestMethods):
raise LocationValueError("No host specified.")
request_context = self._merge_pool_kwargs(pool_kwargs)
- request_context["scheme"] = scheme or "http"
+ request_context["scheme"] = scheme or "http"
if not port:
- port = port_by_scheme.get(request_context["scheme"].lower(), 80)
- request_context["port"] = port
- request_context["host"] = host
+ port = port_by_scheme.get(request_context["scheme"].lower(), 80)
+ request_context["port"] = port
+ request_context["host"] = host
return self.connection_from_context(request_context)
def connection_from_context(self, request_context):
"""
- Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.
+ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.
``request_context`` must at least contain the ``scheme`` key and its
value must be a key in ``key_fn_by_scheme`` instance variable.
"""
- scheme = request_context["scheme"].lower()
- pool_key_constructor = self.key_fn_by_scheme.get(scheme)
- if not pool_key_constructor:
- raise URLSchemeUnknown(scheme)
+ scheme = request_context["scheme"].lower()
+ pool_key_constructor = self.key_fn_by_scheme.get(scheme)
+ if not pool_key_constructor:
+ raise URLSchemeUnknown(scheme)
pool_key = pool_key_constructor(request_context)
return self.connection_from_pool_key(pool_key, request_context=request_context)
def connection_from_pool_key(self, pool_key, request_context=None):
"""
- Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.
+ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.
``pool_key`` should be a namedtuple that only contains immutable
objects. At a minimum it must have the ``scheme``, ``host``, and
@@ -275,9 +275,9 @@ class PoolManager(RequestMethods):
return pool
# Make a fresh ConnectionPool of the desired type
- scheme = request_context["scheme"]
- host = request_context["host"]
- port = request_context["port"]
+ scheme = request_context["scheme"]
+ host = request_context["host"]
+ port = request_context["port"]
pool = self._new_pool(scheme, host, port, request_context=request_context)
self.pools[pool_key] = pool
@@ -295,9 +295,9 @@ class PoolManager(RequestMethods):
not used.
"""
u = parse_url(url)
- return self.connection_from_host(
- u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs
- )
+ return self.connection_from_host(
+ u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs
+ )
def _merge_pool_kwargs(self, override):
"""
@@ -319,39 +319,39 @@ class PoolManager(RequestMethods):
base_pool_kwargs[key] = value
return base_pool_kwargs
- def _proxy_requires_url_absolute_form(self, parsed_url):
- """
- Indicates if the proxy requires the complete destination URL in the
- request. Normally this is only needed when not using an HTTP CONNECT
- tunnel.
- """
- if self.proxy is None:
- return False
-
- return not connection_requires_http_tunnel(
- self.proxy, self.proxy_config, parsed_url.scheme
- )
-
- def _validate_proxy_scheme_url_selection(self, url_scheme):
- """
- Validates that were not attempting to do TLS in TLS connections on
- Python2 or with unsupported SSL implementations.
- """
- if self.proxy is None or url_scheme != "https":
- return
-
- if self.proxy.scheme != "https":
- return
-
- if six.PY2 and not self.proxy_config.use_forwarding_for_https:
- raise ProxySchemeUnsupported(
- "Contacting HTTPS destinations through HTTPS proxies "
- "'via CONNECT tunnels' is not supported in Python 2"
- )
-
+ def _proxy_requires_url_absolute_form(self, parsed_url):
+ """
+ Indicates if the proxy requires the complete destination URL in the
+ request. Normally this is only needed when not using an HTTP CONNECT
+ tunnel.
+ """
+ if self.proxy is None:
+ return False
+
+ return not connection_requires_http_tunnel(
+ self.proxy, self.proxy_config, parsed_url.scheme
+ )
+
+ def _validate_proxy_scheme_url_selection(self, url_scheme):
+ """
+ Validates that we're not attempting to do TLS in TLS connections on
+ Python 2 or with unsupported SSL implementations.
+ """
+ if self.proxy is None or url_scheme != "https":
+ return
+
+ if self.proxy.scheme != "https":
+ return
+
+ if six.PY2 and not self.proxy_config.use_forwarding_for_https:
+ raise ProxySchemeUnsupported(
+ "Contacting HTTPS destinations through HTTPS proxies "
+ "'via CONNECT tunnels' is not supported in Python 2"
+ )
+
def urlopen(self, method, url, redirect=True, **kw):
"""
- Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
+ Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
with custom cross-host redirect logic and only sends the request-uri
portion of the ``url``.
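
In other words, ``urlopen()`` pins ``assert_same_host=False`` and ``redirect=False`` on the underlying pool call and replays cross-host redirects itself. A usage sketch (the URL is hypothetical and assumes network access):

    import urllib3

    http = urllib3.PoolManager()
    resp = http.urlopen("GET", "http://example.com/", redirect=True)
    print(resp.status)
    # When a redirect was followed, the retries history records the hops
    # and resp.geturl() reports the final location.
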
@@ -359,17 +359,17 @@ class PoolManager(RequestMethods):
:class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
"""
u = parse_url(url)
- self._validate_proxy_scheme_url_selection(u.scheme)
-
+ self._validate_proxy_scheme_url_selection(u.scheme)
+
conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
- kw["assert_same_host"] = False
- kw["redirect"] = False
+ kw["assert_same_host"] = False
+ kw["redirect"] = False
+
+ if "headers" not in kw:
+ kw["headers"] = self.headers.copy()
- if "headers" not in kw:
- kw["headers"] = self.headers.copy()
-
- if self._proxy_requires_url_absolute_form(u):
+ if self._proxy_requires_url_absolute_form(u):
response = conn.urlopen(method, url, **kw)
else:
response = conn.urlopen(method, u.request_uri, **kw)
@@ -383,37 +383,37 @@ class PoolManager(RequestMethods):
# RFC 7231, Section 6.4.4
if response.status == 303:
- method = "GET"
+ method = "GET"
- retries = kw.get("retries")
+ retries = kw.get("retries")
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect)
- # Strip headers marked as unsafe to forward to the redirected location.
- # Check remove_headers_on_redirect to avoid a potential network call within
- # conn.is_same_host() which may use socket.gethostbyname() in the future.
- if retries.remove_headers_on_redirect and not conn.is_same_host(
- redirect_location
- ):
- headers = list(six.iterkeys(kw["headers"]))
- for header in headers:
- if header.lower() in retries.remove_headers_on_redirect:
- kw["headers"].pop(header, None)
-
+ # Strip headers marked as unsafe to forward to the redirected location.
+ # Check remove_headers_on_redirect to avoid a potential network call within
+ # conn.is_same_host() which may use socket.gethostbyname() in the future.
+ if retries.remove_headers_on_redirect and not conn.is_same_host(
+ redirect_location
+ ):
+ headers = list(six.iterkeys(kw["headers"]))
+ for header in headers:
+ if header.lower() in retries.remove_headers_on_redirect:
+ kw["headers"].pop(header, None)
+
try:
retries = retries.increment(method, url, response=response, _pool=conn)
except MaxRetryError:
if retries.raise_on_redirect:
- response.drain_conn()
+ response.drain_conn()
raise
return response
- kw["retries"] = retries
- kw["redirect"] = redirect
+ kw["retries"] = retries
+ kw["redirect"] = redirect
log.info("Redirecting %s -> %s", url, redirect_location)
-
- response.drain_conn()
+
+ response.drain_conn()
return self.urlopen(method, redirect_location, **kw)
@@ -426,24 +426,24 @@ class ProxyManager(PoolManager):
The URL of the proxy to be used.
:param proxy_headers:
- A dictionary containing headers that will be sent to the proxy. In case
+ A dictionary containing headers that will be sent to the proxy. In case
of HTTP they are being sent with each request, while in the
HTTPS/CONNECT case they are sent only once. Could be used for proxy
authentication.
- :param proxy_ssl_context:
- The proxy SSL context is used to establish the TLS connection to the
- proxy when using HTTPS proxies.
-
- :param use_forwarding_for_https:
- (Defaults to False) If set to True will forward requests to the HTTPS
- proxy to be made on behalf of the client instead of creating a TLS
- tunnel via the CONNECT method. **Enabling this flag means that request
- and response headers and content will be visible from the HTTPS proxy**
- whereas tunneling keeps request and response headers and content
- private. IP address, target hostname, SNI, and port are always visible
- to an HTTPS proxy even when this flag is disabled.
-
+ :param proxy_ssl_context:
+ The proxy SSL context is used to establish the TLS connection to the
+ proxy when using HTTPS proxies.
+
+ :param use_forwarding_for_https:
+ (Defaults to False) If set to True will forward requests to the HTTPS
+ proxy to be made on behalf of the client instead of creating a TLS
+ tunnel via the CONNECT method. **Enabling this flag means that request
+ and response headers and content will be visible from the HTTPS proxy**
+ whereas tunneling keeps request and response headers and content
+ private. IP address, target hostname, SNI, and port are always visible
+ to an HTTPS proxy even when this flag is disabled.
+
Example:
>>> proxy = urllib3.ProxyManager('http://localhost:3128/')
>>> r1 = proxy.request('GET', 'http://google.com/')
@@ -457,63 +457,63 @@ class ProxyManager(PoolManager):
"""
- def __init__(
- self,
- proxy_url,
- num_pools=10,
- headers=None,
- proxy_headers=None,
- proxy_ssl_context=None,
- use_forwarding_for_https=False,
- **connection_pool_kw
- ):
+ def __init__(
+ self,
+ proxy_url,
+ num_pools=10,
+ headers=None,
+ proxy_headers=None,
+ proxy_ssl_context=None,
+ use_forwarding_for_https=False,
+ **connection_pool_kw
+ ):
if isinstance(proxy_url, HTTPConnectionPool):
- proxy_url = "%s://%s:%i" % (
- proxy_url.scheme,
- proxy_url.host,
- proxy_url.port,
- )
+ proxy_url = "%s://%s:%i" % (
+ proxy_url.scheme,
+ proxy_url.host,
+ proxy_url.port,
+ )
proxy = parse_url(proxy_url)
-
- if proxy.scheme not in ("http", "https"):
- raise ProxySchemeUnknown(proxy.scheme)
-
- if not proxy.port:
- port = port_by_scheme.get(proxy.scheme, 80)
- proxy = proxy._replace(port=port)
-
+
+ if proxy.scheme not in ("http", "https"):
+ raise ProxySchemeUnknown(proxy.scheme)
+
+ if not proxy.port:
+ port = port_by_scheme.get(proxy.scheme, 80)
+ proxy = proxy._replace(port=port)
+
self.proxy = proxy
self.proxy_headers = proxy_headers or {}
- self.proxy_ssl_context = proxy_ssl_context
- self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https)
+ self.proxy_ssl_context = proxy_ssl_context
+ self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https)
- connection_pool_kw["_proxy"] = self.proxy
- connection_pool_kw["_proxy_headers"] = self.proxy_headers
- connection_pool_kw["_proxy_config"] = self.proxy_config
+ connection_pool_kw["_proxy"] = self.proxy
+ connection_pool_kw["_proxy_headers"] = self.proxy_headers
+ connection_pool_kw["_proxy_config"] = self.proxy_config
- super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)
+ super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)
- def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
+ def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
if scheme == "https":
return super(ProxyManager, self).connection_from_host(
- host, port, scheme, pool_kwargs=pool_kwargs
- )
+ host, port, scheme, pool_kwargs=pool_kwargs
+ )
return super(ProxyManager, self).connection_from_host(
- self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs
- )
+ self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs
+ )
def _set_proxy_headers(self, url, headers=None):
"""
Sets headers needed by proxies: specifically, the Accept and Host
headers. Only sets headers not provided by the user.
"""
- headers_ = {"Accept": "*/*"}
+ headers_ = {"Accept": "*/*"}
netloc = parse_url(url).netloc
if netloc:
- headers_["Host"] = netloc
+ headers_["Host"] = netloc
if headers:
headers_.update(headers)
@@ -522,12 +522,12 @@ class ProxyManager(PoolManager):
def urlopen(self, method, url, redirect=True, **kw):
"Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
u = parse_url(url)
- if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
- # For connections using HTTP CONNECT, httplib sets the necessary
- # headers on the CONNECT to the proxy. If we're not using CONNECT,
- # we'll definitely need to set 'Host' at the very least.
- headers = kw.get("headers", self.headers)
- kw["headers"] = self._set_proxy_headers(url, headers)
+ if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
+ # For connections using HTTP CONNECT, httplib sets the necessary
+ # headers on the CONNECT to the proxy. If we're not using CONNECT,
+ # we'll definitely need to set 'Host' at the very least.
+ headers = kw.get("headers", self.headers)
+ kw["headers"] = self._set_proxy_headers(url, headers)
return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
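
The forwarding-versus-CONNECT decision above comes from ``connection_requires_http_tunnel()``; a runnable sketch of the two cases (the proxy URL is hypothetical, and nothing is contacted when constructing the manager):

    import urllib3
    from urllib3.util.proxy import connection_requires_http_tunnel

    proxy = urllib3.ProxyManager("http://localhost:3128/")
    # Plain http:// destinations are forwarded through the proxy...
    assert not connection_requires_http_tunnel(
        proxy.proxy, proxy.proxy_config, "http"
    )
    # ...while https:// destinations are tunneled via CONNECT, because
    # this proxy is http:// and use_forwarding_for_https defaults to False.
    assert connection_requires_http_tunnel(
        proxy.proxy, proxy.proxy_config, "https"
    )
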
diff --git a/contrib/python/urllib3/urllib3/request.py b/contrib/python/urllib3/urllib3/request.py
index 398386a5b9..7b81397000 100644
--- a/contrib/python/urllib3/urllib3/request.py
+++ b/contrib/python/urllib3/urllib3/request.py
@@ -3,14 +3,14 @@ from __future__ import absolute_import
from .filepost import encode_multipart_formdata
from .packages.six.moves.urllib.parse import urlencode
-__all__ = ["RequestMethods"]
+__all__ = ["RequestMethods"]
class RequestMethods(object):
"""
Convenience mixin for classes that implement a :meth:`urlopen` method, such
- as :class:`urllib3.HTTPConnectionPool` and
- :class:`urllib3.PoolManager`.
+ as :class:`urllib3.HTTPConnectionPool` and
+ :class:`urllib3.PoolManager`.
Provides behavior for making common types of HTTP request methods and
decides which type of request field encoding to use.
@@ -35,25 +35,25 @@ class RequestMethods(object):
explicitly.
"""
- _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"}
+ _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"}
def __init__(self, headers=None):
self.headers = headers or {}
- def urlopen(
- self,
- method,
- url,
- body=None,
- headers=None,
- encode_multipart=True,
- multipart_boundary=None,
- **kw
- ): # Abstract
- raise NotImplementedError(
- "Classes extending RequestMethods must implement "
- "their own ``urlopen`` method."
- )
+ def urlopen(
+ self,
+ method,
+ url,
+ body=None,
+ headers=None,
+ encode_multipart=True,
+ multipart_boundary=None,
+ **kw
+ ): # Abstract
+ raise NotImplementedError(
+ "Classes extending RequestMethods must implement "
+ "their own ``urlopen`` method."
+ )
def request(self, method, url, fields=None, headers=None, **urlopen_kw):
"""
@@ -68,18 +68,18 @@ class RequestMethods(object):
"""
method = method.upper()
- urlopen_kw["request_url"] = url
-
+ urlopen_kw["request_url"] = url
+
if method in self._encode_url_methods:
- return self.request_encode_url(
- method, url, fields=fields, headers=headers, **urlopen_kw
- )
+ return self.request_encode_url(
+ method, url, fields=fields, headers=headers, **urlopen_kw
+ )
else:
- return self.request_encode_body(
- method, url, fields=fields, headers=headers, **urlopen_kw
- )
+ return self.request_encode_body(
+ method, url, fields=fields, headers=headers, **urlopen_kw
+ )
- def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw):
+ def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw):
"""
Make a request using :meth:`urlopen` with the ``fields`` encoded in
the url. This is useful for request methods like GET, HEAD, DELETE, etc.
@@ -87,32 +87,32 @@ class RequestMethods(object):
if headers is None:
headers = self.headers
- extra_kw = {"headers": headers}
+ extra_kw = {"headers": headers}
extra_kw.update(urlopen_kw)
if fields:
- url += "?" + urlencode(fields)
+ url += "?" + urlencode(fields)
return self.urlopen(method, url, **extra_kw)
- def request_encode_body(
- self,
- method,
- url,
- fields=None,
- headers=None,
- encode_multipart=True,
- multipart_boundary=None,
- **urlopen_kw
- ):
+ def request_encode_body(
+ self,
+ method,
+ url,
+ fields=None,
+ headers=None,
+ encode_multipart=True,
+ multipart_boundary=None,
+ **urlopen_kw
+ ):
"""
Make a request using :meth:`urlopen` with the ``fields`` encoded in
the body. This is useful for request methods like POST, PUT, PATCH, etc.
When ``encode_multipart=True`` (default), then
- :func:`urllib3.encode_multipart_formdata` is used to encode
+ :func:`urllib3.encode_multipart_formdata` is used to encode
the payload with the appropriate content type. Otherwise
- :func:`urllib.parse.urlencode` is used with the
+ :func:`urllib.parse.urlencode` is used with the
'application/x-www-form-urlencoded' content type.
Multipart encoding must be used when posting files, and it's reasonably
@@ -133,7 +133,7 @@ class RequestMethods(object):
}
When uploading a file, providing a filename (the first parameter of the
- tuple) is optional but recommended to best mimic behavior of browsers.
+ tuple) is optional but recommended to best mimic behavior of browsers.
Note that if ``headers`` are supplied, the 'Content-Type' header will
be overwritten because it depends on the dynamic random boundary string
@@ -143,28 +143,28 @@ class RequestMethods(object):
if headers is None:
headers = self.headers
- extra_kw = {"headers": {}}
+ extra_kw = {"headers": {}}
if fields:
- if "body" in urlopen_kw:
+ if "body" in urlopen_kw:
raise TypeError(
- "request got values for both 'fields' and 'body', can only specify one."
- )
+ "request got values for both 'fields' and 'body', can only specify one."
+ )
if encode_multipart:
- body, content_type = encode_multipart_formdata(
- fields, boundary=multipart_boundary
- )
+ body, content_type = encode_multipart_formdata(
+ fields, boundary=multipart_boundary
+ )
else:
- body, content_type = (
- urlencode(fields),
- "application/x-www-form-urlencoded",
- )
+ body, content_type = (
+ urlencode(fields),
+ "application/x-www-form-urlencoded",
+ )
- extra_kw["body"] = body
- extra_kw["headers"] = {"Content-Type": content_type}
+ extra_kw["body"] = body
+ extra_kw["headers"] = {"Content-Type": content_type}
- extra_kw["headers"].update(headers)
+ extra_kw["headers"].update(headers)
extra_kw.update(urlopen_kw)
return self.urlopen(method, url, **extra_kw)
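
``request_encode_body()`` delegates the actual encoding to ``encode_multipart_formdata()`` or ``urlencode()``; a sketch of what each path produces (the field values are hypothetical):

    from urllib3.filepost import encode_multipart_formdata
    from urllib3.packages.six.moves.urllib.parse import urlencode

    fields = {"name": "demo", "file": ("report.txt", b"data", "text/plain")}

    # encode_multipart=True (the default, required for file uploads):
    body, content_type = encode_multipart_formdata(fields)
    # content_type == 'multipart/form-data; boundary=...'

    # encode_multipart=False, for simple key/value forms:
    qs = urlencode({"name": "demo"})  # 'name=demo'
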
diff --git a/contrib/python/urllib3/urllib3/response.py b/contrib/python/urllib3/urllib3/response.py
index f66f14e2de..ebfae47032 100644
--- a/contrib/python/urllib3/urllib3/response.py
+++ b/contrib/python/urllib3/urllib3/response.py
@@ -1,33 +1,33 @@
from __future__ import absolute_import
-
+
import io
import logging
-import zlib
-from contextlib import contextmanager
-from socket import error as SocketError
-from socket import timeout as SocketTimeout
-
-try:
- import brotli
- brotli.error
-except (ImportError, Exception):
- brotli = None
-
+import zlib
+from contextlib import contextmanager
+from socket import error as SocketError
+from socket import timeout as SocketTimeout
+
+try:
+ import brotli
+ brotli.error
+except (ImportError, Exception):
+ brotli = None
+
from ._collections import HTTPHeaderDict
-from .connection import BaseSSLError, HTTPException
+from .connection import BaseSSLError, HTTPException
from .exceptions import (
- BodyNotHttplibCompatible,
- DecodeError,
- HTTPError,
- IncompleteRead,
- InvalidChunkLength,
- InvalidHeader,
- ProtocolError,
- ReadTimeoutError,
- ResponseNotChunked,
- SSLError,
+ BodyNotHttplibCompatible,
+ DecodeError,
+ HTTPError,
+ IncompleteRead,
+ InvalidChunkLength,
+ InvalidHeader,
+ ProtocolError,
+ ReadTimeoutError,
+ ResponseNotChunked,
+ SSLError,
)
-from .packages import six
+from .packages import six
from .util.response import is_fp_closed, is_response_to_head
log = logging.getLogger(__name__)
@@ -36,7 +36,7 @@ log = logging.getLogger(__name__)
class DeflateDecoder(object):
def __init__(self):
self._first_try = True
- self._data = b""
+ self._data = b""
self._obj = zlib.decompressobj()
def __getattr__(self, name):
@@ -65,93 +65,93 @@ class DeflateDecoder(object):
self._data = None
-class GzipDecoderState(object):
-
- FIRST_MEMBER = 0
- OTHER_MEMBERS = 1
- SWALLOW_DATA = 2
-
-
+class GzipDecoderState(object):
+
+ FIRST_MEMBER = 0
+ OTHER_MEMBERS = 1
+ SWALLOW_DATA = 2
+
+
class GzipDecoder(object):
def __init__(self):
self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
- self._state = GzipDecoderState.FIRST_MEMBER
+ self._state = GzipDecoderState.FIRST_MEMBER
def __getattr__(self, name):
return getattr(self._obj, name)
def decompress(self, data):
- ret = bytearray()
- if self._state == GzipDecoderState.SWALLOW_DATA or not data:
- return bytes(ret)
- while True:
- try:
- ret += self._obj.decompress(data)
- except zlib.error:
- previous_state = self._state
- # Ignore data after the first error
- self._state = GzipDecoderState.SWALLOW_DATA
- if previous_state == GzipDecoderState.OTHER_MEMBERS:
- # Allow trailing garbage acceptable in other gzip clients
- return bytes(ret)
- raise
- data = self._obj.unused_data
- if not data:
- return bytes(ret)
- self._state = GzipDecoderState.OTHER_MEMBERS
- self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
-
-
-if brotli is not None:
-
- class BrotliDecoder(object):
- # Supports both 'brotlipy' and 'Brotli' packages
- # since they share an import name. The top branches
- # are for 'brotlipy' and bottom branches for 'Brotli'
- def __init__(self):
- self._obj = brotli.Decompressor()
- if hasattr(self._obj, "decompress"):
- self.decompress = self._obj.decompress
- else:
- self.decompress = self._obj.process
-
- def flush(self):
- if hasattr(self._obj, "flush"):
- return self._obj.flush()
- return b""
-
-
-class MultiDecoder(object):
- """
- From RFC7231:
- If one or more encodings have been applied to a representation, the
- sender that applied the encodings MUST generate a Content-Encoding
- header field that lists the content codings in the order in which
- they were applied.
- """
-
- def __init__(self, modes):
- self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")]
-
- def flush(self):
- return self._decoders[0].flush()
-
- def decompress(self, data):
- for d in reversed(self._decoders):
- data = d.decompress(data)
- return data
-
-
+ ret = bytearray()
+ if self._state == GzipDecoderState.SWALLOW_DATA or not data:
+ return bytes(ret)
+ while True:
+ try:
+ ret += self._obj.decompress(data)
+ except zlib.error:
+ previous_state = self._state
+ # Ignore data after the first error
+ self._state = GzipDecoderState.SWALLOW_DATA
+ if previous_state == GzipDecoderState.OTHER_MEMBERS:
+ # Allow trailing garbage acceptable in other gzip clients
+ return bytes(ret)
+ raise
+ data = self._obj.unused_data
+ if not data:
+ return bytes(ret)
+ self._state = GzipDecoderState.OTHER_MEMBERS
+ self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
+
+
+if brotli is not None:
+
+ class BrotliDecoder(object):
+ # Supports both 'brotlipy' and 'Brotli' packages
+ # since they share an import name. The top branches
+ # are for 'brotlipy' and bottom branches for 'Brotli'
+ def __init__(self):
+ self._obj = brotli.Decompressor()
+ if hasattr(self._obj, "decompress"):
+ self.decompress = self._obj.decompress
+ else:
+ self.decompress = self._obj.process
+
+ def flush(self):
+ if hasattr(self._obj, "flush"):
+ return self._obj.flush()
+ return b""
+
+
+class MultiDecoder(object):
+ """
+ From RFC7231:
+ If one or more encodings have been applied to a representation, the
+ sender that applied the encodings MUST generate a Content-Encoding
+ header field that lists the content codings in the order in which
+ they were applied.
+ """
+
+ def __init__(self, modes):
+ self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")]
+
+ def flush(self):
+ return self._decoders[0].flush()
+
+ def decompress(self, data):
+ for d in reversed(self._decoders):
+ data = d.decompress(data)
+ return data
+
+
def _get_decoder(mode):
- if "," in mode:
- return MultiDecoder(mode)
-
- if mode == "gzip":
+ if "," in mode:
+ return MultiDecoder(mode)
+
+ if mode == "gzip":
return GzipDecoder()
- if brotli is not None and mode == "br":
- return BrotliDecoder()
-
+ if brotli is not None and mode == "br":
+ return BrotliDecoder()
+
return DeflateDecoder()
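
``_get_decoder()`` maps a Content-Encoding token to one of the classes above, and a comma-separated list to a ``MultiDecoder`` that unwinds the codings in reverse order of application. A runnable sketch (the ``br`` branch exists only when a brotli package is importable):

    import zlib
    from urllib3.response import _get_decoder

    deflate = _get_decoder("deflate")
    assert deflate.decompress(zlib.compress(b"hello")) == b"hello"

    gzip_dec = _get_decoder("gzip")        # GzipDecoder
    chain = _get_decoder("gzip, deflate")  # MultiDecoder over both
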
@@ -159,23 +159,23 @@ class HTTPResponse(io.IOBase):
"""
HTTP Response container.
- Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
+ Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
loaded and decoded on-demand when the ``data`` property is accessed. This
class is also compatible with the Python standard library's :mod:`io`
module, and can hence be treated as a readable object in the context of that
framework.
- Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:
+ Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:
:param preload_content:
If True, the response's body will be preloaded during construction.
:param decode_content:
- If True, will attempt to decode the body based on the
- 'content-encoding' header.
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header.
:param original_response:
- When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
+ When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
object, it's convenient to include the original for debug purposes. It's
otherwise unused.
@@ -188,31 +188,31 @@ class HTTPResponse(io.IOBase):
value of Content-Length header, if present. Otherwise, raise error.
"""
- CONTENT_DECODERS = ["gzip", "deflate"]
- if brotli is not None:
- CONTENT_DECODERS += ["br"]
+ CONTENT_DECODERS = ["gzip", "deflate"]
+ if brotli is not None:
+ CONTENT_DECODERS += ["br"]
REDIRECT_STATUSES = [301, 302, 303, 307, 308]
- def __init__(
- self,
- body="",
- headers=None,
- status=0,
- version=0,
- reason=None,
- strict=0,
- preload_content=True,
- decode_content=True,
- original_response=None,
- pool=None,
- connection=None,
- msg=None,
- retries=None,
- enforce_content_length=False,
- request_method=None,
- request_url=None,
- auto_close=True,
- ):
+ def __init__(
+ self,
+ body="",
+ headers=None,
+ status=0,
+ version=0,
+ reason=None,
+ strict=0,
+ preload_content=True,
+ decode_content=True,
+ original_response=None,
+ pool=None,
+ connection=None,
+ msg=None,
+ retries=None,
+ enforce_content_length=False,
+ request_method=None,
+ request_url=None,
+ auto_close=True,
+ ):
if isinstance(headers, HTTPHeaderDict):
self.headers = headers
@@ -225,29 +225,29 @@ class HTTPResponse(io.IOBase):
self.decode_content = decode_content
self.retries = retries
self.enforce_content_length = enforce_content_length
- self.auto_close = auto_close
+ self.auto_close = auto_close
self._decoder = None
self._body = None
self._fp = None
self._original_response = original_response
self._fp_bytes_read = 0
- self.msg = msg
- self._request_url = request_url
+ self.msg = msg
+ self._request_url = request_url
- if body and isinstance(body, (six.string_types, bytes)):
+ if body and isinstance(body, (six.string_types, bytes)):
self._body = body
self._pool = pool
self._connection = connection
- if hasattr(body, "read"):
+ if hasattr(body, "read"):
self._fp = body
# Are we using the chunked-style of transfer encoding?
self.chunked = False
self.chunk_left = None
- tr_enc = self.headers.get("transfer-encoding", "").lower()
+ tr_enc = self.headers.get("transfer-encoding", "").lower()
# Don't incur the penalty of creating a list and then discarding it
encodings = (enc.strip() for enc in tr_enc.split(","))
if "chunked" in encodings:
@@ -269,7 +269,7 @@ class HTTPResponse(io.IOBase):
location. ``False`` if not a redirect status code.
"""
if self.status in self.REDIRECT_STATUSES:
- return self.headers.get("location")
+ return self.headers.get("location")
return False
@@ -280,20 +280,20 @@ class HTTPResponse(io.IOBase):
self._pool._put_conn(self._connection)
self._connection = None
- def drain_conn(self):
- """
- Read and discard any remaining HTTP response data in the response connection.
-
- Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
- """
- try:
- self.read()
- except (HTTPError, SocketError, BaseSSLError, HTTPException):
- pass
-
+ def drain_conn(self):
+ """
+ Read and discard any remaining HTTP response data in the response connection.
+
+ Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
+ """
+ try:
+ self.read()
+ except (HTTPError, SocketError, BaseSSLError, HTTPException):
+ pass
+
@property
def data(self):
- # For backwards-compat with earlier urllib3 0.4 and earlier.
+ # For backwards-compat with urllib3 0.4 and earlier.
if self._body:
return self._body
@@ -304,14 +304,14 @@ class HTTPResponse(io.IOBase):
def connection(self):
return self._connection
- def isclosed(self):
- return is_fp_closed(self._fp)
-
+ def isclosed(self):
+ return is_fp_closed(self._fp)
+
def tell(self):
"""
Obtain the number of bytes pulled over the wire so far. May differ from
- the amount of content returned by :meth:``urllib3.response.HTTPResponse.read``
- if bytes are encoded on the wire (e.g, compressed).
+ the amount of content returned by :meth:`urllib3.response.HTTPResponse.read`
+ if bytes are encoded on the wire (e.g., compressed).
"""
return self._fp_bytes_read
@@ -319,21 +319,21 @@ class HTTPResponse(io.IOBase):
"""
Set initial length value for Response content if available.
"""
- length = self.headers.get("content-length")
-
- if length is not None:
- if self.chunked:
- # This Response will fail with an IncompleteRead if it can't be
- # received as chunked. This method falls back to attempt reading
- # the response before raising an exception.
- log.warning(
- "Received response with both Content-Length and "
- "Transfer-Encoding set. This is expressly forbidden "
- "by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
- "attempting to process response as Transfer-Encoding: "
- "chunked."
- )
- return None
+ length = self.headers.get("content-length")
+
+ if length is not None:
+ if self.chunked:
+ # This Response will fail with an IncompleteRead if it can't be
+ # received as chunked. This method falls back to attempt reading
+ # the response before raising an exception.
+ log.warning(
+ "Received response with both Content-Length and "
+ "Transfer-Encoding set. This is expressly forbidden "
+ "by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
+ "attempting to process response as Transfer-Encoding: "
+ "chunked."
+ )
+ return None
try:
# RFC 7230 section 3.3.2 specifies multiple content lengths can
@@ -341,12 +341,12 @@ class HTTPResponse(io.IOBase):
# (e.g. Content-Length: 42, 42). This line ensures the values
# are all valid ints and that as long as the `set` length is 1,
# all values are the same. Otherwise, the header is invalid.
- lengths = set([int(val) for val in length.split(",")])
+ lengths = set([int(val) for val in length.split(",")])
if len(lengths) > 1:
- raise InvalidHeader(
- "Content-Length contained multiple "
- "unmatching values (%s)" % length
- )
+ raise InvalidHeader(
+ "Content-Length contained multiple "
+ "unmatching values (%s)" % length
+ )
length = lengths.pop()
except ValueError:
length = None
@@ -362,7 +362,7 @@ class HTTPResponse(io.IOBase):
status = 0
# Check for responses that shouldn't include a body
- if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
+ if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
length = 0
return length
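
The duplicate-value handling can be paraphrased in isolation: RFC 7230 permits a repeated Content-Length only when every value agrees, so the parsed values are collapsed through a set. A sketch of just that check, not the method itself:

    length = "42, 42"                       # duplicated but consistent
    lengths = set(int(v) for v in length.split(","))
    assert lengths == {42}                  # exactly one distinct value: OK
    # "42, 43" would give {42, 43}, and _init_length() raises InvalidHeader.
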
@@ -373,41 +373,41 @@ class HTTPResponse(io.IOBase):
"""
# Note: content-encoding value should be case-insensitive, per RFC 7230
# Section 3.2
- content_encoding = self.headers.get("content-encoding", "").lower()
- if self._decoder is None:
- if content_encoding in self.CONTENT_DECODERS:
- self._decoder = _get_decoder(content_encoding)
- elif "," in content_encoding:
- encodings = [
- e.strip()
- for e in content_encoding.split(",")
- if e.strip() in self.CONTENT_DECODERS
- ]
- if len(encodings):
- self._decoder = _get_decoder(content_encoding)
-
- DECODER_ERROR_CLASSES = (IOError, zlib.error)
- if brotli is not None:
- DECODER_ERROR_CLASSES += (brotli.error,)
-
+ content_encoding = self.headers.get("content-encoding", "").lower()
+ if self._decoder is None:
+ if content_encoding in self.CONTENT_DECODERS:
+ self._decoder = _get_decoder(content_encoding)
+ elif "," in content_encoding:
+ encodings = [
+ e.strip()
+ for e in content_encoding.split(",")
+ if e.strip() in self.CONTENT_DECODERS
+ ]
+ if len(encodings):
+ self._decoder = _get_decoder(content_encoding)
+
+ DECODER_ERROR_CLASSES = (IOError, zlib.error)
+ if brotli is not None:
+ DECODER_ERROR_CLASSES += (brotli.error,)
+
def _decode(self, data, decode_content, flush_decoder):
"""
Decode the data passed in and potentially flush the decoder.
"""
- if not decode_content:
- return data
-
+ if not decode_content:
+ return data
+
try:
- if self._decoder:
+ if self._decoder:
data = self._decoder.decompress(data)
- except self.DECODER_ERROR_CLASSES as e:
- content_encoding = self.headers.get("content-encoding", "").lower()
+ except self.DECODER_ERROR_CLASSES as e:
+ content_encoding = self.headers.get("content-encoding", "").lower()
raise DecodeError(
"Received response with content-encoding: %s, but "
- "failed to decode it." % content_encoding,
- e,
- )
- if flush_decoder:
+ "failed to decode it." % content_encoding,
+ e,
+ )
+ if flush_decoder:
data += self._flush_decoder()
return data
@@ -418,10 +418,10 @@ class HTTPResponse(io.IOBase):
being used.
"""
if self._decoder:
- buf = self._decoder.decompress(b"")
+ buf = self._decoder.decompress(b"")
return buf + self._decoder.flush()
- return b""
+ return b""
@contextmanager
def _error_catcher(self):
@@ -441,19 +441,19 @@ class HTTPResponse(io.IOBase):
except SocketTimeout:
# FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
# there is yet no clean way to get at it from this context.
- raise ReadTimeoutError(self._pool, None, "Read timed out.")
+ raise ReadTimeoutError(self._pool, None, "Read timed out.")
except BaseSSLError as e:
# FIXME: Is there a better way to differentiate between SSLErrors?
- if "read operation timed out" not in str(e):
- # SSL errors related to framing/MAC get wrapped and reraised here
- raise SSLError(e)
+ if "read operation timed out" not in str(e):
+ # SSL errors related to framing/MAC get wrapped and reraised here
+ raise SSLError(e)
- raise ReadTimeoutError(self._pool, None, "Read timed out.")
+ raise ReadTimeoutError(self._pool, None, "Read timed out.")
except (HTTPException, SocketError) as e:
# This includes IncompleteRead.
- raise ProtocolError("Connection broken: %r" % e, e)
+ raise ProtocolError("Connection broken: %r" % e, e)
# If no exception is thrown, we should avoid cleaning up
# unnecessarily.
@@ -481,7 +481,7 @@ class HTTPResponse(io.IOBase):
def read(self, amt=None, decode_content=None, cache_content=False):
"""
- Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
+ Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
parameters: ``decode_content`` and ``cache_content``.
:param amt:
@@ -508,19 +508,19 @@ class HTTPResponse(io.IOBase):
return
flush_decoder = False
- fp_closed = getattr(self._fp, "closed", False)
+ fp_closed = getattr(self._fp, "closed", False)
with self._error_catcher():
if amt is None:
# cStringIO doesn't like amt=None
- data = self._fp.read() if not fp_closed else b""
+ data = self._fp.read() if not fp_closed else b""
flush_decoder = True
else:
cache_content = False
- data = self._fp.read(amt) if not fp_closed else b""
- if (
- amt != 0 and not data
- ): # Platform-specific: Buggy versions of Python.
+ data = self._fp.read(amt) if not fp_closed else b""
+ if (
+ amt != 0 and not data
+ ): # Platform-specific: Buggy versions of Python.
# Close the connection when no data is returned
#
# This is redundant to what httplib/http.client _should_
@@ -530,10 +530,10 @@ class HTTPResponse(io.IOBase):
# no harm in redundantly calling close.
self._fp.close()
flush_decoder = True
- if self.enforce_content_length and self.length_remaining not in (
- 0,
- None,
- ):
+ if self.enforce_content_length and self.length_remaining not in (
+ 0,
+ None,
+ ):
# This is an edge case that httplib failed to cover due
# to concerns of backward compatibility. We're
# addressing it here to make sure IncompleteRead is
@@ -553,7 +553,7 @@ class HTTPResponse(io.IOBase):
return data
- def stream(self, amt=2 ** 16, decode_content=None):
+ def stream(self, amt=2 ** 16, decode_content=None):
"""
A generator wrapper for the read() method. A call will block until
``amt`` bytes have been read from the connection or until the
@@ -582,7 +582,7 @@ class HTTPResponse(io.IOBase):
@classmethod
def from_httplib(ResponseCls, r, **response_kw):
"""
- Given an :class:`http.client.HTTPResponse` instance ``r``, return a
+ Given an :class:`http.client.HTTPResponse` instance ``r``, return a
corresponding :class:`urllib3.response.HTTPResponse` object.
Remaining parameters are passed to the HTTPResponse constructor, along
@@ -591,27 +591,27 @@ class HTTPResponse(io.IOBase):
headers = r.msg
if not isinstance(headers, HTTPHeaderDict):
- if six.PY2:
- # Python 2.7
+ if six.PY2:
+ # Python 2.7
headers = HTTPHeaderDict.from_httplib(headers)
- else:
- headers = HTTPHeaderDict(headers.items())
+ else:
+ headers = HTTPHeaderDict(headers.items())
# HTTPResponse objects in Python 3 don't have a .strict attribute
- strict = getattr(r, "strict", 0)
- resp = ResponseCls(
- body=r,
- headers=headers,
- status=r.status,
- version=r.version,
- reason=r.reason,
- strict=strict,
- original_response=r,
- **response_kw
- )
+ strict = getattr(r, "strict", 0)
+ resp = ResponseCls(
+ body=r,
+ headers=headers,
+ status=r.status,
+ version=r.version,
+ reason=r.reason,
+ strict=strict,
+ original_response=r,
+ **response_kw
+ )
return resp
- # Backwards-compatibility methods for http.client.HTTPResponse
+ # Backwards-compatibility methods for http.client.HTTPResponse
def getheaders(self):
return self.headers
@@ -630,18 +630,18 @@ class HTTPResponse(io.IOBase):
if self._connection:
self._connection.close()
- if not self.auto_close:
- io.IOBase.close(self)
-
+ if not self.auto_close:
+ io.IOBase.close(self)
+
@property
def closed(self):
- if not self.auto_close:
- return io.IOBase.closed.__get__(self)
- elif self._fp is None:
+ if not self.auto_close:
+ return io.IOBase.closed.__get__(self)
+ elif self._fp is None:
return True
- elif hasattr(self._fp, "isclosed"):
+ elif hasattr(self._fp, "isclosed"):
return self._fp.isclosed()
- elif hasattr(self._fp, "closed"):
+ elif hasattr(self._fp, "closed"):
return self._fp.closed
else:
return True
@@ -652,17 +652,17 @@ class HTTPResponse(io.IOBase):
elif hasattr(self._fp, "fileno"):
return self._fp.fileno()
else:
- raise IOError(
- "The file-like object this HTTPResponse is wrapped "
- "around has no file descriptor"
- )
+ raise IOError(
+ "The file-like object this HTTPResponse is wrapped "
+ "around has no file descriptor"
+ )
def flush(self):
- if (
- self._fp is not None
- and hasattr(self._fp, "flush")
- and not getattr(self._fp, "closed", False)
- ):
+ if (
+ self._fp is not None
+ and hasattr(self._fp, "flush")
+ and not getattr(self._fp, "closed", False)
+ ):
return self._fp.flush()
def readable(self):
@@ -675,17 +675,17 @@ class HTTPResponse(io.IOBase):
if len(temp) == 0:
return 0
else:
- b[: len(temp)] = temp
+ b[: len(temp)] = temp
return len(temp)
def supports_chunked_reads(self):
"""
Checks if the underlying file-like object looks like a
- :class:`http.client.HTTPResponse` object. We do this by testing for
- the fp attribute. If it is present we assume it returns raw chunks as
+ :class:`http.client.HTTPResponse` object. We do this by testing for
+ the fp attribute. If it is present we assume it returns raw chunks as
processed by read_chunked().
"""
- return hasattr(self._fp, "fp")
+ return hasattr(self._fp, "fp")
def _update_chunk_length(self):
# First, we'll figure out length of a chunk and then
@@ -693,13 +693,13 @@ class HTTPResponse(io.IOBase):
if self.chunk_left is not None:
return
line = self._fp.fp.readline()
- line = line.split(b";", 1)[0]
+ line = line.split(b";", 1)[0]
try:
self.chunk_left = int(line, 16)
except ValueError:
# Invalid chunked protocol response, abort.
self.close()
- raise InvalidChunkLength(self, line)
+ raise InvalidChunkLength(self, line)
def _handle_chunk(self, amt):
returned_chunk = None
@@ -728,11 +728,11 @@ class HTTPResponse(io.IOBase):
Similar to :meth:`HTTPResponse.read`, but with an additional
parameter: ``decode_content``.
- :param amt:
- How much of the content to read. If specified, caching is skipped
- because it doesn't make sense to cache partial content as the full
- response.
-
+ :param amt:
+ How much of the content to read. If specified, caching is skipped
+ because it doesn't make sense to cache partial content as the full
+ response.
+
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
@@ -742,33 +742,33 @@ class HTTPResponse(io.IOBase):
if not self.chunked:
raise ResponseNotChunked(
"Response is not chunked. "
- "Header 'transfer-encoding: chunked' is missing."
- )
+ "Header 'transfer-encoding: chunked' is missing."
+ )
if not self.supports_chunked_reads():
raise BodyNotHttplibCompatible(
- "Body should be http.client.HTTPResponse like. "
- "It should have have an fp attribute which returns raw chunks."
- )
-
- with self._error_catcher():
- # Don't bother reading the body of a HEAD request.
- if self._original_response and is_response_to_head(self._original_response):
- self._original_response.close()
- return
-
- # If a response is already read and closed
- # then return immediately.
- if self._fp.fp is None:
- return
-
+ "Body should be http.client.HTTPResponse like. "
+ "It should have have an fp attribute which returns raw chunks."
+ )
+
+ with self._error_catcher():
+ # Don't bother reading the body of a HEAD request.
+ if self._original_response and is_response_to_head(self._original_response):
+ self._original_response.close()
+ return
+
+ # If a response is already read and closed
+ # then return immediately.
+ if self._fp.fp is None:
+ return
+
while True:
self._update_chunk_length()
if self.chunk_left == 0:
break
chunk = self._handle_chunk(amt)
- decoded = self._decode(
- chunk, decode_content=decode_content, flush_decoder=False
- )
+ decoded = self._decode(
+ chunk, decode_content=decode_content, flush_decoder=False
+ )
if decoded:
yield decoded
@@ -786,37 +786,37 @@ class HTTPResponse(io.IOBase):
if not line:
# Some sites may not end with '\r\n'.
break
- if line == b"\r\n":
+ if line == b"\r\n":
break
# We read everything; close the "file".
if self._original_response:
self._original_response.close()
-
- def geturl(self):
- """
- Returns the URL that was the source of this response.
- If the request that generated this response redirected, this method
- will return the final redirect location.
- """
- if self.retries is not None and len(self.retries.history):
- return self.retries.history[-1].redirect_location
- else:
- return self._request_url
-
- def __iter__(self):
- buffer = []
- for chunk in self.stream(decode_content=True):
- if b"\n" in chunk:
- chunk = chunk.split(b"\n")
- yield b"".join(buffer) + chunk[0] + b"\n"
- for x in chunk[1:-1]:
- yield x + b"\n"
- if chunk[-1]:
- buffer = [chunk[-1]]
- else:
- buffer = []
- else:
- buffer.append(chunk)
- if buffer:
- yield b"".join(buffer)
+
+ def geturl(self):
+ """
+ Returns the URL that was the source of this response.
+ If the request that generated this response redirected, this method
+ will return the final redirect location.
+ """
+ if self.retries is not None and len(self.retries.history):
+ return self.retries.history[-1].redirect_location
+ else:
+ return self._request_url
+
+ def __iter__(self):
+ buffer = []
+ for chunk in self.stream(decode_content=True):
+ if b"\n" in chunk:
+ chunk = chunk.split(b"\n")
+ yield b"".join(buffer) + chunk[0] + b"\n"
+ for x in chunk[1:-1]:
+ yield x + b"\n"
+ if chunk[-1]:
+ buffer = [chunk[-1]]
+ else:
+ buffer = []
+ else:
+ buffer.append(chunk)
+ if buffer:
+ yield b"".join(buffer)
diff --git a/contrib/python/urllib3/urllib3/util/__init__.py b/contrib/python/urllib3/urllib3/util/__init__.py
index 4547fc522b..3049541da9 100644
--- a/contrib/python/urllib3/urllib3/util/__init__.py
+++ b/contrib/python/urllib3/urllib3/util/__init__.py
@@ -1,49 +1,49 @@
from __future__ import absolute_import
-
+
# For backwards compatibility, provide imports that used to be here.
from .connection import is_connection_dropped
-from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
+from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
from .response import is_fp_closed
-from .retry import Retry
+from .retry import Retry
from .ssl_ import (
- ALPN_PROTOCOLS,
+ ALPN_PROTOCOLS,
HAS_SNI,
IS_PYOPENSSL,
IS_SECURETRANSPORT,
- PROTOCOL_TLS,
- SSLContext,
+ PROTOCOL_TLS,
+ SSLContext,
assert_fingerprint,
resolve_cert_reqs,
resolve_ssl_version,
ssl_wrap_socket,
)
-from .timeout import Timeout, current_time
-from .url import Url, get_host, parse_url, split_first
-from .wait import wait_for_read, wait_for_write
+from .timeout import Timeout, current_time
+from .url import Url, get_host, parse_url, split_first
+from .wait import wait_for_read, wait_for_write
__all__ = (
- "HAS_SNI",
- "IS_PYOPENSSL",
- "IS_SECURETRANSPORT",
- "SSLContext",
- "PROTOCOL_TLS",
- "ALPN_PROTOCOLS",
- "Retry",
- "Timeout",
- "Url",
- "assert_fingerprint",
- "current_time",
- "is_connection_dropped",
- "is_fp_closed",
- "get_host",
- "parse_url",
- "make_headers",
- "resolve_cert_reqs",
- "resolve_ssl_version",
- "split_first",
- "ssl_wrap_socket",
- "wait_for_read",
- "wait_for_write",
- "SKIP_HEADER",
- "SKIPPABLE_HEADERS",
+ "HAS_SNI",
+ "IS_PYOPENSSL",
+ "IS_SECURETRANSPORT",
+ "SSLContext",
+ "PROTOCOL_TLS",
+ "ALPN_PROTOCOLS",
+ "Retry",
+ "Timeout",
+ "Url",
+ "assert_fingerprint",
+ "current_time",
+ "is_connection_dropped",
+ "is_fp_closed",
+ "get_host",
+ "parse_url",
+ "make_headers",
+ "resolve_cert_reqs",
+ "resolve_ssl_version",
+ "split_first",
+ "ssl_wrap_socket",
+ "wait_for_read",
+ "wait_for_write",
+ "SKIP_HEADER",
+ "SKIPPABLE_HEADERS",
)
diff --git a/contrib/python/urllib3/urllib3/util/connection.py b/contrib/python/urllib3/urllib3/util/connection.py
index 6af1138f26..eae89b1739 100644
--- a/contrib/python/urllib3/urllib3/util/connection.py
+++ b/contrib/python/urllib3/urllib3/util/connection.py
@@ -1,11 +1,11 @@
from __future__ import absolute_import
-
+
import socket
-
-from ..contrib import _appengine_environ
-from ..exceptions import LocationParseError
-from ..packages import six
-from .wait import NoWayToWaitForSocketError, wait_for_read
+
+from ..contrib import _appengine_environ
+from ..exceptions import LocationParseError
+from ..packages import six
+from .wait import NoWayToWaitForSocketError, wait_for_read
def is_connection_dropped(conn): # Platform-specific
@@ -13,20 +13,20 @@ def is_connection_dropped(conn): # Platform-specific
Returns True if the connection is dropped and should be closed.
:param conn:
- :class:`http.client.HTTPConnection` object.
+ :class:`http.client.HTTPConnection` object.
Note: For platforms like AppEngine, this will always return ``False`` to
let the platform handle connection recycling transparently for us.
"""
- sock = getattr(conn, "sock", False)
+ sock = getattr(conn, "sock", False)
if sock is False: # Platform-specific: AppEngine
return False
if sock is None: # Connection already closed (such as by httplib).
return True
- try:
- # Returns True if readable, which here means it's been dropped
- return wait_for_read(sock, timeout=0.0)
- except NoWayToWaitForSocketError: # Platform-specific: AppEngine
+ try:
+ # Returns True if readable, which here means it's been dropped
+ return wait_for_read(sock, timeout=0.0)
+ except NoWayToWaitForSocketError: # Platform-specific: AppEngine
return False
@@ -34,27 +34,27 @@ def is_connection_dropped(conn): # Platform-specific
# library test suite. Added to its signature is only `socket_options`.
# One additional modification is that we avoid binding to IPv6 servers
# discovered in DNS if the system doesn't have IPv6 functionality.
-def create_connection(
- address,
- timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
- source_address=None,
- socket_options=None,
-):
+def create_connection(
+ address,
+ timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+ source_address=None,
+ socket_options=None,
+):
"""Connect to *address* and return the socket object.
Convenience function. Connect to *address* (a 2-tuple ``(host,
port)``) and return the socket object. Passing the optional
*timeout* parameter will set the timeout on the socket instance
before attempting to connect. If no *timeout* is supplied, the
- global default timeout setting returned by :func:`socket.getdefaulttimeout`
+ global default timeout setting returned by :func:`socket.getdefaulttimeout`
is used. If *source_address* is set it must be a tuple of (host, port)
for the socket to bind as a source address before making the connection.
A host of '' or port 0 tells the OS to use the default.
"""
host, port = address
- if host.startswith("["):
- host = host.strip("[]")
+ if host.startswith("["):
+ host = host.strip("[]")
err = None
# Using the value from allowed_gai_family() in the context of getaddrinfo lets
@@ -62,13 +62,13 @@ def create_connection(
# The original create_connection function always returns all records.
family = allowed_gai_family()
- try:
- host.encode("idna")
- except UnicodeError:
- return six.raise_from(
- LocationParseError(u"'%s', label empty or too long" % host), None
- )
-
+ try:
+ host.encode("idna")
+ except UnicodeError:
+ return six.raise_from(
+ LocationParseError(u"'%s', label empty or too long" % host), None
+ )
+
for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
sock = None
@@ -117,22 +117,22 @@ def allowed_gai_family():
def _has_ipv6(host):
- """Returns True if the system can bind an IPv6 address."""
+ """Returns True if the system can bind an IPv6 address."""
sock = None
has_ipv6 = False
- # App Engine doesn't support IPV6 sockets and actually has a quota on the
- # number of sockets that can be used, so just early out here instead of
- # creating a socket needlessly.
- # See https://github.com/urllib3/urllib3/issues/1446
- if _appengine_environ.is_appengine_sandbox():
- return False
-
+ # App Engine doesn't support IPV6 sockets and actually has a quota on the
+ # number of sockets that can be used, so just early out here instead of
+ # creating a socket needlessly.
+ # See https://github.com/urllib3/urllib3/issues/1446
+ if _appengine_environ.is_appengine_sandbox():
+ return False
+
if socket.has_ipv6:
# has_ipv6 returns true if cPython was compiled with IPv6 support.
# It does not tell us if the system has IPv6 support enabled. To
# determine that we must bind to an IPv6 address.
- # https://github.com/urllib3/urllib3/pull/611
+ # https://github.com/urllib3/urllib3/pull/611
# https://bugs.python.org/issue658327
try:
sock = socket.socket(socket.AF_INET6)
@@ -146,4 +146,4 @@ def _has_ipv6(host):
return has_ipv6
-HAS_IPV6 = _has_ipv6("::1")
+HAS_IPV6 = _has_ipv6("::1")
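
A usage sketch of the helper (host and port are hypothetical and assume something is listening; ``socket_options`` mirrors the shape the connection pool passes in):

    import socket
    from urllib3.util.connection import HAS_IPV6, create_connection

    sock = create_connection(
        ("localhost", 8080),
        timeout=2.0,
        socket_options=[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)],
    )
    sock.close()

    # Computed once at import time by probing a bind on "::1".
    print("IPv6 usable:", HAS_IPV6)
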
diff --git a/contrib/python/urllib3/urllib3/util/proxy.py b/contrib/python/urllib3/urllib3/util/proxy.py
index 2199cc7b7f..4bf3dbe28a 100644
--- a/contrib/python/urllib3/urllib3/util/proxy.py
+++ b/contrib/python/urllib3/urllib3/util/proxy.py
@@ -1,57 +1,57 @@
-from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version
-
-
-def connection_requires_http_tunnel(
- proxy_url=None, proxy_config=None, destination_scheme=None
-):
- """
- Returns True if the connection requires an HTTP CONNECT through the proxy.
-
- :param URL proxy_url:
- URL of the proxy.
- :param ProxyConfig proxy_config:
- Proxy configuration from poolmanager.py
- :param str destination_scheme:
- The scheme of the destination. (i.e https, http, etc)
- """
- # If we're not using a proxy, no way to use a tunnel.
- if proxy_url is None:
- return False
-
- # HTTP destinations never require tunneling, we always forward.
- if destination_scheme == "http":
- return False
-
- # Support for forwarding with HTTPS proxies and HTTPS destinations.
- if (
- proxy_url.scheme == "https"
- and proxy_config
- and proxy_config.use_forwarding_for_https
- ):
- return False
-
- # Otherwise always use a tunnel.
- return True
-
-
-def create_proxy_ssl_context(
- ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None
-):
- """
- Generates a default proxy ssl context if one hasn't been provided by the
- user.
- """
- ssl_context = create_urllib3_context(
- ssl_version=resolve_ssl_version(ssl_version),
- cert_reqs=resolve_cert_reqs(cert_reqs),
- )
-
- if (
- not ca_certs
- and not ca_cert_dir
- and not ca_cert_data
- and hasattr(ssl_context, "load_default_certs")
- ):
- ssl_context.load_default_certs()
-
- return ssl_context
+from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version
+
+
+def connection_requires_http_tunnel(
+ proxy_url=None, proxy_config=None, destination_scheme=None
+):
+ """
+ Returns True if the connection requires an HTTP CONNECT through the proxy.
+
+ :param URL proxy_url:
+ URL of the proxy.
+ :param ProxyConfig proxy_config:
+ Proxy configuration from poolmanager.py
+ :param str destination_scheme:
+ The scheme of the destination (i.e. https, http, etc.).
+ """
+ # If we're not using a proxy, no way to use a tunnel.
+ if proxy_url is None:
+ return False
+
+ # HTTP destinations never require tunneling, we always forward.
+ if destination_scheme == "http":
+ return False
+
+ # Support for forwarding with HTTPS proxies and HTTPS destinations.
+ if (
+ proxy_url.scheme == "https"
+ and proxy_config
+ and proxy_config.use_forwarding_for_https
+ ):
+ return False
+
+ # Otherwise always use a tunnel.
+ return True
+
+
+def create_proxy_ssl_context(
+ ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None
+):
+ """
+ Generates a default proxy ssl context if one hasn't been provided by the
+ user.
+ """
+ ssl_context = create_urllib3_context(
+ ssl_version=resolve_ssl_version(ssl_version),
+ cert_reqs=resolve_cert_reqs(cert_reqs),
+ )
+
+ if (
+ not ca_certs
+ and not ca_cert_dir
+ and not ca_cert_data
+ and hasattr(ssl_context, "load_default_certs")
+ ):
+ ssl_context.load_default_certs()
+
+ return ssl_context
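
``create_proxy_ssl_context()`` can be exercised offline; a sketch assuming the standard-library ssl backend (no CA material is supplied, so the system defaults get loaded):

    import ssl
    from urllib3.util.proxy import create_proxy_ssl_context

    ctx = create_proxy_ssl_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED)
    assert isinstance(ctx, ssl.SSLContext)
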
diff --git a/contrib/python/urllib3/urllib3/util/queue.py b/contrib/python/urllib3/urllib3/util/queue.py
index 41784104ee..a76b6920ad 100644
--- a/contrib/python/urllib3/urllib3/util/queue.py
+++ b/contrib/python/urllib3/urllib3/util/queue.py
@@ -1,22 +1,22 @@
-import collections
-
-from ..packages import six
-from ..packages.six.moves import queue
-
-if six.PY2:
- # Queue is imported for side effects on MS Windows. See issue #229.
- import Queue as _unused_module_Queue # noqa: F401
-
-
-class LifoQueue(queue.Queue):
- def _init(self, _):
- self.queue = collections.deque()
-
- def _qsize(self, len=len):
- return len(self.queue)
-
- def _put(self, item):
- self.queue.append(item)
-
- def _get(self):
- return self.queue.pop()
+import collections
+
+from ..packages import six
+from ..packages.six.moves import queue
+
+if six.PY2:
+ # Queue is imported for side effects on MS Windows. See issue #229.
+ import Queue as _unused_module_Queue # noqa: F401
+
+
+class LifoQueue(queue.Queue):
+ def _init(self, _):
+ self.queue = collections.deque()
+
+ def _qsize(self, len=len):
+ return len(self.queue)
+
+ def _put(self, item):
+ self.queue.append(item)
+
+ def _get(self):
+ return self.queue.pop()
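
The subclass above only swaps ``queue.Queue``'s internal FIFO for a deque popped from the right, which turns it into a LIFO: a connection pool drawing from it always gets the most recently returned (and most likely still warm) connection first. A Python 3 rendering of the same idea, with the ``six`` shim dropped::

    import collections
    import queue

    class LifoQueue(queue.Queue):
        def _init(self, _):
            self.queue = collections.deque()

        def _qsize(self, len=len):
            return len(self.queue)

        def _put(self, item):
            self.queue.append(item)

        def _get(self):
            return self.queue.pop()   # pop from the right: last in, first out

    pool = LifoQueue()
    for conn in ("conn-1", "conn-2", "conn-3"):
        pool.put(conn)
    assert pool.get() == "conn-3"     # most recently added item comes back first
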
diff --git a/contrib/python/urllib3/urllib3/util/request.py b/contrib/python/urllib3/urllib3/util/request.py
index 25103383ec..815ffb7d7a 100644
--- a/contrib/python/urllib3/urllib3/util/request.py
+++ b/contrib/python/urllib3/urllib3/util/request.py
@@ -1,36 +1,36 @@
from __future__ import absolute_import
-
+
from base64 import b64encode
-from ..exceptions import UnrewindableBodyError
-from ..packages.six import b, integer_types
-
-# Pass as a value within ``headers`` to skip
-# emitting some HTTP headers that are added automatically.
-# The only headers that are supported are ``Accept-Encoding``,
-# ``Host``, and ``User-Agent``.
-SKIP_HEADER = "@@@SKIP_HEADER@@@"
-SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])
-
-ACCEPT_ENCODING = "gzip,deflate"
-try:
- import brotli as _unused_module_brotli # noqa: F401
-except ImportError:
- pass
-else:
- ACCEPT_ENCODING += ",br"
-
+from ..exceptions import UnrewindableBodyError
+from ..packages.six import b, integer_types
+
+# Pass as a value within ``headers`` to skip
+# emitting some HTTP headers that are added automatically.
+# The only headers that are supported are ``Accept-Encoding``,
+# ``Host``, and ``User-Agent``.
+SKIP_HEADER = "@@@SKIP_HEADER@@@"
+SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])
+
+ACCEPT_ENCODING = "gzip,deflate"
+try:
+ import brotli as _unused_module_brotli # noqa: F401
+except ImportError:
+ pass
+else:
+ ACCEPT_ENCODING += ",br"
+
_FAILEDTELL = object()
-def make_headers(
- keep_alive=None,
- accept_encoding=None,
- user_agent=None,
- basic_auth=None,
- proxy_basic_auth=None,
- disable_cache=None,
-):
+def make_headers(
+ keep_alive=None,
+ accept_encoding=None,
+ user_agent=None,
+ basic_auth=None,
+ proxy_basic_auth=None,
+ disable_cache=None,
+):
"""
Shortcuts for generating request headers.
@@ -70,27 +70,27 @@ def make_headers(
if isinstance(accept_encoding, str):
pass
elif isinstance(accept_encoding, list):
- accept_encoding = ",".join(accept_encoding)
+ accept_encoding = ",".join(accept_encoding)
else:
accept_encoding = ACCEPT_ENCODING
- headers["accept-encoding"] = accept_encoding
+ headers["accept-encoding"] = accept_encoding
if user_agent:
- headers["user-agent"] = user_agent
+ headers["user-agent"] = user_agent
if keep_alive:
- headers["connection"] = "keep-alive"
+ headers["connection"] = "keep-alive"
if basic_auth:
- headers["authorization"] = "Basic " + b64encode(b(basic_auth)).decode("utf-8")
+ headers["authorization"] = "Basic " + b64encode(b(basic_auth)).decode("utf-8")
if proxy_basic_auth:
- headers["proxy-authorization"] = "Basic " + b64encode(
- b(proxy_basic_auth)
- ).decode("utf-8")
+ headers["proxy-authorization"] = "Basic " + b64encode(
+ b(proxy_basic_auth)
+ ).decode("utf-8")
if disable_cache:
- headers["cache-control"] = "no-cache"
+ headers["cache-control"] = "no-cache"
return headers
@@ -102,7 +102,7 @@ def set_file_position(body, pos):
"""
if pos is not None:
rewind_body(body, pos)
- elif getattr(body, "tell", None) is not None:
+ elif getattr(body, "tell", None) is not None:
try:
pos = body.tell()
except (IOError, OSError):
@@ -124,20 +124,20 @@ def rewind_body(body, body_pos):
:param int pos:
Position to seek to in file.
"""
- body_seek = getattr(body, "seek", None)
+ body_seek = getattr(body, "seek", None)
if body_seek is not None and isinstance(body_pos, integer_types):
try:
body_seek(body_pos)
except (IOError, OSError):
- raise UnrewindableBodyError(
- "An error occurred when rewinding request body for redirect/retry."
- )
+ raise UnrewindableBodyError(
+ "An error occurred when rewinding request body for redirect/retry."
+ )
elif body_pos is _FAILEDTELL:
- raise UnrewindableBodyError(
- "Unable to record file position for rewinding "
- "request body during a redirect/retry."
- )
+ raise UnrewindableBodyError(
+ "Unable to record file position for rewinding "
+ "request body during a redirect/retry."
+ )
else:
- raise ValueError(
- "body_pos must be of type integer, instead it was %s." % type(body_pos)
- )
+ raise ValueError(
+ "body_pos must be of type integer, instead it was %s." % type(body_pos)
+ )
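
For reference, ``make_headers`` above is public urllib3 API; on 1.26.x the shortcuts expand as in this small example (the application name and credentials below are hypothetical, and the comment shows the expected output)::

    from urllib3.util.request import make_headers

    headers = make_headers(
        keep_alive=True,
        user_agent="my-app/1.0",     # hypothetical application name
        basic_auth="user:passwd",    # hypothetical credentials
    )
    print(headers)
    # {'user-agent': 'my-app/1.0',
    #  'connection': 'keep-alive',
    #  'authorization': 'Basic dXNlcjpwYXNzd2Q='}
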
diff --git a/contrib/python/urllib3/urllib3/util/response.py b/contrib/python/urllib3/urllib3/util/response.py
index 5ea609cced..1f4592f317 100644
--- a/contrib/python/urllib3/urllib3/util/response.py
+++ b/contrib/python/urllib3/urllib3/util/response.py
@@ -1,9 +1,9 @@
from __future__ import absolute_import
-from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect
-
+from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect
+
from ..exceptions import HeaderParsingError
-from ..packages.six.moves import http_client as httplib
+from ..packages.six.moves import http_client as httplib
def is_fp_closed(obj):
@@ -44,7 +44,7 @@ def assert_header_parsing(headers):
Only works on Python 3.
- :param http.client.HTTPMessage headers: Headers to verify.
+ :param http.client.HTTPMessage headers: Headers to verify.
:raises urllib3.exceptions.HeaderParsingError:
If parsing errors are found.
@@ -53,40 +53,40 @@ def assert_header_parsing(headers):
# This will fail silently if we pass in the wrong kind of parameter.
# To make debugging easier, add an explicit check.
if not isinstance(headers, httplib.HTTPMessage):
- raise TypeError("expected httplib.Message, got {0}.".format(type(headers)))
+ raise TypeError("expected httplib.Message, got {0}.".format(type(headers)))
- defects = getattr(headers, "defects", None)
- get_payload = getattr(headers, "get_payload", None)
+ defects = getattr(headers, "defects", None)
+ get_payload = getattr(headers, "get_payload", None)
unparsed_data = None
- if get_payload:
- # get_payload is actually email.message.Message.get_payload;
- # we're only interested in the result if it's not a multipart message
- if not headers.is_multipart():
- payload = get_payload()
-
- if isinstance(payload, (bytes, str)):
- unparsed_data = payload
- if defects:
- # httplib is assuming a response body is available
- # when parsing headers even when httplib only sends
- # header data to parse_headers(). This results in
- # defects on multipart responses in particular.
- # See: https://github.com/urllib3/urllib3/issues/800
-
- # So we ignore the following defects:
- # - StartBoundaryNotFoundDefect:
- # The claimed start boundary was never found.
- # - MultipartInvariantViolationDefect:
- # A message claimed to be a multipart but no subparts were found.
- defects = [
- defect
- for defect in defects
- if not isinstance(
- defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect)
- )
- ]
-
+ if get_payload:
+ # get_payload is actually email.message.Message.get_payload;
+ # we're only interested in the result if it's not a multipart message
+ if not headers.is_multipart():
+ payload = get_payload()
+
+ if isinstance(payload, (bytes, str)):
+ unparsed_data = payload
+ if defects:
+ # httplib is assuming a response body is available
+ # when parsing headers even when httplib only sends
+ # header data to parse_headers(). This results in
+ # defects on multipart responses in particular.
+ # See: https://github.com/urllib3/urllib3/issues/800
+
+ # So we ignore the following defects:
+ # - StartBoundaryNotFoundDefect:
+ # The claimed start boundary was never found.
+ # - MultipartInvariantViolationDefect:
+ # A message claimed to be a multipart but no subparts were found.
+ defects = [
+ defect
+ for defect in defects
+ if not isinstance(
+ defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect)
+ )
+ ]
+
if defects or unparsed_data:
raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
@@ -96,12 +96,12 @@ def is_response_to_head(response):
Checks whether the request that produced a response was a HEAD request.
Handles the quirks of AppEngine.
- :param http.client.HTTPResponse response:
- Response to check if the originating request
- used 'HEAD' as a method.
+ :param http.client.HTTPResponse response:
+ Response to check if the originating request
+ used 'HEAD' as a method.
"""
# FIXME: Can we do this somehow without accessing private httplib _method?
method = response._method
if isinstance(method, int): # Platform-specific: Appengine
return method == 3
- return method.upper() == "HEAD"
+ return method.upper() == "HEAD"
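
The defect filtering restored above exists because ``httplib`` feeds header-only data to the email parser, which then flags phantom multipart problems (urllib3 issue #800). The two defect classes it tolerates are easy to reproduce with the standard library alone (exact defects recorded may vary slightly across Python versions)::

    from email.errors import (MultipartInvariantViolationDefect,
                              StartBoundaryNotFoundDefect)
    from email.parser import Parser

    # Header-only input, the way httplib's parse_headers() sees a response:
    msg = Parser().parsestr("Content-Type: multipart/mixed; boundary=xyz\r\n\r\n")
    benign = (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect)
    print([type(d).__name__ for d in msg.defects])
    print(all(isinstance(d, benign) for d in msg.defects))
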
diff --git a/contrib/python/urllib3/urllib3/util/retry.py b/contrib/python/urllib3/urllib3/util/retry.py
index 3398323fd7..eb3790bf8e 100644
--- a/contrib/python/urllib3/urllib3/util/retry.py
+++ b/contrib/python/urllib3/urllib3/util/retry.py
@@ -1,19 +1,19 @@
from __future__ import absolute_import
-
-import email
-import logging
-import re
-import time
-import warnings
+
+import email
+import logging
+import re
+import time
+import warnings
from collections import namedtuple
from itertools import takewhile
from ..exceptions import (
ConnectTimeoutError,
- InvalidHeader,
+ InvalidHeader,
MaxRetryError,
ProtocolError,
- ProxyError,
+ ProxyError,
ReadTimeoutError,
ResponseError,
)
@@ -21,76 +21,76 @@ from ..packages import six
log = logging.getLogger(__name__)
-
+
# Data structure for representing the metadata of requests that result in a retry.
-RequestHistory = namedtuple(
- "RequestHistory", ["method", "url", "error", "status", "redirect_location"]
-)
-
-
-# TODO: In v2 we can remove this sentinel and metaclass with deprecated options.
-_Default = object()
-
-
-class _RetryMeta(type):
- @property
- def DEFAULT_METHOD_WHITELIST(cls):
- warnings.warn(
- "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
- "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
- DeprecationWarning,
- )
- return cls.DEFAULT_ALLOWED_METHODS
-
- @DEFAULT_METHOD_WHITELIST.setter
- def DEFAULT_METHOD_WHITELIST(cls, value):
- warnings.warn(
- "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
- "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
- DeprecationWarning,
- )
- cls.DEFAULT_ALLOWED_METHODS = value
-
- @property
- def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls):
- warnings.warn(
- "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
- "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
- DeprecationWarning,
- )
- return cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
-
- @DEFAULT_REDIRECT_HEADERS_BLACKLIST.setter
- def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value):
- warnings.warn(
- "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
- "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
- DeprecationWarning,
- )
- cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value
-
- @property
- def BACKOFF_MAX(cls):
- warnings.warn(
- "Using 'Retry.BACKOFF_MAX' is deprecated and "
- "will be removed in v2.0. Use 'Retry.DEFAULT_BACKOFF_MAX' instead",
- DeprecationWarning,
- )
- return cls.DEFAULT_BACKOFF_MAX
-
- @BACKOFF_MAX.setter
- def BACKOFF_MAX(cls, value):
- warnings.warn(
- "Using 'Retry.BACKOFF_MAX' is deprecated and "
- "will be removed in v2.0. Use 'Retry.DEFAULT_BACKOFF_MAX' instead",
- DeprecationWarning,
- )
- cls.DEFAULT_BACKOFF_MAX = value
-
-
-@six.add_metaclass(_RetryMeta)
+RequestHistory = namedtuple(
+ "RequestHistory", ["method", "url", "error", "status", "redirect_location"]
+)
+
+
+# TODO: In v2 we can remove this sentinel and metaclass with deprecated options.
+_Default = object()
+
+
+class _RetryMeta(type):
+ @property
+ def DEFAULT_METHOD_WHITELIST(cls):
+ warnings.warn(
+ "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
+ "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
+ DeprecationWarning,
+ )
+ return cls.DEFAULT_ALLOWED_METHODS
+
+ @DEFAULT_METHOD_WHITELIST.setter
+ def DEFAULT_METHOD_WHITELIST(cls, value):
+ warnings.warn(
+ "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
+ "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
+ DeprecationWarning,
+ )
+ cls.DEFAULT_ALLOWED_METHODS = value
+
+ @property
+ def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls):
+ warnings.warn(
+ "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
+ "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
+ DeprecationWarning,
+ )
+ return cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
+
+ @DEFAULT_REDIRECT_HEADERS_BLACKLIST.setter
+ def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value):
+ warnings.warn(
+ "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
+ "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
+ DeprecationWarning,
+ )
+ cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value
+
+ @property
+ def BACKOFF_MAX(cls):
+ warnings.warn(
+ "Using 'Retry.BACKOFF_MAX' is deprecated and "
+ "will be removed in v2.0. Use 'Retry.DEFAULT_BACKOFF_MAX' instead",
+ DeprecationWarning,
+ )
+ return cls.DEFAULT_BACKOFF_MAX
+
+ @BACKOFF_MAX.setter
+ def BACKOFF_MAX(cls, value):
+ warnings.warn(
+ "Using 'Retry.BACKOFF_MAX' is deprecated and "
+ "will be removed in v2.0. Use 'Retry.DEFAULT_BACKOFF_MAX' instead",
+ DeprecationWarning,
+ )
+ cls.DEFAULT_BACKOFF_MAX = value
+
+
+@six.add_metaclass(_RetryMeta)
class Retry(object):
- """Retry configuration.
+ """Retry configuration.
Each retry attempt will create a new Retry object with updated values, so
they can be safely reused.
@@ -116,7 +116,7 @@ class Retry(object):
Total number of retries to allow. Takes precedence over other counts.
Set to ``None`` to remove this constraint and fall back on other
- counts.
+ counts.
Set to ``0`` to fail on the first retry.
@@ -157,35 +157,35 @@ class Retry(object):
Set to ``0`` to fail on the first retry of this type.
- :param int other:
- How many times to retry on other errors.
-
- Other errors are errors that are not connect, read, redirect or status errors.
- These errors might be raised after the request was sent to the server, so the
- request might have side-effects.
-
- Set to ``0`` to fail on the first retry of this type.
-
- If ``total`` is not set, it's a good idea to set this to 0 to account
- for unexpected edge cases and avoid infinite retry loops.
-
- :param iterable allowed_methods:
+ :param int other:
+ How many times to retry on other errors.
+
+ Other errors are errors that are not connect, read, redirect or status errors.
+ These errors might be raised after the request was sent to the server, so the
+ request might have side-effects.
+
+ Set to ``0`` to fail on the first retry of this type.
+
+ If ``total`` is not set, it's a good idea to set this to 0 to account
+ for unexpected edge cases and avoid infinite retry loops.
+
+ :param iterable allowed_methods:
Set of uppercased HTTP method verbs that we should retry on.
By default, we only retry on methods which are considered to be
idempotent (multiple requests with the same parameters end with the
- same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`.
+ same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`.
Set to a ``False`` value to retry on any verb.
- .. warning::
-
- Previously this parameter was named ``method_whitelist``; that
- usage is deprecated in v1.26.0 and will be removed in v2.0.
-
+ .. warning::
+
+ Previously this parameter was named ``method_whitelist``; that
+ usage is deprecated in v1.26.0 and will be removed in v2.0.
+
:param iterable status_forcelist:
A set of integer HTTP status codes that we should force a retry on.
- A retry is initiated if the request method is in ``allowed_methods``
+ A retry is initiated if the request method is in ``allowed_methods``
and the response status code is in ``status_forcelist``.
By default, this is disabled with ``None``.
@@ -195,11 +195,11 @@ class Retry(object):
(most errors are resolved immediately by a second try without a
delay). urllib3 will sleep for::
- {backoff factor} * (2 ** ({number of total retries} - 1))
+ {backoff factor} * (2 ** ({number of total retries} - 1))
seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
- than :attr:`Retry.DEFAULT_BACKOFF_MAX`.
+ than :attr:`Retry.DEFAULT_BACKOFF_MAX`.
By default, backoff is disabled (set to 0).
@@ -220,70 +220,70 @@ class Retry(object):
Whether to respect Retry-After header on status codes defined as
:attr:`Retry.RETRY_AFTER_STATUS_CODES` or not.
- :param iterable remove_headers_on_redirect:
- Sequence of headers to remove from the request when a response
- indicating a redirect is returned before firing off the redirected
- request.
+ :param iterable remove_headers_on_redirect:
+ Sequence of headers to remove from the request when a response
+ indicating a redirect is returned before firing off the redirected
+ request.
"""
- #: Default methods to be used for ``allowed_methods``
- DEFAULT_ALLOWED_METHODS = frozenset(
- ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"]
- )
+ #: Default methods to be used for ``allowed_methods``
+ DEFAULT_ALLOWED_METHODS = frozenset(
+ ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"]
+ )
- #: Default status codes to be used for ``status_forcelist``
+ #: Default status codes to be used for ``status_forcelist``
RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
- #: Default headers to be used for ``remove_headers_on_redirect``
- DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"])
-
+ #: Default headers to be used for ``remove_headers_on_redirect``
+ DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"])
+
#: Maximum backoff time.
- DEFAULT_BACKOFF_MAX = 120
-
- def __init__(
- self,
- total=10,
- connect=None,
- read=None,
- redirect=None,
- status=None,
- other=None,
- allowed_methods=_Default,
- status_forcelist=None,
- backoff_factor=0,
- raise_on_redirect=True,
- raise_on_status=True,
- history=None,
- respect_retry_after_header=True,
- remove_headers_on_redirect=_Default,
- # TODO: Deprecated, remove in v2.0
- method_whitelist=_Default,
- ):
-
- if method_whitelist is not _Default:
- if allowed_methods is not _Default:
- raise ValueError(
- "Using both 'allowed_methods' and "
- "'method_whitelist' together is not allowed. "
- "Instead only use 'allowed_methods'"
- )
- warnings.warn(
- "Using 'method_whitelist' with Retry is deprecated and "
- "will be removed in v2.0. Use 'allowed_methods' instead",
- DeprecationWarning,
- stacklevel=2,
- )
- allowed_methods = method_whitelist
- if allowed_methods is _Default:
- allowed_methods = self.DEFAULT_ALLOWED_METHODS
- if remove_headers_on_redirect is _Default:
- remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
-
+ DEFAULT_BACKOFF_MAX = 120
+
+ def __init__(
+ self,
+ total=10,
+ connect=None,
+ read=None,
+ redirect=None,
+ status=None,
+ other=None,
+ allowed_methods=_Default,
+ status_forcelist=None,
+ backoff_factor=0,
+ raise_on_redirect=True,
+ raise_on_status=True,
+ history=None,
+ respect_retry_after_header=True,
+ remove_headers_on_redirect=_Default,
+ # TODO: Deprecated, remove in v2.0
+ method_whitelist=_Default,
+ ):
+
+ if method_whitelist is not _Default:
+ if allowed_methods is not _Default:
+ raise ValueError(
+ "Using both 'allowed_methods' and "
+ "'method_whitelist' together is not allowed. "
+ "Instead only use 'allowed_methods'"
+ )
+ warnings.warn(
+ "Using 'method_whitelist' with Retry is deprecated and "
+ "will be removed in v2.0. Use 'allowed_methods' instead",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ allowed_methods = method_whitelist
+ if allowed_methods is _Default:
+ allowed_methods = self.DEFAULT_ALLOWED_METHODS
+ if remove_headers_on_redirect is _Default:
+ remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
+
self.total = total
self.connect = connect
self.read = read
self.status = status
- self.other = other
+ self.other = other
if redirect is False or total is False:
redirect = 0
@@ -291,55 +291,55 @@ class Retry(object):
self.redirect = redirect
self.status_forcelist = status_forcelist or set()
- self.allowed_methods = allowed_methods
+ self.allowed_methods = allowed_methods
self.backoff_factor = backoff_factor
self.raise_on_redirect = raise_on_redirect
self.raise_on_status = raise_on_status
self.history = history or tuple()
self.respect_retry_after_header = respect_retry_after_header
- self.remove_headers_on_redirect = frozenset(
- [h.lower() for h in remove_headers_on_redirect]
- )
+ self.remove_headers_on_redirect = frozenset(
+ [h.lower() for h in remove_headers_on_redirect]
+ )
def new(self, **kw):
params = dict(
total=self.total,
- connect=self.connect,
- read=self.read,
- redirect=self.redirect,
- status=self.status,
- other=self.other,
+ connect=self.connect,
+ read=self.read,
+ redirect=self.redirect,
+ status=self.status,
+ other=self.other,
status_forcelist=self.status_forcelist,
backoff_factor=self.backoff_factor,
raise_on_redirect=self.raise_on_redirect,
raise_on_status=self.raise_on_status,
history=self.history,
- remove_headers_on_redirect=self.remove_headers_on_redirect,
- respect_retry_after_header=self.respect_retry_after_header,
+ remove_headers_on_redirect=self.remove_headers_on_redirect,
+ respect_retry_after_header=self.respect_retry_after_header,
)
-
- # TODO: If already given in **kw we use what's given to us
- # If not given we need to figure out what to pass. We decide
- # based on whether our class has the 'method_whitelist' property
- # and if so we pass the deprecated 'method_whitelist' otherwise
- # we use 'allowed_methods'. Remove in v2.0
- if "method_whitelist" not in kw and "allowed_methods" not in kw:
- if "method_whitelist" in self.__dict__:
- warnings.warn(
- "Using 'method_whitelist' with Retry is deprecated and "
- "will be removed in v2.0. Use 'allowed_methods' instead",
- DeprecationWarning,
- )
- params["method_whitelist"] = self.allowed_methods
- else:
- params["allowed_methods"] = self.allowed_methods
-
+
+ # TODO: If already given in **kw we use what's given to us
+ # If not given we need to figure out what to pass. We decide
+ # based on whether our class has the 'method_whitelist' property
+ # and if so we pass the deprecated 'method_whitelist' otherwise
+ # we use 'allowed_methods'. Remove in v2.0
+ if "method_whitelist" not in kw and "allowed_methods" not in kw:
+ if "method_whitelist" in self.__dict__:
+ warnings.warn(
+ "Using 'method_whitelist' with Retry is deprecated and "
+ "will be removed in v2.0. Use 'allowed_methods' instead",
+ DeprecationWarning,
+ )
+ params["method_whitelist"] = self.allowed_methods
+ else:
+ params["allowed_methods"] = self.allowed_methods
+
params.update(kw)
return type(self)(**params)
@classmethod
def from_int(cls, retries, redirect=True, default=None):
- """Backwards-compatibility for the old retries format."""
+ """Backwards-compatibility for the old retries format."""
if retries is None:
retries = default if default is not None else cls.DEFAULT
@@ -352,38 +352,38 @@ class Retry(object):
return new_retries
def get_backoff_time(self):
- """Formula for computing the current backoff
+ """Formula for computing the current backoff
:rtype: float
"""
# We want to consider only the most recent sequence of consecutive errors (ignoring redirects).
- consecutive_errors_len = len(
- list(
- takewhile(lambda x: x.redirect_location is None, reversed(self.history))
- )
- )
+ consecutive_errors_len = len(
+ list(
+ takewhile(lambda x: x.redirect_location is None, reversed(self.history))
+ )
+ )
if consecutive_errors_len <= 1:
return 0
backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1))
- return min(self.DEFAULT_BACKOFF_MAX, backoff_value)
+ return min(self.DEFAULT_BACKOFF_MAX, backoff_value)
def parse_retry_after(self, retry_after):
# Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
if re.match(r"^\s*[0-9]+\s*$", retry_after):
seconds = int(retry_after)
else:
- retry_date_tuple = email.utils.parsedate_tz(retry_after)
+ retry_date_tuple = email.utils.parsedate_tz(retry_after)
if retry_date_tuple is None:
raise InvalidHeader("Invalid Retry-After header: %s" % retry_after)
- if retry_date_tuple[9] is None: # Python 2
- # Assume UTC if no timezone was specified
- # On Python 2.7, parsedate_tz returns None for a timezone offset
- # instead of 0 if no timezone is given, whereas mktime_tz treats
- # a None timezone offset as local time.
- retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:]
-
- retry_date = email.utils.mktime_tz(retry_date_tuple)
+ if retry_date_tuple[9] is None: # Python 2
+ # Assume UTC if no timezone was specified
+ # On Python 2.7, parsedate_tz returns None for a timezone offset
+ # instead of 0 if no timezone is given, whereas mktime_tz treats
+ # a None timezone offset as local time.
+ retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:]
+
+ retry_date = email.utils.mktime_tz(retry_date_tuple)
seconds = retry_date - time.time()
if seconds < 0:
@@ -392,7 +392,7 @@ class Retry(object):
return seconds
def get_retry_after(self, response):
- """Get the value of Retry-After in seconds."""
+ """Get the value of Retry-After in seconds."""
retry_after = response.getheader("Retry-After")
@@ -416,7 +416,7 @@ class Retry(object):
time.sleep(backoff)
def sleep(self, response=None):
- """Sleep between retry attempts.
+ """Sleep between retry attempts.
This method will respect a server's ``Retry-After`` response header
and sleep the duration of the time requested. If that is not present, it
@@ -424,7 +424,7 @@ class Retry(object):
this method will return immediately.
"""
- if self.respect_retry_after_header and response:
+ if self.respect_retry_after_header and response:
slept = self.sleep_for_retry(response)
if slept:
return
@@ -432,41 +432,41 @@ class Retry(object):
self._sleep_backoff()
def _is_connection_error(self, err):
- """Errors when we're fairly sure that the server did not receive the
+ """Errors when we're fairly sure that the server did not receive the
request, so it should be safe to retry.
"""
- if isinstance(err, ProxyError):
- err = err.original_error
+ if isinstance(err, ProxyError):
+ err = err.original_error
return isinstance(err, ConnectTimeoutError)
def _is_read_error(self, err):
- """Errors that occur after the request has been started, so we should
+ """Errors that occur after the request has been started, so we should
assume that the server began processing it.
"""
return isinstance(err, (ReadTimeoutError, ProtocolError))
def _is_method_retryable(self, method):
- """Checks if a given HTTP method should be retried upon, depending if
- it is included in the allowed_methods
+ """Checks if a given HTTP method should be retried upon, depending if
+ it is included in the allowed_methods
"""
- # TODO: For now favor if the Retry implementation sets its own method_whitelist
- # property outside of our constructor to avoid breaking custom implementations.
- if "method_whitelist" in self.__dict__:
- warnings.warn(
- "Using 'method_whitelist' with Retry is deprecated and "
- "will be removed in v2.0. Use 'allowed_methods' instead",
- DeprecationWarning,
- )
- allowed_methods = self.method_whitelist
- else:
- allowed_methods = self.allowed_methods
-
- if allowed_methods and method.upper() not in allowed_methods:
- return False
+ # TODO: For now favor if the Retry implementation sets its own method_whitelist
+ # property outside of our constructor to avoid breaking custom implementations.
+ if "method_whitelist" in self.__dict__:
+ warnings.warn(
+ "Using 'method_whitelist' with Retry is deprecated and "
+ "will be removed in v2.0. Use 'allowed_methods' instead",
+ DeprecationWarning,
+ )
+ allowed_methods = self.method_whitelist
+ else:
+ allowed_methods = self.allowed_methods
+
+ if allowed_methods and method.upper() not in allowed_methods:
+ return False
return True
def is_retry(self, method, status_code, has_retry_after=False):
- """Is this method/status code retryable? (Based on allowlists and control
+ """Is this method/status code retryable? (Based on allowlists and control
variables such as the number of total retries to allow, whether to
respect the Retry-After header, whether this header is present, and
whether the returned status code is on the list of status codes to
@@ -478,39 +478,39 @@ class Retry(object):
if self.status_forcelist and status_code in self.status_forcelist:
return True
- return (
- self.total
- and self.respect_retry_after_header
- and has_retry_after
- and (status_code in self.RETRY_AFTER_STATUS_CODES)
- )
+ return (
+ self.total
+ and self.respect_retry_after_header
+ and has_retry_after
+ and (status_code in self.RETRY_AFTER_STATUS_CODES)
+ )
def is_exhausted(self):
- """Are we out of retries?"""
- retry_counts = (
- self.total,
- self.connect,
- self.read,
- self.redirect,
- self.status,
- self.other,
- )
+ """Are we out of retries?"""
+ retry_counts = (
+ self.total,
+ self.connect,
+ self.read,
+ self.redirect,
+ self.status,
+ self.other,
+ )
retry_counts = list(filter(None, retry_counts))
if not retry_counts:
return False
return min(retry_counts) < 0
- def increment(
- self,
- method=None,
- url=None,
- response=None,
- error=None,
- _pool=None,
- _stacktrace=None,
- ):
- """Return a new Retry object with incremented retry counters.
+ def increment(
+ self,
+ method=None,
+ url=None,
+ response=None,
+ error=None,
+ _pool=None,
+ _stacktrace=None,
+ ):
+ """Return a new Retry object with incremented retry counters.
:param response: A response object, or None, if the server did not
return a response.
@@ -532,8 +532,8 @@ class Retry(object):
read = self.read
redirect = self.redirect
status_count = self.status
- other = self.other
- cause = "unknown"
+ other = self.other
+ cause = "unknown"
status = None
redirect_location = None
@@ -551,42 +551,42 @@ class Retry(object):
elif read is not None:
read -= 1
- elif error:
- # Other retry?
- if other is not None:
- other -= 1
-
+ elif error:
+ # Other retry?
+ if other is not None:
+ other -= 1
+
elif response and response.get_redirect_location():
# Redirect retry?
if redirect is not None:
redirect -= 1
- cause = "too many redirects"
+ cause = "too many redirects"
redirect_location = response.get_redirect_location()
status = response.status
else:
# Incrementing because of a server error like a 500 in
- # status_forcelist and the given method is in the allowed_methods
+ # status_forcelist and the given method is in the allowed_methods
cause = ResponseError.GENERIC_ERROR
if response and response.status:
if status_count is not None:
status_count -= 1
- cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status)
+ cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status)
status = response.status
- history = self.history + (
- RequestHistory(method, url, error, status, redirect_location),
- )
+ history = self.history + (
+ RequestHistory(method, url, error, status, redirect_location),
+ )
new_retry = self.new(
total=total,
- connect=connect,
- read=read,
- redirect=redirect,
- status=status_count,
- other=other,
- history=history,
- )
+ connect=connect,
+ read=read,
+ redirect=redirect,
+ status=status_count,
+ other=other,
+ history=history,
+ )
if new_retry.is_exhausted():
raise MaxRetryError(_pool, url, error or ResponseError(cause))
@@ -596,25 +596,25 @@ class Retry(object):
return new_retry
def __repr__(self):
- return (
- "{cls.__name__}(total={self.total}, connect={self.connect}, "
- "read={self.read}, redirect={self.redirect}, status={self.status})"
- ).format(cls=type(self), self=self)
-
- def __getattr__(self, item):
- if item == "method_whitelist":
- # TODO: Remove this deprecated alias in v2.0
- warnings.warn(
- "Using 'method_whitelist' with Retry is deprecated and "
- "will be removed in v2.0. Use 'allowed_methods' instead",
- DeprecationWarning,
- )
- return self.allowed_methods
- try:
- return getattr(super(Retry, self), item)
- except AttributeError:
- return getattr(Retry, item)
-
-
+ return (
+ "{cls.__name__}(total={self.total}, connect={self.connect}, "
+ "read={self.read}, redirect={self.redirect}, status={self.status})"
+ ).format(cls=type(self), self=self)
+
+ def __getattr__(self, item):
+ if item == "method_whitelist":
+ # TODO: Remove this deprecated alias in v2.0
+ warnings.warn(
+ "Using 'method_whitelist' with Retry is deprecated and "
+ "will be removed in v2.0. Use 'allowed_methods' instead",
+ DeprecationWarning,
+ )
+ return self.allowed_methods
+ try:
+ return getattr(super(Retry, self), item)
+ except AttributeError:
+ return getattr(Retry, item)
+
+
# For backwards compatibility (equivalent to pre-v1.9):
Retry.DEFAULT = Retry(3)
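
The exponential backoff restored in ``get_backoff_time`` above reduces to a one-line formula; here it is worked through for ``backoff_factor=0.1``, matching the sleep sequence quoted in the class docstring::

    DEFAULT_BACKOFF_MAX = 120   # Retry.DEFAULT_BACKOFF_MAX

    def backoff_time(backoff_factor, consecutive_errors):
        if consecutive_errors <= 1:
            return 0            # the first retry fires immediately
        return min(DEFAULT_BACKOFF_MAX,
                   backoff_factor * (2 ** (consecutive_errors - 1)))

    print([backoff_time(0.1, n) for n in range(1, 6)])
    # [0, 0.2, 0.4, 0.8, 1.6]
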
diff --git a/contrib/python/urllib3/urllib3/util/ssl_.py b/contrib/python/urllib3/urllib3/util/ssl_.py
index 8f867812a5..90cee63f83 100644
--- a/contrib/python/urllib3/urllib3/util/ssl_.py
+++ b/contrib/python/urllib3/urllib3/util/ssl_.py
@@ -1,30 +1,30 @@
from __future__ import absolute_import
-
+
import hmac
-import os
-import sys
-import warnings
+import os
+import sys
+import warnings
from binascii import hexlify, unhexlify
from hashlib import md5, sha1, sha256
-from ..exceptions import (
- InsecurePlatformWarning,
- ProxySchemeUnsupported,
- SNIMissingWarning,
- SSLError,
-)
-from ..packages import six
-from .url import BRACELESS_IPV6_ADDRZ_RE, IPV4_RE
+from ..exceptions import (
+ InsecurePlatformWarning,
+ ProxySchemeUnsupported,
+ SNIMissingWarning,
+ SSLError,
+)
+from ..packages import six
+from .url import BRACELESS_IPV6_ADDRZ_RE, IPV4_RE
SSLContext = None
-SSLTransport = None
+SSLTransport = None
HAS_SNI = False
IS_PYOPENSSL = False
IS_SECURETRANSPORT = False
-ALPN_PROTOCOLS = ["http/1.1"]
+ALPN_PROTOCOLS = ["http/1.1"]
# Maps the length of a digest to a possible hash function producing this digest
-HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256}
+HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256}
def _const_compare_digest_backport(a, b):
@@ -35,61 +35,61 @@ def _const_compare_digest_backport(a, b):
Returns True if the digests match, and False otherwise.
"""
result = abs(len(a) - len(b))
- for left, right in zip(bytearray(a), bytearray(b)):
- result |= left ^ right
+ for left, right in zip(bytearray(a), bytearray(b)):
+ result |= left ^ right
return result == 0
-_const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_backport)
+_const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_backport)
try: # Test for SSL features
import ssl
- from ssl import CERT_REQUIRED, wrap_socket
-except ImportError:
- pass
-
-try:
+ from ssl import CERT_REQUIRED, wrap_socket
+except ImportError:
+ pass
+
+try:
from ssl import HAS_SNI # Has SNI?
except ImportError:
pass
+try:
+ from .ssltransport import SSLTransport
+except ImportError:
+ pass
+
+
+try: # Platform-specific: Python 3.6
+ from ssl import PROTOCOL_TLS
+
+ PROTOCOL_SSLv23 = PROTOCOL_TLS
+except ImportError:
+ try:
+ from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS
+
+ PROTOCOL_SSLv23 = PROTOCOL_TLS
+ except ImportError:
+ PROTOCOL_SSLv23 = PROTOCOL_TLS = 2
+
+try:
+ from ssl import PROTOCOL_TLS_CLIENT
+except ImportError:
+ PROTOCOL_TLS_CLIENT = PROTOCOL_TLS
+
+
try:
- from .ssltransport import SSLTransport
-except ImportError:
- pass
-
-
-try: # Platform-specific: Python 3.6
- from ssl import PROTOCOL_TLS
-
- PROTOCOL_SSLv23 = PROTOCOL_TLS
-except ImportError:
- try:
- from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS
-
- PROTOCOL_SSLv23 = PROTOCOL_TLS
- except ImportError:
- PROTOCOL_SSLv23 = PROTOCOL_TLS = 2
-
-try:
- from ssl import PROTOCOL_TLS_CLIENT
-except ImportError:
- PROTOCOL_TLS_CLIENT = PROTOCOL_TLS
-
-
-try:
- from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3
+ from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3
except ImportError:
OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
OP_NO_COMPRESSION = 0x20000
-
-try: # OP_NO_TICKET was added in Python 3.6
- from ssl import OP_NO_TICKET
-except ImportError:
- OP_NO_TICKET = 0x4000
-
-
+
+try: # OP_NO_TICKET was added in Python 3.6
+ from ssl import OP_NO_TICKET
+except ImportError:
+ OP_NO_TICKET = 0x4000
+
+
# A secure default.
# Sources for more information on TLS ciphers:
#
@@ -103,34 +103,34 @@ except ImportError:
# - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and
# security,
# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common,
-# - disable NULL authentication, MD5 MACs, DSS, and other
-# insecure ciphers for security reasons.
-# - NOTE: TLS 1.3 cipher suites are managed through a different interface
-# not exposed by CPython (yet!) and are enabled by default if they're available.
-DEFAULT_CIPHERS = ":".join(
- [
- "ECDHE+AESGCM",
- "ECDHE+CHACHA20",
- "DHE+AESGCM",
- "DHE+CHACHA20",
- "ECDH+AESGCM",
- "DH+AESGCM",
- "ECDH+AES",
- "DH+AES",
- "RSA+AESGCM",
- "RSA+AES",
- "!aNULL",
- "!eNULL",
- "!MD5",
- "!DSS",
- ]
-)
+# - disable NULL authentication, MD5 MACs, DSS, and other
+# insecure ciphers for security reasons.
+# - NOTE: TLS 1.3 cipher suites are managed through a different interface
+# not exposed by CPython (yet!) and are enabled by default if they're available.
+DEFAULT_CIPHERS = ":".join(
+ [
+ "ECDHE+AESGCM",
+ "ECDHE+CHACHA20",
+ "DHE+AESGCM",
+ "DHE+CHACHA20",
+ "ECDH+AESGCM",
+ "DH+AESGCM",
+ "ECDH+AES",
+ "DH+AES",
+ "RSA+AESGCM",
+ "RSA+AES",
+ "!aNULL",
+ "!eNULL",
+ "!MD5",
+ "!DSS",
+ ]
+)
try:
from ssl import SSLContext # Modern SSL?
except ImportError:
-
- class SSLContext(object): # Platform-specific: Python 2
+
+ class SSLContext(object): # Platform-specific: Python 2
def __init__(self, protocol_version):
self.protocol = protocol_version
# Use default values from a real SSLContext
@@ -146,37 +146,37 @@ except ImportError:
self.certfile = certfile
self.keyfile = keyfile
- def load_verify_locations(self, cafile=None, capath=None, cadata=None):
+ def load_verify_locations(self, cafile=None, capath=None, cadata=None):
self.ca_certs = cafile
if capath is not None:
raise SSLError("CA directories not supported in older Pythons")
- if cadata is not None:
- raise SSLError("CA data not supported in older Pythons")
-
+ if cadata is not None:
+ raise SSLError("CA data not supported in older Pythons")
+
def set_ciphers(self, cipher_suite):
self.ciphers = cipher_suite
def wrap_socket(self, socket, server_hostname=None, server_side=False):
warnings.warn(
- "A true SSLContext object is not available. This prevents "
- "urllib3 from configuring SSL appropriately and may cause "
- "certain SSL connections to fail. You can upgrade to a newer "
- "version of Python to solve this. For more information, see "
- "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
- "#ssl-warnings",
- InsecurePlatformWarning,
+ "A true SSLContext object is not available. This prevents "
+ "urllib3 from configuring SSL appropriately and may cause "
+ "certain SSL connections to fail. You can upgrade to a newer "
+ "version of Python to solve this. For more information, see "
+ "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
+ "#ssl-warnings",
+ InsecurePlatformWarning,
)
kwargs = {
- "keyfile": self.keyfile,
- "certfile": self.certfile,
- "ca_certs": self.ca_certs,
- "cert_reqs": self.verify_mode,
- "ssl_version": self.protocol,
- "server_side": server_side,
+ "keyfile": self.keyfile,
+ "certfile": self.certfile,
+ "ca_certs": self.ca_certs,
+ "cert_reqs": self.verify_mode,
+ "ssl_version": self.protocol,
+ "server_side": server_side,
}
- return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
+ return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
def assert_fingerprint(cert, fingerprint):
@@ -189,11 +189,11 @@ def assert_fingerprint(cert, fingerprint):
Fingerprint as a string of hex digits, optionally interspersed with colons.
"""
- fingerprint = fingerprint.replace(":", "").lower()
+ fingerprint = fingerprint.replace(":", "").lower()
digest_length = len(fingerprint)
hashfunc = HASHFUNC_MAP.get(digest_length)
if not hashfunc:
- raise SSLError("Fingerprint of invalid length: {0}".format(fingerprint))
+ raise SSLError("Fingerprint of invalid length: {0}".format(fingerprint))
# We need encode() here for py32; works on py2 and py33.
fingerprint_bytes = unhexlify(fingerprint.encode())
@@ -201,31 +201,31 @@ def assert_fingerprint(cert, fingerprint):
cert_digest = hashfunc(cert).digest()
if not _const_compare_digest(cert_digest, fingerprint_bytes):
- raise SSLError(
- 'Fingerprints did not match. Expected "{0}", got "{1}".'.format(
- fingerprint, hexlify(cert_digest)
- )
- )
+ raise SSLError(
+ 'Fingerprints did not match. Expected "{0}", got "{1}".'.format(
+ fingerprint, hexlify(cert_digest)
+ )
+ )
def resolve_cert_reqs(candidate):
"""
Resolves the argument to a numeric constant, which can be passed to
the wrap_socket function/method from the ssl module.
- Defaults to :data:`ssl.CERT_REQUIRED`.
+ Defaults to :data:`ssl.CERT_REQUIRED`.
If given a string it is assumed to be the name of the constant in the
- :mod:`ssl` module or its abbreviation.
+ :mod:`ssl` module or its abbreviation.
(So you can specify `REQUIRED` instead of `CERT_REQUIRED`.)
If it's neither `None` nor a string we assume it is already the numeric
constant which can directly be passed to wrap_socket.
"""
if candidate is None:
- return CERT_REQUIRED
+ return CERT_REQUIRED
if isinstance(candidate, str):
res = getattr(ssl, candidate, None)
if res is None:
- res = getattr(ssl, "CERT_" + candidate)
+ res = getattr(ssl, "CERT_" + candidate)
return res
return candidate
@@ -236,20 +236,20 @@ def resolve_ssl_version(candidate):
like resolve_cert_reqs
"""
if candidate is None:
- return PROTOCOL_TLS
+ return PROTOCOL_TLS
if isinstance(candidate, str):
res = getattr(ssl, candidate, None)
if res is None:
- res = getattr(ssl, "PROTOCOL_" + candidate)
+ res = getattr(ssl, "PROTOCOL_" + candidate)
return res
return candidate
-def create_urllib3_context(
- ssl_version=None, cert_reqs=None, options=None, ciphers=None
-):
+def create_urllib3_context(
+ ssl_version=None, cert_reqs=None, options=None, ciphers=None
+):
"""All arguments have the same meaning as ``ssl_wrap_socket``.
By default, this function does a lot of the same work that
@@ -276,21 +276,21 @@ def create_urllib3_context(
``ssl.CERT_REQUIRED``.
:param options:
Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
- ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``.
+ ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``.
:param ciphers:
Which cipher suites to allow the server to select.
:returns:
Constructed SSLContext object with specified options
:rtype: SSLContext
"""
- # PROTOCOL_TLS is deprecated in Python 3.10
- if not ssl_version or ssl_version == PROTOCOL_TLS:
- ssl_version = PROTOCOL_TLS_CLIENT
-
- context = SSLContext(ssl_version)
-
- context.set_ciphers(ciphers or DEFAULT_CIPHERS)
-
+ # PROTOCOL_TLS is deprecated in Python 3.10
+ if not ssl_version or ssl_version == PROTOCOL_TLS:
+ ssl_version = PROTOCOL_TLS_CLIENT
+
+ context = SSLContext(ssl_version)
+
+ context.set_ciphers(ciphers or DEFAULT_CIPHERS)
+
# Setting the default here, as we may have no ssl module on import
cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
@@ -303,70 +303,70 @@ def create_urllib3_context(
# Disable compression to prevent CRIME attacks for OpenSSL 1.0+
# (issue #309)
options |= OP_NO_COMPRESSION
- # TLSv1.2 only. Unless set explicitly, do not request tickets.
- # This may save some bandwidth on the wire, and although the ticket is encrypted,
- # there is a risk associated with it being on the wire,
- # if the server is not rotating its ticketing keys properly.
- options |= OP_NO_TICKET
+ # TLSv1.2 only. Unless set explicitly, do not request tickets.
+ # This may save some bandwidth on the wire, and although the ticket is encrypted,
+ # there is a risk associated with it being on the wire,
+ # if the server is not rotating its ticketing keys properly.
+ options |= OP_NO_TICKET
context.options |= options
- # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is
- # necessary for conditional client cert authentication with TLS 1.3.
- # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older
- # versions of Python. We only enable on Python 3.7.4+ or if certificate
- # verification is enabled to work around Python issue #37428
- # See: https://bugs.python.org/issue37428
- if (cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4)) and getattr(
- context, "post_handshake_auth", None
- ) is not None:
- context.post_handshake_auth = True
-
- def disable_check_hostname():
- if (
- getattr(context, "check_hostname", None) is not None
- ): # Platform-specific: Python 3.2
- # We do our own verification, including fingerprints and alternative
- # hostnames. So disable it here
- context.check_hostname = False
-
- # The order of the lines below setting verify_mode and check_hostname
- # matters due to safeguards SSLContext has to prevent an SSLContext with
- # check_hostname=True, verify_mode=NONE/OPTIONAL. This is made even more
- # complex because we don't know whether PROTOCOL_TLS_CLIENT will be used
- # or not so we don't know the initial state of the freshly created SSLContext.
- if cert_reqs == ssl.CERT_REQUIRED:
- context.verify_mode = cert_reqs
- disable_check_hostname()
- else:
- disable_check_hostname()
- context.verify_mode = cert_reqs
-
- # Enable logging of TLS session keys via the de facto standard environment variable
- # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
- if hasattr(context, "keylog_filename"):
- sslkeylogfile = os.environ.get("SSLKEYLOGFILE")
- if sslkeylogfile:
- context.keylog_filename = sslkeylogfile
-
+ # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is
+ # necessary for conditional client cert authentication with TLS 1.3.
+ # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older
+ # versions of Python. We only enable on Python 3.7.4+ or if certificate
+ # verification is enabled to work around Python issue #37428
+ # See: https://bugs.python.org/issue37428
+ if (cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4)) and getattr(
+ context, "post_handshake_auth", None
+ ) is not None:
+ context.post_handshake_auth = True
+
+ def disable_check_hostname():
+ if (
+ getattr(context, "check_hostname", None) is not None
+ ): # Platform-specific: Python 3.2
+ # We do our own verification, including fingerprints and alternative
+ # hostnames. So disable it here
+ context.check_hostname = False
+
+ # The order of the lines below setting verify_mode and check_hostname
+ # matters due to safeguards SSLContext has to prevent an SSLContext with
+ # check_hostname=True, verify_mode=NONE/OPTIONAL. This is made even more
+ # complex because we don't know whether PROTOCOL_TLS_CLIENT will be used
+ # or not so we don't know the initial state of the freshly created SSLContext.
+ if cert_reqs == ssl.CERT_REQUIRED:
+ context.verify_mode = cert_reqs
+ disable_check_hostname()
+ else:
+ disable_check_hostname()
+ context.verify_mode = cert_reqs
+
+ # Enable logging of TLS session keys via the de facto standard environment variable
+ # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
+ if hasattr(context, "keylog_filename"):
+ sslkeylogfile = os.environ.get("SSLKEYLOGFILE")
+ if sslkeylogfile:
+ context.keylog_filename = sslkeylogfile
+
return context
-def ssl_wrap_socket(
- sock,
- keyfile=None,
- certfile=None,
- cert_reqs=None,
- ca_certs=None,
- server_hostname=None,
- ssl_version=None,
- ciphers=None,
- ssl_context=None,
- ca_cert_dir=None,
- key_password=None,
- ca_cert_data=None,
- tls_in_tls=False,
-):
+def ssl_wrap_socket(
+ sock,
+ keyfile=None,
+ certfile=None,
+ cert_reqs=None,
+ ca_certs=None,
+ server_hostname=None,
+ ssl_version=None,
+ ciphers=None,
+ ssl_context=None,
+ ca_cert_dir=None,
+ key_password=None,
+ ca_cert_data=None,
+ tls_in_tls=False,
+):
"""
All arguments except for server_hostname, ssl_context, and ca_cert_dir have
the same meaning as they do when using :func:`ssl.wrap_socket`.
@@ -377,119 +377,119 @@ def ssl_wrap_socket(
A pre-made :class:`SSLContext` object. If none is provided, one will
be created using :func:`create_urllib3_context`.
:param ciphers:
- A string of ciphers we wish the client to support.
+ A string of ciphers we wish the client to support.
:param ca_cert_dir:
A directory containing CA certificates in multiple separate files, as
supported by OpenSSL's -CApath flag or the capath argument to
SSLContext.load_verify_locations().
- :param key_password:
- Optional password if the keyfile is encrypted.
- :param ca_cert_data:
- Optional string containing CA certificates in PEM format suitable for
- passing as the cadata parameter to SSLContext.load_verify_locations()
- :param tls_in_tls:
- Use SSLTransport to wrap the existing socket.
+ :param key_password:
+ Optional password if the keyfile is encrypted.
+ :param ca_cert_data:
+ Optional string containing CA certificates in PEM format suitable for
+ passing as the cadata parameter to SSLContext.load_verify_locations()
+ :param tls_in_tls:
+ Use SSLTransport to wrap the existing socket.
"""
context = ssl_context
if context is None:
# Note: This branch of code and all the variables in it are no longer
# used by urllib3 itself. We should consider deprecating and removing
# this code.
- context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)
+ context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)
- if ca_certs or ca_cert_dir or ca_cert_data:
+ if ca_certs or ca_cert_dir or ca_cert_data:
try:
- context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
- except (IOError, OSError) as e:
+ context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
+ except (IOError, OSError) as e:
raise SSLError(e)
-
- elif ssl_context is None and hasattr(context, "load_default_certs"):
+
+ elif ssl_context is None and hasattr(context, "load_default_certs"):
# try to load OS default certs; works well on Windows (requires Python 3.4+)
context.load_default_certs()
- # Attempt to detect if we get the goofy behavior of the
- # keyfile being encrypted and OpenSSL asking for the
- # passphrase via the terminal and instead error out.
- if keyfile and key_password is None and _is_key_file_encrypted(keyfile):
- raise SSLError("Client private key is encrypted, password is required")
-
+ # Attempt to detect if we get the goofy behavior of the
+ # keyfile being encrypted and OpenSSL asking for the
+ # passphrase via the terminal and instead error out.
+ if keyfile and key_password is None and _is_key_file_encrypted(keyfile):
+ raise SSLError("Client private key is encrypted, password is required")
+
if certfile:
- if key_password is None:
- context.load_cert_chain(certfile, keyfile)
- else:
- context.load_cert_chain(certfile, keyfile, key_password)
-
- try:
- if hasattr(context, "set_alpn_protocols"):
- context.set_alpn_protocols(ALPN_PROTOCOLS)
- except NotImplementedError: # Defensive: in CI, we always have set_alpn_protocols
- pass
-
- # If we detect server_hostname is an IP address then the SNI
- # extension should not be used according to RFC3546 Section 3.1
- use_sni_hostname = server_hostname and not is_ipaddress(server_hostname)
- # SecureTransport uses server_hostname in certificate verification.
- send_sni = (use_sni_hostname and HAS_SNI) or (
- IS_SECURETRANSPORT and server_hostname
- )
- # Do not warn the user if server_hostname is an invalid SNI hostname.
- if not HAS_SNI and use_sni_hostname:
- warnings.warn(
- "An HTTPS request has been made, but the SNI (Server Name "
- "Indication) extension to TLS is not available on this platform. "
- "This may cause the server to present an incorrect TLS "
- "certificate, which can cause validation failures. You can upgrade to "
- "a newer version of Python to solve this. For more information, see "
- "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
- "#ssl-warnings",
- SNIMissingWarning,
- )
-
- if send_sni:
- ssl_sock = _ssl_wrap_socket_impl(
- sock, context, tls_in_tls, server_hostname=server_hostname
- )
- else:
- ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls)
- return ssl_sock
-
-
-def is_ipaddress(hostname):
- """Detects whether the hostname given is an IPv4 or IPv6 address.
- Also detects IPv6 addresses with Zone IDs.
-
- :param str hostname: Hostname to examine.
- :return: True if the hostname is an IP address, False otherwise.
- """
- if not six.PY2 and isinstance(hostname, bytes):
- # IDN A-label bytes are ASCII compatible.
- hostname = hostname.decode("ascii")
- return bool(IPV4_RE.match(hostname) or BRACELESS_IPV6_ADDRZ_RE.match(hostname))
-
-
-def _is_key_file_encrypted(key_file):
- """Detects if a key file is encrypted or not."""
- with open(key_file, "r") as f:
- for line in f:
- # Look for Proc-Type: 4,ENCRYPTED
- if "ENCRYPTED" in line:
- return True
-
- return False
-
-
-def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None):
- if tls_in_tls:
- if not SSLTransport:
- # Import error, ssl is not available.
- raise ProxySchemeUnsupported(
- "TLS in TLS requires support for the 'ssl' module"
- )
-
- SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
- return SSLTransport(sock, ssl_context, server_hostname)
-
- if server_hostname:
- return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
- else:
- return ssl_context.wrap_socket(sock)
+ if key_password is None:
+ context.load_cert_chain(certfile, keyfile)
+ else:
+ context.load_cert_chain(certfile, keyfile, key_password)
+
+ try:
+ if hasattr(context, "set_alpn_protocols"):
+ context.set_alpn_protocols(ALPN_PROTOCOLS)
+ except NotImplementedError: # Defensive: in CI, we always have set_alpn_protocols
+ pass
+
+ # If we detect server_hostname is an IP address then the SNI
+ # extension should not be used according to RFC3546 Section 3.1
+ use_sni_hostname = server_hostname and not is_ipaddress(server_hostname)
+ # SecureTransport uses server_hostname in certificate verification.
+ send_sni = (use_sni_hostname and HAS_SNI) or (
+ IS_SECURETRANSPORT and server_hostname
+ )
+ # Do not warn the user if server_hostname is an invalid SNI hostname.
+ if not HAS_SNI and use_sni_hostname:
+ warnings.warn(
+ "An HTTPS request has been made, but the SNI (Server Name "
+ "Indication) extension to TLS is not available on this platform. "
+ "This may cause the server to present an incorrect TLS "
+ "certificate, which can cause validation failures. You can upgrade to "
+ "a newer version of Python to solve this. For more information, see "
+ "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
+ "#ssl-warnings",
+ SNIMissingWarning,
+ )
+
+ if send_sni:
+ ssl_sock = _ssl_wrap_socket_impl(
+ sock, context, tls_in_tls, server_hostname=server_hostname
+ )
+ else:
+ ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls)
+ return ssl_sock
+
+
+def is_ipaddress(hostname):
+ """Detects whether the hostname given is an IPv4 or IPv6 address.
+ Also detects IPv6 addresses with Zone IDs.
+
+ :param str hostname: Hostname to examine.
+ :return: True if the hostname is an IP address, False otherwise.
+ """
+ if not six.PY2 and isinstance(hostname, bytes):
+ # IDN A-label bytes are ASCII compatible.
+ hostname = hostname.decode("ascii")
+ return bool(IPV4_RE.match(hostname) or BRACELESS_IPV6_ADDRZ_RE.match(hostname))
+
+
+def _is_key_file_encrypted(key_file):
+ """Detects if a key file is encrypted or not."""
+ with open(key_file, "r") as f:
+ for line in f:
+ # Look for Proc-Type: 4,ENCRYPTED
+ if "ENCRYPTED" in line:
+ return True
+
+ return False
+
+
+def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None):
+ if tls_in_tls:
+ if not SSLTransport:
+ # Import error, ssl is not available.
+ raise ProxySchemeUnsupported(
+ "TLS in TLS requires support for the 'ssl' module"
+ )
+
+ SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
+ return SSLTransport(sock, ssl_context, server_hostname)
+
+ if server_hostname:
+ return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
+ else:
+ return ssl_context.wrap_socket(sock)
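
On 1.26.x the context factory restored above is importable directly, so its hardened defaults are easy to inspect; a quick probe (availability of individual option flags depends on the local OpenSSL/Python build)::

    import ssl
    from urllib3.util.ssl_ import create_urllib3_context

    ctx = create_urllib3_context(cert_reqs=ssl.CERT_REQUIRED)
    print(ctx.verify_mode == ssl.CERT_REQUIRED)       # True
    print(bool(ctx.options & ssl.OP_NO_COMPRESSION))  # True: CRIME mitigation
    print(bool(ctx.options & ssl.OP_NO_TICKET))       # True: tickets off by default
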
diff --git a/contrib/python/urllib3/urllib3/util/ssl_match_hostname.py b/contrib/python/urllib3/urllib3/util/ssl_match_hostname.py
index a4b4a569cb..5516cffcce 100644
--- a/contrib/python/urllib3/urllib3/util/ssl_match_hostname.py
+++ b/contrib/python/urllib3/urllib3/util/ssl_match_hostname.py
@@ -9,13 +9,13 @@ import sys
# ipaddress has been backported to 2.6+ in pypi. If it is installed on the
# system, use it to handle IPAddress ServerAltnames (this was added in
# python-3.5) otherwise only do DNS matching. This allows
-# util.ssl_match_hostname to continue to be used in Python 2.7.
+# util.ssl_match_hostname to continue to be used in Python 2.7.
try:
import ipaddress
except ImportError:
ipaddress = None
-__version__ = "3.5.0.1"
+__version__ = "3.5.0.1"
class CertificateError(ValueError):
@@ -33,19 +33,19 @@ def _dnsname_match(dn, hostname, max_wildcards=1):
# Ported from python3-syntax:
# leftmost, *remainder = dn.split(r'.')
- parts = dn.split(r".")
+ parts = dn.split(r".")
leftmost = parts[0]
remainder = parts[1:]
- wildcards = leftmost.count("*")
+ wildcards = leftmost.count("*")
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
- "too many wildcards in certificate DNS name: " + repr(dn)
- )
+ "too many wildcards in certificate DNS name: " + repr(dn)
+ )
# speed up common case w/o wildcards
if not wildcards:
@@ -54,11 +54,11 @@ def _dnsname_match(dn, hostname, max_wildcards=1):
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
- if leftmost == "*":
+ if leftmost == "*":
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
- pats.append("[^.]+")
- elif leftmost.startswith("xn--") or hostname.startswith("xn--"):
+ pats.append("[^.]+")
+ elif leftmost.startswith("xn--") or hostname.startswith("xn--"):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
@@ -66,23 +66,23 @@ def _dnsname_match(dn, hostname, max_wildcards=1):
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
- pats.append(re.escape(leftmost).replace(r"\*", "[^.]*"))
+ pats.append(re.escape(leftmost).replace(r"\*", "[^.]*"))
# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))
- pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE)
+ pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE)
return pat.match(hostname)
def _to_unicode(obj):
if isinstance(obj, str) and sys.version_info < (3,):
- # ignored flake8 # F821 to support python 2.7 function
- obj = unicode(obj, encoding="ascii", errors="strict") # noqa: F821
+ # ignored flake8 # F821 to support python 2.7 function
+ obj = unicode(obj, encoding="ascii", errors="strict") # noqa: F821
return obj
-
+
def _ipaddress_match(ipname, host_ip):
"""Exact matching of IP addresses.
@@ -104,11 +104,11 @@ def match_hostname(cert, hostname):
returns nothing.
"""
if not cert:
- raise ValueError(
- "empty or no certificate, match_hostname needs a "
- "SSL socket or SSL context with either "
- "CERT_OPTIONAL or CERT_REQUIRED"
- )
+ raise ValueError(
+ "empty or no certificate, match_hostname needs a "
+ "SSL socket or SSL context with either "
+ "CERT_OPTIONAL or CERT_REQUIRED"
+ )
try:
# Divergence from upstream: ipaddress can't handle byte str
host_ip = ipaddress.ip_address(_to_unicode(hostname))
@@ -127,35 +127,35 @@ def match_hostname(cert, hostname):
else:
raise
dnsnames = []
- san = cert.get("subjectAltName", ())
+ san = cert.get("subjectAltName", ())
for key, value in san:
- if key == "DNS":
+ if key == "DNS":
if host_ip is None and _dnsname_match(value, hostname):
return
dnsnames.append(value)
- elif key == "IP Address":
+ elif key == "IP Address":
if host_ip is not None and _ipaddress_match(value, host_ip):
return
dnsnames.append(value)
if not dnsnames:
# The subject is only checked when there is no dNSName entry
# in subjectAltName
- for sub in cert.get("subject", ()):
+ for sub in cert.get("subject", ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
- if key == "commonName":
+ if key == "commonName":
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
- raise CertificateError(
- "hostname %r "
- "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames)))
- )
+ raise CertificateError(
+ "hostname %r "
+ "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames)))
+ )
elif len(dnsnames) == 1:
- raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0]))
+ raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0]))
else:
- raise CertificateError(
- "no appropriate commonName or subjectAltName fields were found"
- )
+ raise CertificateError(
+ "no appropriate commonName or subjectAltName fields were found"
+ )
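
For orientation, a small sketch (outside the diff) of how match_hostname above is typically called, using a hand-made cert dict shaped like what ssl.SSLSocket.getpeercert() returns; the values here are made up.

    from urllib3.util.ssl_match_hostname import CertificateError, match_hostname

    cert = {"subjectAltName": (("DNS", "*.example.com"), ("DNS", "example.com"))}
    match_hostname(cert, "www.example.com")  # silently succeeds (returns None)
    try:
        match_hostname(cert, "other.test")
    except CertificateError as exc:
        print(exc)  # hostname 'other.test' doesn't match either of ...
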
diff --git a/contrib/python/urllib3/urllib3/util/ssltransport.py b/contrib/python/urllib3/urllib3/util/ssltransport.py
index 4a7105d179..d2ae1fbf9d 100644
--- a/contrib/python/urllib3/urllib3/util/ssltransport.py
+++ b/contrib/python/urllib3/urllib3/util/ssltransport.py
@@ -1,221 +1,221 @@
-import io
-import socket
-import ssl
-
-from ..exceptions import ProxySchemeUnsupported
-from ..packages import six
-
-SSL_BLOCKSIZE = 16384
-
-
-class SSLTransport:
- """
- The SSLTransport wraps an existing socket and establishes an SSL connection.
-
- Contrary to Python's implementation of SSLSocket, it allows you to chain
- multiple TLS connections together. It's particularly useful if you need to
- implement TLS within TLS.
-
- The class supports most of the socket API operations.
- """
-
- @staticmethod
- def _validate_ssl_context_for_tls_in_tls(ssl_context):
- """
- Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
- for TLS in TLS.
-
- The only requirement is that the ssl_context provides the 'wrap_bio'
-        method.
- """
-
- if not hasattr(ssl_context, "wrap_bio"):
- if six.PY2:
- raise ProxySchemeUnsupported(
- "TLS in TLS requires SSLContext.wrap_bio() which isn't "
- "supported on Python 2"
- )
- else:
- raise ProxySchemeUnsupported(
- "TLS in TLS requires SSLContext.wrap_bio() which isn't "
- "available on non-native SSLContext"
- )
-
- def __init__(
- self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True
- ):
- """
- Create an SSLTransport around socket using the provided ssl_context.
- """
- self.incoming = ssl.MemoryBIO()
- self.outgoing = ssl.MemoryBIO()
-
- self.suppress_ragged_eofs = suppress_ragged_eofs
- self.socket = socket
-
- self.sslobj = ssl_context.wrap_bio(
- self.incoming, self.outgoing, server_hostname=server_hostname
- )
-
- # Perform initial handshake.
- self._ssl_io_loop(self.sslobj.do_handshake)
-
- def __enter__(self):
- return self
-
- def __exit__(self, *_):
- self.close()
-
- def fileno(self):
- return self.socket.fileno()
-
- def read(self, len=1024, buffer=None):
- return self._wrap_ssl_read(len, buffer)
-
- def recv(self, len=1024, flags=0):
- if flags != 0:
- raise ValueError("non-zero flags not allowed in calls to recv")
- return self._wrap_ssl_read(len)
-
- def recv_into(self, buffer, nbytes=None, flags=0):
- if flags != 0:
- raise ValueError("non-zero flags not allowed in calls to recv_into")
- if buffer and (nbytes is None):
- nbytes = len(buffer)
- elif nbytes is None:
- nbytes = 1024
- return self.read(nbytes, buffer)
-
- def sendall(self, data, flags=0):
- if flags != 0:
- raise ValueError("non-zero flags not allowed in calls to sendall")
- count = 0
- with memoryview(data) as view, view.cast("B") as byte_view:
- amount = len(byte_view)
- while count < amount:
- v = self.send(byte_view[count:])
- count += v
-
- def send(self, data, flags=0):
- if flags != 0:
- raise ValueError("non-zero flags not allowed in calls to send")
- response = self._ssl_io_loop(self.sslobj.write, data)
- return response
-
- def makefile(
- self, mode="r", buffering=None, encoding=None, errors=None, newline=None
- ):
- """
-        Python's http.client uses makefile and buffered io when reading HTTP
- messages and we need to support it.
-
- This is unfortunately a copy and paste of socket.py makefile with small
- changes to point to the socket directly.
- """
- if not set(mode) <= {"r", "w", "b"}:
- raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
-
- writing = "w" in mode
- reading = "r" in mode or not writing
- assert reading or writing
- binary = "b" in mode
- rawmode = ""
- if reading:
- rawmode += "r"
- if writing:
- rawmode += "w"
- raw = socket.SocketIO(self, rawmode)
- self.socket._io_refs += 1
- if buffering is None:
- buffering = -1
- if buffering < 0:
- buffering = io.DEFAULT_BUFFER_SIZE
- if buffering == 0:
- if not binary:
- raise ValueError("unbuffered streams must be binary")
- return raw
- if reading and writing:
- buffer = io.BufferedRWPair(raw, raw, buffering)
- elif reading:
- buffer = io.BufferedReader(raw, buffering)
- else:
- assert writing
- buffer = io.BufferedWriter(raw, buffering)
- if binary:
- return buffer
- text = io.TextIOWrapper(buffer, encoding, errors, newline)
- text.mode = mode
- return text
-
- def unwrap(self):
- self._ssl_io_loop(self.sslobj.unwrap)
-
- def close(self):
- self.socket.close()
-
- def getpeercert(self, binary_form=False):
- return self.sslobj.getpeercert(binary_form)
-
- def version(self):
- return self.sslobj.version()
-
- def cipher(self):
- return self.sslobj.cipher()
-
- def selected_alpn_protocol(self):
- return self.sslobj.selected_alpn_protocol()
-
- def selected_npn_protocol(self):
- return self.sslobj.selected_npn_protocol()
-
- def shared_ciphers(self):
- return self.sslobj.shared_ciphers()
-
- def compression(self):
- return self.sslobj.compression()
-
- def settimeout(self, value):
- self.socket.settimeout(value)
-
- def gettimeout(self):
- return self.socket.gettimeout()
-
- def _decref_socketios(self):
- self.socket._decref_socketios()
-
- def _wrap_ssl_read(self, len, buffer=None):
- try:
- return self._ssl_io_loop(self.sslobj.read, len, buffer)
- except ssl.SSLError as e:
- if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs:
- return 0 # eof, return 0.
- else:
- raise
-
- def _ssl_io_loop(self, func, *args):
- """Performs an I/O loop between incoming/outgoing and the socket."""
- should_loop = True
- ret = None
-
- while should_loop:
- errno = None
- try:
- ret = func(*args)
- except ssl.SSLError as e:
- if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE):
-                    # WANT_READ and WANT_WRITE are expected; others are not.
- raise e
- errno = e.errno
-
- buf = self.outgoing.read()
- self.socket.sendall(buf)
-
- if errno is None:
- should_loop = False
- elif errno == ssl.SSL_ERROR_WANT_READ:
- buf = self.socket.recv(SSL_BLOCKSIZE)
- if buf:
- self.incoming.write(buf)
- else:
- self.incoming.write_eof()
- return ret
+import io
+import socket
+import ssl
+
+from ..exceptions import ProxySchemeUnsupported
+from ..packages import six
+
+SSL_BLOCKSIZE = 16384
+
+
+class SSLTransport:
+ """
+ The SSLTransport wraps an existing socket and establishes an SSL connection.
+
+ Contrary to Python's implementation of SSLSocket, it allows you to chain
+ multiple TLS connections together. It's particularly useful if you need to
+ implement TLS within TLS.
+
+ The class supports most of the socket API operations.
+ """
+
+ @staticmethod
+ def _validate_ssl_context_for_tls_in_tls(ssl_context):
+ """
+ Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
+ for TLS in TLS.
+
+ The only requirement is that the ssl_context provides the 'wrap_bio'
+        method.
+ """
+
+ if not hasattr(ssl_context, "wrap_bio"):
+ if six.PY2:
+ raise ProxySchemeUnsupported(
+ "TLS in TLS requires SSLContext.wrap_bio() which isn't "
+ "supported on Python 2"
+ )
+ else:
+ raise ProxySchemeUnsupported(
+ "TLS in TLS requires SSLContext.wrap_bio() which isn't "
+ "available on non-native SSLContext"
+ )
+
+ def __init__(
+ self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True
+ ):
+ """
+ Create an SSLTransport around socket using the provided ssl_context.
+ """
+ self.incoming = ssl.MemoryBIO()
+ self.outgoing = ssl.MemoryBIO()
+
+ self.suppress_ragged_eofs = suppress_ragged_eofs
+ self.socket = socket
+
+ self.sslobj = ssl_context.wrap_bio(
+ self.incoming, self.outgoing, server_hostname=server_hostname
+ )
+
+ # Perform initial handshake.
+ self._ssl_io_loop(self.sslobj.do_handshake)
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, *_):
+ self.close()
+
+ def fileno(self):
+ return self.socket.fileno()
+
+ def read(self, len=1024, buffer=None):
+ return self._wrap_ssl_read(len, buffer)
+
+ def recv(self, len=1024, flags=0):
+ if flags != 0:
+ raise ValueError("non-zero flags not allowed in calls to recv")
+ return self._wrap_ssl_read(len)
+
+ def recv_into(self, buffer, nbytes=None, flags=0):
+ if flags != 0:
+ raise ValueError("non-zero flags not allowed in calls to recv_into")
+ if buffer and (nbytes is None):
+ nbytes = len(buffer)
+ elif nbytes is None:
+ nbytes = 1024
+ return self.read(nbytes, buffer)
+
+ def sendall(self, data, flags=0):
+ if flags != 0:
+ raise ValueError("non-zero flags not allowed in calls to sendall")
+ count = 0
+ with memoryview(data) as view, view.cast("B") as byte_view:
+ amount = len(byte_view)
+ while count < amount:
+ v = self.send(byte_view[count:])
+ count += v
+
+ def send(self, data, flags=0):
+ if flags != 0:
+ raise ValueError("non-zero flags not allowed in calls to send")
+ response = self._ssl_io_loop(self.sslobj.write, data)
+ return response
+
+ def makefile(
+ self, mode="r", buffering=None, encoding=None, errors=None, newline=None
+ ):
+ """
+        Python's http.client uses makefile and buffered io when reading HTTP
+ messages and we need to support it.
+
+ This is unfortunately a copy and paste of socket.py makefile with small
+ changes to point to the socket directly.
+ """
+ if not set(mode) <= {"r", "w", "b"}:
+ raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
+
+ writing = "w" in mode
+ reading = "r" in mode or not writing
+ assert reading or writing
+ binary = "b" in mode
+ rawmode = ""
+ if reading:
+ rawmode += "r"
+ if writing:
+ rawmode += "w"
+ raw = socket.SocketIO(self, rawmode)
+ self.socket._io_refs += 1
+ if buffering is None:
+ buffering = -1
+ if buffering < 0:
+ buffering = io.DEFAULT_BUFFER_SIZE
+ if buffering == 0:
+ if not binary:
+ raise ValueError("unbuffered streams must be binary")
+ return raw
+ if reading and writing:
+ buffer = io.BufferedRWPair(raw, raw, buffering)
+ elif reading:
+ buffer = io.BufferedReader(raw, buffering)
+ else:
+ assert writing
+ buffer = io.BufferedWriter(raw, buffering)
+ if binary:
+ return buffer
+ text = io.TextIOWrapper(buffer, encoding, errors, newline)
+ text.mode = mode
+ return text
+
+ def unwrap(self):
+ self._ssl_io_loop(self.sslobj.unwrap)
+
+ def close(self):
+ self.socket.close()
+
+ def getpeercert(self, binary_form=False):
+ return self.sslobj.getpeercert(binary_form)
+
+ def version(self):
+ return self.sslobj.version()
+
+ def cipher(self):
+ return self.sslobj.cipher()
+
+ def selected_alpn_protocol(self):
+ return self.sslobj.selected_alpn_protocol()
+
+ def selected_npn_protocol(self):
+ return self.sslobj.selected_npn_protocol()
+
+ def shared_ciphers(self):
+ return self.sslobj.shared_ciphers()
+
+ def compression(self):
+ return self.sslobj.compression()
+
+ def settimeout(self, value):
+ self.socket.settimeout(value)
+
+ def gettimeout(self):
+ return self.socket.gettimeout()
+
+ def _decref_socketios(self):
+ self.socket._decref_socketios()
+
+ def _wrap_ssl_read(self, len, buffer=None):
+ try:
+ return self._ssl_io_loop(self.sslobj.read, len, buffer)
+ except ssl.SSLError as e:
+ if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs:
+ return 0 # eof, return 0.
+ else:
+ raise
+
+ def _ssl_io_loop(self, func, *args):
+ """Performs an I/O loop between incoming/outgoing and the socket."""
+ should_loop = True
+ ret = None
+
+ while should_loop:
+ errno = None
+ try:
+ ret = func(*args)
+ except ssl.SSLError as e:
+ if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE):
+                    # WANT_READ and WANT_WRITE are expected; others are not.
+ raise e
+ errno = e.errno
+
+ buf = self.outgoing.read()
+ self.socket.sendall(buf)
+
+ if errno is None:
+ should_loop = False
+ elif errno == ssl.SSL_ERROR_WANT_READ:
+ buf = self.socket.recv(SSL_BLOCKSIZE)
+ if buf:
+ self.incoming.write(buf)
+ else:
+ self.incoming.write_eof()
+ return ret
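
A hedged sketch of how SSLTransport above enables TLS-in-TLS; proxy_sock is a hypothetical socket that already carries an outer TLS session to an HTTPS proxy with a CONNECT tunnel established.

    import ssl
    from urllib3.util.ssltransport import SSLTransport

    ctx = ssl.create_default_context()
    # Native SSLContext objects provide wrap_bio(), so this validation passes:
    SSLTransport._validate_ssl_context_for_tls_in_tls(ctx)
    # The second handshake runs inside the outer TLS stream via MemoryBIOs.
    with SSLTransport(proxy_sock, ctx, server_hostname="example.com") as tls:
        tls.sendall(b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n")
        print(tls.recv(8192))
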
diff --git a/contrib/python/urllib3/urllib3/util/timeout.py b/contrib/python/urllib3/urllib3/util/timeout.py
index ff69593b05..da1e82925b 100644
--- a/contrib/python/urllib3/urllib3/util/timeout.py
+++ b/contrib/python/urllib3/urllib3/util/timeout.py
@@ -1,7 +1,7 @@
from __future__ import absolute_import
-
-import time
-
+
+import time
+
# The default socket timeout, used by httplib to indicate that no timeout was
# specified by the user
from socket import _GLOBAL_DEFAULT_TIMEOUT
@@ -18,30 +18,30 @@ current_time = getattr(time, "monotonic", time.time)
class Timeout(object):
- """Timeout configuration.
-
- Timeouts can be defined as a default for a pool:
-
- .. code-block:: python
+ """Timeout configuration.
- timeout = Timeout(connect=2.0, read=7.0)
- http = PoolManager(timeout=timeout)
- response = http.request('GET', 'http://example.com/')
+ Timeouts can be defined as a default for a pool:
- Or per-request (which overrides the default for the pool):
+ .. code-block:: python
- .. code-block:: python
+ timeout = Timeout(connect=2.0, read=7.0)
+ http = PoolManager(timeout=timeout)
+ response = http.request('GET', 'http://example.com/')
- response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
+ Or per-request (which overrides the default for the pool):
- Timeouts can be disabled by setting all the parameters to ``None``:
+ .. code-block:: python
- .. code-block:: python
-
- no_timeout = Timeout(connect=None, read=None)
-        response = http.request('GET', 'http://example.com/', timeout=no_timeout)
+ response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
+ Timeouts can be disabled by setting all the parameters to ``None``:
+ .. code-block:: python
+
+ no_timeout = Timeout(connect=None, read=None)
+        response = http.request('GET', 'http://example.com/', timeout=no_timeout)
+
+
:param total:
This combines the connect and read timeouts into one; the read timeout
will be set to the time leftover from the connect attempt. In the
@@ -50,27 +50,27 @@ class Timeout(object):
Defaults to None.
- :type total: int, float, or None
+ :type total: int, float, or None
:param connect:
- The maximum amount of time (in seconds) to wait for a connection
- attempt to a server to succeed. Omitting the parameter will default the
- connect timeout to the system default, probably `the global default
- timeout in socket.py
+ The maximum amount of time (in seconds) to wait for a connection
+ attempt to a server to succeed. Omitting the parameter will default the
+ connect timeout to the system default, probably `the global default
+ timeout in socket.py
<http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
None will set an infinite timeout for connection attempts.
- :type connect: int, float, or None
+ :type connect: int, float, or None
:param read:
- The maximum amount of time (in seconds) to wait between consecutive
- read operations for a response from the server. Omitting the parameter
- will default the read timeout to the system default, probably `the
- global default timeout in socket.py
+ The maximum amount of time (in seconds) to wait between consecutive
+ read operations for a response from the server. Omitting the parameter
+ will default the read timeout to the system default, probably `the
+ global default timeout in socket.py
<http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
None will set an infinite timeout.
- :type read: int, float, or None
+ :type read: int, float, or None
.. note::
@@ -100,25 +100,25 @@ class Timeout(object):
DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT
def __init__(self, total=None, connect=_Default, read=_Default):
- self._connect = self._validate_timeout(connect, "connect")
- self._read = self._validate_timeout(read, "read")
- self.total = self._validate_timeout(total, "total")
+ self._connect = self._validate_timeout(connect, "connect")
+ self._read = self._validate_timeout(read, "read")
+ self.total = self._validate_timeout(total, "total")
self._start_connect = None
- def __repr__(self):
- return "%s(connect=%r, read=%r, total=%r)" % (
- type(self).__name__,
- self._connect,
- self._read,
- self.total,
- )
-
- # __str__ provided for backwards compatibility
- __str__ = __repr__
-
+ def __repr__(self):
+ return "%s(connect=%r, read=%r, total=%r)" % (
+ type(self).__name__,
+ self._connect,
+ self._read,
+ self.total,
+ )
+
+ # __str__ provided for backwards compatibility
+ __str__ = __repr__
+
@classmethod
def _validate_timeout(cls, value, name):
- """Check that a timeout attribute is valid.
+ """Check that a timeout attribute is valid.
:param value: The timeout value to validate
:param name: The name of the timeout attribute to validate. This is
@@ -134,37 +134,37 @@ class Timeout(object):
return value
if isinstance(value, bool):
- raise ValueError(
- "Timeout cannot be a boolean value. It must "
- "be an int, float or None."
- )
+ raise ValueError(
+ "Timeout cannot be a boolean value. It must "
+ "be an int, float or None."
+ )
try:
float(value)
except (TypeError, ValueError):
- raise ValueError(
- "Timeout value %s was %s, but it must be an "
- "int, float or None." % (name, value)
- )
+ raise ValueError(
+ "Timeout value %s was %s, but it must be an "
+ "int, float or None." % (name, value)
+ )
try:
if value <= 0:
- raise ValueError(
- "Attempted to set %s timeout to %s, but the "
- "timeout cannot be set to a value less "
- "than or equal to 0." % (name, value)
- )
- except TypeError:
- # Python 3
- raise ValueError(
- "Timeout value %s was %s, but it must be an "
- "int, float or None." % (name, value)
- )
+ raise ValueError(
+ "Attempted to set %s timeout to %s, but the "
+ "timeout cannot be set to a value less "
+ "than or equal to 0." % (name, value)
+ )
+ except TypeError:
+ # Python 3
+ raise ValueError(
+ "Timeout value %s was %s, but it must be an "
+ "int, float or None." % (name, value)
+ )
return value
@classmethod
def from_float(cls, timeout):
- """Create a new Timeout from a legacy timeout value.
+ """Create a new Timeout from a legacy timeout value.
The timeout value used by httplib.py sets the same timeout on the
connect(), and recv() socket requests. This creates a :class:`Timeout`
@@ -179,7 +179,7 @@ class Timeout(object):
return Timeout(read=timeout, connect=timeout)
def clone(self):
- """Create a copy of the timeout object
+ """Create a copy of the timeout object
Timeout properties are stored per-pool but each request needs a fresh
Timeout object to ensure each one has its own start/stop configured.
@@ -190,10 +190,10 @@ class Timeout(object):
# We can't use copy.deepcopy because that will also create a new object
# for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
# detect the user default.
- return Timeout(connect=self._connect, read=self._read, total=self.total)
+ return Timeout(connect=self._connect, read=self._read, total=self.total)
def start_connect(self):
- """Start the timeout clock, used during a connect() attempt
+ """Start the timeout clock, used during a connect() attempt
:raises urllib3.exceptions.TimeoutStateError: if you attempt
to start a timer that has been started already.
@@ -204,22 +204,22 @@ class Timeout(object):
return self._start_connect
def get_connect_duration(self):
- """Gets the time elapsed since the call to :meth:`start_connect`.
+ """Gets the time elapsed since the call to :meth:`start_connect`.
- :return: Elapsed time in seconds.
+ :return: Elapsed time in seconds.
:rtype: float
:raises urllib3.exceptions.TimeoutStateError: if you attempt
to get duration for a timer that hasn't been started.
"""
if self._start_connect is None:
- raise TimeoutStateError(
- "Can't get connect duration for timer that has not started."
- )
+ raise TimeoutStateError(
+ "Can't get connect duration for timer that has not started."
+ )
return current_time() - self._start_connect
@property
def connect_timeout(self):
- """Get the value to use when setting a connection timeout.
+ """Get the value to use when setting a connection timeout.
This will be a positive float or integer, the value None
(never timeout), or the default system timeout.
@@ -237,7 +237,7 @@ class Timeout(object):
@property
def read_timeout(self):
- """Get the value for the read timeout.
+ """Get the value for the read timeout.
This assumes some time has elapsed in the connection timeout and
computes the read timeout appropriately.
@@ -252,16 +252,16 @@ class Timeout(object):
:raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect`
has not yet been called on this object.
"""
- if (
- self.total is not None
- and self.total is not self.DEFAULT_TIMEOUT
- and self._read is not None
- and self._read is not self.DEFAULT_TIMEOUT
- ):
+ if (
+ self.total is not None
+ and self.total is not self.DEFAULT_TIMEOUT
+ and self._read is not None
+ and self._read is not self.DEFAULT_TIMEOUT
+ ):
# In case the connect timeout has not yet been established.
if self._start_connect is None:
return self._read
- return max(0, min(self.total - self.get_connect_duration(), self._read))
+ return max(0, min(self.total - self.get_connect_duration(), self._read))
elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT:
return max(0, self.total - self.get_connect_duration())
else:
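
To make the docstring above concrete, a minimal usage sketch (example.com is a placeholder host):

    from urllib3 import PoolManager, Timeout

    # Pool-wide default: 2s to connect, 7s per read; total caps the sum.
    http = PoolManager(timeout=Timeout(connect=2.0, read=7.0, total=10.0))
    r = http.request("GET", "http://example.com/")

    # Per-request override, exactly as the docstring describes:
    r = http.request("GET", "http://example.com/", timeout=Timeout(total=5.0))
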
diff --git a/contrib/python/urllib3/urllib3/util/url.py b/contrib/python/urllib3/urllib3/util/url.py
index 81a03da9e3..cf716cfc86 100644
--- a/contrib/python/urllib3/urllib3/util/url.py
+++ b/contrib/python/urllib3/urllib3/util/url.py
@@ -1,110 +1,110 @@
from __future__ import absolute_import
-
-import re
+
+import re
from collections import namedtuple
from ..exceptions import LocationParseError
-from ..packages import six
+from ..packages import six
-url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"]
+url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"]
# We only want to normalize urls with an HTTP(S) scheme.
# urllib3 infers URLs without a scheme (None) to be http.
-NORMALIZABLE_SCHEMES = ("http", "https", None)
-
-# Almost all of these patterns were derived from the
-# 'rfc3986' module: https://github.com/python-hyper/rfc3986
-PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}")
-SCHEME_RE = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)")
-URI_RE = re.compile(
- r"^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?"
- r"(?://([^\\/?#]*))?"
- r"([^?#]*)"
- r"(?:\?([^#]*))?"
- r"(?:#(.*))?$",
- re.UNICODE | re.DOTALL,
-)
-
-IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}"
-HEX_PAT = "[0-9A-Fa-f]{1,4}"
-LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=HEX_PAT, ipv4=IPV4_PAT)
-_subs = {"hex": HEX_PAT, "ls32": LS32_PAT}
-_variations = [
- # 6( h16 ":" ) ls32
- "(?:%(hex)s:){6}%(ls32)s",
- # "::" 5( h16 ":" ) ls32
- "::(?:%(hex)s:){5}%(ls32)s",
- # [ h16 ] "::" 4( h16 ":" ) ls32
- "(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s",
- # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
- "(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s",
- # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
- "(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s",
- # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
- "(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s",
- # [ *4( h16 ":" ) h16 ] "::" ls32
- "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s",
- # [ *5( h16 ":" ) h16 ] "::" h16
- "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s",
- # [ *6( h16 ":" ) h16 ] "::"
- "(?:(?:%(hex)s:){0,6}%(hex)s)?::",
-]
-
-UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~"
-IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")"
-ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
-IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]"
-REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*"
-TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$")
-
-IPV4_RE = re.compile("^" + IPV4_PAT + "$")
-IPV6_RE = re.compile("^" + IPV6_PAT + "$")
-IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$")
-BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$")
-ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$")
-
-_HOST_PORT_PAT = ("^(%s|%s|%s)(?::([0-9]{0,5}))?$") % (
- REG_NAME_PAT,
- IPV4_PAT,
- IPV6_ADDRZ_PAT,
-)
-_HOST_PORT_RE = re.compile(_HOST_PORT_PAT, re.UNICODE | re.DOTALL)
-
-UNRESERVED_CHARS = set(
- "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~"
-)
-SUB_DELIM_CHARS = set("!$&'()*+,;=")
-USERINFO_CHARS = UNRESERVED_CHARS | SUB_DELIM_CHARS | {":"}
-PATH_CHARS = USERINFO_CHARS | {"@", "/"}
-QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {"?"}
-
-
-class Url(namedtuple("Url", url_attrs)):
+NORMALIZABLE_SCHEMES = ("http", "https", None)
+
+# Almost all of these patterns were derived from the
+# 'rfc3986' module: https://github.com/python-hyper/rfc3986
+PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}")
+SCHEME_RE = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)")
+URI_RE = re.compile(
+ r"^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?"
+ r"(?://([^\\/?#]*))?"
+ r"([^?#]*)"
+ r"(?:\?([^#]*))?"
+ r"(?:#(.*))?$",
+ re.UNICODE | re.DOTALL,
+)
+
+IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}"
+HEX_PAT = "[0-9A-Fa-f]{1,4}"
+LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=HEX_PAT, ipv4=IPV4_PAT)
+_subs = {"hex": HEX_PAT, "ls32": LS32_PAT}
+_variations = [
+ # 6( h16 ":" ) ls32
+ "(?:%(hex)s:){6}%(ls32)s",
+ # "::" 5( h16 ":" ) ls32
+ "::(?:%(hex)s:){5}%(ls32)s",
+ # [ h16 ] "::" 4( h16 ":" ) ls32
+ "(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s",
+ # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
+ "(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s",
+ # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
+ "(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s",
+ # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
+ "(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s",
+ # [ *4( h16 ":" ) h16 ] "::" ls32
+ "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s",
+ # [ *5( h16 ":" ) h16 ] "::" h16
+ "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s",
+ # [ *6( h16 ":" ) h16 ] "::"
+ "(?:(?:%(hex)s:){0,6}%(hex)s)?::",
+]
+
+UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~"
+IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")"
+ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
+IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]"
+REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*"
+TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$")
+
+IPV4_RE = re.compile("^" + IPV4_PAT + "$")
+IPV6_RE = re.compile("^" + IPV6_PAT + "$")
+IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$")
+BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$")
+ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$")
+
+_HOST_PORT_PAT = ("^(%s|%s|%s)(?::([0-9]{0,5}))?$") % (
+ REG_NAME_PAT,
+ IPV4_PAT,
+ IPV6_ADDRZ_PAT,
+)
+_HOST_PORT_RE = re.compile(_HOST_PORT_PAT, re.UNICODE | re.DOTALL)
+
+UNRESERVED_CHARS = set(
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~"
+)
+SUB_DELIM_CHARS = set("!$&'()*+,;=")
+USERINFO_CHARS = UNRESERVED_CHARS | SUB_DELIM_CHARS | {":"}
+PATH_CHARS = USERINFO_CHARS | {"@", "/"}
+QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {"?"}
+
+
+class Url(namedtuple("Url", url_attrs)):
"""
- Data structure for representing an HTTP URL. Used as a return value for
+ Data structure for representing an HTTP URL. Used as a return value for
:func:`parse_url`. Both the scheme and host are normalized as they are
both case-insensitive according to RFC 3986.
"""
-
+
__slots__ = ()
- def __new__(
- cls,
- scheme=None,
- auth=None,
- host=None,
- port=None,
- path=None,
- query=None,
- fragment=None,
- ):
- if path and not path.startswith("/"):
- path = "/" + path
- if scheme is not None:
+ def __new__(
+ cls,
+ scheme=None,
+ auth=None,
+ host=None,
+ port=None,
+ path=None,
+ query=None,
+ fragment=None,
+ ):
+ if path and not path.startswith("/"):
+ path = "/" + path
+ if scheme is not None:
scheme = scheme.lower()
- return super(Url, cls).__new__(
- cls, scheme, auth, host, port, path, query, fragment
- )
+ return super(Url, cls).__new__(
+ cls, scheme, auth, host, port, path, query, fragment
+ )
@property
def hostname(self):
@@ -114,10 +114,10 @@ class Url(namedtuple("Url", url_attrs)):
@property
def request_uri(self):
"""Absolute path including the query string."""
- uri = self.path or "/"
+ uri = self.path or "/"
if self.query is not None:
- uri += "?" + self.query
+ uri += "?" + self.query
return uri
@@ -125,7 +125,7 @@ class Url(namedtuple("Url", url_attrs)):
def netloc(self):
"""Network location including host and port"""
if self.port:
- return "%s:%d" % (self.host, self.port)
+ return "%s:%d" % (self.host, self.port)
return self.host
@property
@@ -148,23 +148,23 @@ class Url(namedtuple("Url", url_attrs)):
'http://username:password@host.com:80/path?query#fragment'
"""
scheme, auth, host, port, path, query, fragment = self
- url = u""
+ url = u""
         # We use "is not None" because we want things to happen with empty strings (or 0 port)
if scheme is not None:
- url += scheme + u"://"
+ url += scheme + u"://"
if auth is not None:
- url += auth + u"@"
+ url += auth + u"@"
if host is not None:
url += host
if port is not None:
- url += u":" + str(port)
+ url += u":" + str(port)
if path is not None:
url += path
if query is not None:
- url += u"?" + query
+ url += u"?" + query
if fragment is not None:
- url += u"#" + fragment
+ url += u"#" + fragment
return url
@@ -174,8 +174,8 @@ class Url(namedtuple("Url", url_attrs)):
def split_first(s, delims):
"""
- .. deprecated:: 1.25
-
+ .. deprecated:: 1.25
+
Given a string and an iterable of delimiters, split on the first found
delimiter. Return two split parts and the matched delimiter.
@@ -202,142 +202,142 @@ def split_first(s, delims):
min_delim = d
if min_idx is None or min_idx < 0:
- return s, "", None
-
- return s[:min_idx], s[min_idx + 1 :], min_delim
-
-
-def _encode_invalid_chars(component, allowed_chars, encoding="utf-8"):
- """Percent-encodes a URI component without reapplying
- onto an already percent-encoded component.
- """
- if component is None:
- return component
-
- component = six.ensure_text(component)
-
- # Normalize existing percent-encoded bytes.
- # Try to see if the component we're encoding is already percent-encoded
- # so we can skip all '%' characters but still encode all others.
- component, percent_encodings = PERCENT_RE.subn(
- lambda match: match.group(0).upper(), component
- )
-
- uri_bytes = component.encode("utf-8", "surrogatepass")
- is_percent_encoded = percent_encodings == uri_bytes.count(b"%")
- encoded_component = bytearray()
-
- for i in range(0, len(uri_bytes)):
- # Will return a single character bytestring on both Python 2 & 3
- byte = uri_bytes[i : i + 1]
- byte_ord = ord(byte)
- if (is_percent_encoded and byte == b"%") or (
- byte_ord < 128 and byte.decode() in allowed_chars
- ):
- encoded_component += byte
- continue
- encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper()))
-
- return encoded_component.decode(encoding)
-
-
-def _remove_path_dot_segments(path):
- # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code
- segments = path.split("/") # Turn the path into a list of segments
- output = [] # Initialize the variable to use to store output
-
- for segment in segments:
-        # '.' is the current directory, so ignore it; it is superfluous
- if segment == ".":
- continue
- # Anything other than '..', should be appended to the output
- elif segment != "..":
- output.append(segment)
- # In this case segment == '..', if we can, we should pop the last
- # element
- elif output:
- output.pop()
-
- # If the path starts with '/' and the output is empty or the first string
- # is non-empty
- if path.startswith("/") and (not output or output[0]):
- output.insert(0, "")
-
- # If the path starts with '/.' or '/..' ensure we add one more empty
- # string to add a trailing '/'
- if path.endswith(("/.", "/..")):
- output.append("")
-
- return "/".join(output)
-
-
-def _normalize_host(host, scheme):
- if host:
- if isinstance(host, six.binary_type):
- host = six.ensure_str(host)
-
- if scheme in NORMALIZABLE_SCHEMES:
- is_ipv6 = IPV6_ADDRZ_RE.match(host)
- if is_ipv6:
- match = ZONE_ID_RE.search(host)
- if match:
- start, end = match.span(1)
- zone_id = host[start:end]
-
- if zone_id.startswith("%25") and zone_id != "%25":
- zone_id = zone_id[3:]
- else:
- zone_id = zone_id[1:]
- zone_id = "%" + _encode_invalid_chars(zone_id, UNRESERVED_CHARS)
- return host[:start].lower() + zone_id + host[end:]
- else:
- return host.lower()
- elif not IPV4_RE.match(host):
- return six.ensure_str(
- b".".join([_idna_encode(label) for label in host.split(".")])
- )
- return host
-
-
-def _idna_encode(name):
- if name and any([ord(x) > 128 for x in name]):
- try:
- import idna
- except ImportError:
- six.raise_from(
- LocationParseError("Unable to parse URL without the 'idna' module"),
- None,
- )
- try:
- return idna.encode(name.lower(), strict=True, std3_rules=True)
- except idna.IDNAError:
- six.raise_from(
- LocationParseError(u"Name '%s' is not a valid IDNA label" % name), None
- )
- return name.lower().encode("ascii")
-
-
-def _encode_target(target):
- """Percent-encodes a request target so that there are no invalid characters"""
- path, query = TARGET_RE.match(target).groups()
- target = _encode_invalid_chars(path, PATH_CHARS)
- query = _encode_invalid_chars(query, QUERY_CHARS)
- if query is not None:
- target += "?" + query
- return target
-
-
+ return s, "", None
+
+ return s[:min_idx], s[min_idx + 1 :], min_delim
+
+
+def _encode_invalid_chars(component, allowed_chars, encoding="utf-8"):
+ """Percent-encodes a URI component without reapplying
+ onto an already percent-encoded component.
+ """
+ if component is None:
+ return component
+
+ component = six.ensure_text(component)
+
+ # Normalize existing percent-encoded bytes.
+ # Try to see if the component we're encoding is already percent-encoded
+ # so we can skip all '%' characters but still encode all others.
+ component, percent_encodings = PERCENT_RE.subn(
+ lambda match: match.group(0).upper(), component
+ )
+
+ uri_bytes = component.encode("utf-8", "surrogatepass")
+ is_percent_encoded = percent_encodings == uri_bytes.count(b"%")
+ encoded_component = bytearray()
+
+ for i in range(0, len(uri_bytes)):
+ # Will return a single character bytestring on both Python 2 & 3
+ byte = uri_bytes[i : i + 1]
+ byte_ord = ord(byte)
+ if (is_percent_encoded and byte == b"%") or (
+ byte_ord < 128 and byte.decode() in allowed_chars
+ ):
+ encoded_component += byte
+ continue
+ encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper()))
+
+ return encoded_component.decode(encoding)
+
+
+def _remove_path_dot_segments(path):
+ # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code
+ segments = path.split("/") # Turn the path into a list of segments
+ output = [] # Initialize the variable to use to store output
+
+ for segment in segments:
+        # '.' is the current directory, so ignore it; it is superfluous
+ if segment == ".":
+ continue
+ # Anything other than '..', should be appended to the output
+ elif segment != "..":
+ output.append(segment)
+ # In this case segment == '..', if we can, we should pop the last
+ # element
+ elif output:
+ output.pop()
+
+ # If the path starts with '/' and the output is empty or the first string
+ # is non-empty
+ if path.startswith("/") and (not output or output[0]):
+ output.insert(0, "")
+
+ # If the path starts with '/.' or '/..' ensure we add one more empty
+ # string to add a trailing '/'
+ if path.endswith(("/.", "/..")):
+ output.append("")
+
+ return "/".join(output)
+
+
+def _normalize_host(host, scheme):
+ if host:
+ if isinstance(host, six.binary_type):
+ host = six.ensure_str(host)
+
+ if scheme in NORMALIZABLE_SCHEMES:
+ is_ipv6 = IPV6_ADDRZ_RE.match(host)
+ if is_ipv6:
+ match = ZONE_ID_RE.search(host)
+ if match:
+ start, end = match.span(1)
+ zone_id = host[start:end]
+
+ if zone_id.startswith("%25") and zone_id != "%25":
+ zone_id = zone_id[3:]
+ else:
+ zone_id = zone_id[1:]
+ zone_id = "%" + _encode_invalid_chars(zone_id, UNRESERVED_CHARS)
+ return host[:start].lower() + zone_id + host[end:]
+ else:
+ return host.lower()
+ elif not IPV4_RE.match(host):
+ return six.ensure_str(
+ b".".join([_idna_encode(label) for label in host.split(".")])
+ )
+ return host
+
+
+def _idna_encode(name):
+ if name and any([ord(x) > 128 for x in name]):
+ try:
+ import idna
+ except ImportError:
+ six.raise_from(
+ LocationParseError("Unable to parse URL without the 'idna' module"),
+ None,
+ )
+ try:
+ return idna.encode(name.lower(), strict=True, std3_rules=True)
+ except idna.IDNAError:
+ six.raise_from(
+ LocationParseError(u"Name '%s' is not a valid IDNA label" % name), None
+ )
+ return name.lower().encode("ascii")
+
+
+def _encode_target(target):
+ """Percent-encodes a request target so that there are no invalid characters"""
+ path, query = TARGET_RE.match(target).groups()
+ target = _encode_invalid_chars(path, PATH_CHARS)
+ query = _encode_invalid_chars(query, QUERY_CHARS)
+ if query is not None:
+ target += "?" + query
+ return target
+
+
def parse_url(url):
"""
Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
performed to parse incomplete urls. Fields not provided will be None.
- This parser is RFC 3986 compliant.
-
- The parser logic and helper functions are based heavily on
- work done in the ``rfc3986`` module.
-
- :param str url: URL to parse into a :class:`.Url` namedtuple.
+ This parser is RFC 3986 compliant.
+ The parser logic and helper functions are based heavily on
+ work done in the ``rfc3986`` module.
+
+ :param str url: URL to parse into a :class:`.Url` namedtuple.
+
Partly backwards-compatible with :mod:`urlparse`.
Example::
@@ -353,75 +353,75 @@ def parse_url(url):
# Empty
return Url()
- source_url = url
- if not SCHEME_RE.search(url):
- url = "//" + url
-
- try:
- scheme, authority, path, query, fragment = URI_RE.match(url).groups()
- normalize_uri = scheme is None or scheme.lower() in NORMALIZABLE_SCHEMES
-
- if scheme:
- scheme = scheme.lower()
-
- if authority:
- auth, _, host_port = authority.rpartition("@")
- auth = auth or None
- host, port = _HOST_PORT_RE.match(host_port).groups()
- if auth and normalize_uri:
- auth = _encode_invalid_chars(auth, USERINFO_CHARS)
- if port == "":
- port = None
- else:
- auth, host, port = None, None, None
-
- if port is not None:
- port = int(port)
- if not (0 <= port <= 65535):
- raise LocationParseError(url)
-
- host = _normalize_host(host, scheme)
-
- if normalize_uri and path:
- path = _remove_path_dot_segments(path)
- path = _encode_invalid_chars(path, PATH_CHARS)
- if normalize_uri and query:
- query = _encode_invalid_chars(query, QUERY_CHARS)
- if normalize_uri and fragment:
- fragment = _encode_invalid_chars(fragment, FRAGMENT_CHARS)
-
- except (ValueError, AttributeError):
- return six.raise_from(LocationParseError(source_url), None)
-
- # For the sake of backwards compatibility we put empty
- # string values for path if there are any defined values
- # beyond the path in the URL.
- # TODO: Remove this when we break backwards compatibility.
- if not path:
- if query is not None or fragment is not None:
- path = ""
+ source_url = url
+ if not SCHEME_RE.search(url):
+ url = "//" + url
+
+ try:
+ scheme, authority, path, query, fragment = URI_RE.match(url).groups()
+ normalize_uri = scheme is None or scheme.lower() in NORMALIZABLE_SCHEMES
+
+ if scheme:
+ scheme = scheme.lower()
+
+ if authority:
+ auth, _, host_port = authority.rpartition("@")
+ auth = auth or None
+ host, port = _HOST_PORT_RE.match(host_port).groups()
+ if auth and normalize_uri:
+ auth = _encode_invalid_chars(auth, USERINFO_CHARS)
+ if port == "":
+ port = None
+ else:
+ auth, host, port = None, None, None
+
+ if port is not None:
+ port = int(port)
+ if not (0 <= port <= 65535):
+ raise LocationParseError(url)
+
+ host = _normalize_host(host, scheme)
+
+ if normalize_uri and path:
+ path = _remove_path_dot_segments(path)
+ path = _encode_invalid_chars(path, PATH_CHARS)
+ if normalize_uri and query:
+ query = _encode_invalid_chars(query, QUERY_CHARS)
+ if normalize_uri and fragment:
+ fragment = _encode_invalid_chars(fragment, FRAGMENT_CHARS)
+
+ except (ValueError, AttributeError):
+ return six.raise_from(LocationParseError(source_url), None)
+
+ # For the sake of backwards compatibility we put empty
+ # string values for path if there are any defined values
+ # beyond the path in the URL.
+ # TODO: Remove this when we break backwards compatibility.
+ if not path:
+ if query is not None or fragment is not None:
+ path = ""
else:
- path = None
+ path = None
- # Ensure that each part of the URL is a `str` for
- # backwards compatibility.
- if isinstance(url, six.text_type):
- ensure_func = six.ensure_text
- else:
- ensure_func = six.ensure_str
+ # Ensure that each part of the URL is a `str` for
+ # backwards compatibility.
+ if isinstance(url, six.text_type):
+ ensure_func = six.ensure_text
+ else:
+ ensure_func = six.ensure_str
- def ensure_type(x):
- return x if x is None else ensure_func(x)
+ def ensure_type(x):
+ return x if x is None else ensure_func(x)
- return Url(
- scheme=ensure_type(scheme),
- auth=ensure_type(auth),
- host=ensure_type(host),
- port=port,
- path=ensure_type(path),
- query=ensure_type(query),
- fragment=ensure_type(fragment),
- )
+ return Url(
+ scheme=ensure_type(scheme),
+ auth=ensure_type(auth),
+ host=ensure_type(host),
+ port=port,
+ path=ensure_type(path),
+ query=ensure_type(query),
+ fragment=ensure_type(fragment),
+ )
def get_host(url):
@@ -429,4 +429,4 @@ def get_host(url):
Deprecated. Use :func:`parse_url` instead.
"""
p = parse_url(url)
- return p.scheme or "http", p.hostname, p.port
+ return p.scheme or "http", p.hostname, p.port
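
A short sketch of parse_url from the hunk above, showing the normalization steps (lowercasing, dot-segment removal, percent-encoding) the code describes:

    from urllib3.util.url import parse_url

    u = parse_url("HTTP://user@Example.COM:8080/a/../b?q=1#frag")
    print(u.scheme)       # 'http'  (scheme is lowercased)
    print(u.host)         # 'example.com'
    print(u.path)         # '/b'    (dot segments removed for http(s) URLs)
    print(u.request_uri)  # '/b?q=1'
    print(u.netloc)       # 'example.com:8080'
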
diff --git a/contrib/python/urllib3/urllib3/util/wait.py b/contrib/python/urllib3/urllib3/util/wait.py
index c280646c7b..ffb8a998be 100644
--- a/contrib/python/urllib3/urllib3/util/wait.py
+++ b/contrib/python/urllib3/urllib3/util/wait.py
@@ -1,153 +1,153 @@
-import errno
-import select
-import sys
-from functools import partial
-
-try:
- from time import monotonic
-except ImportError:
- from time import time as monotonic
-
-__all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"]
-
-
-class NoWayToWaitForSocketError(Exception):
- pass
-
-
-# How should we wait on sockets?
-#
-# There are two types of APIs you can use for waiting on sockets: the fancy
-# modern stateful APIs like epoll/kqueue, and the older stateless APIs like
-# select/poll. The stateful APIs are more efficient when you have a lot of
-# sockets to keep track of, because you can set them up once and then use them
-# lots of times. But we only ever want to wait on a single socket at a time
-# and don't want to keep track of state, so the stateless APIs are actually
-# more efficient. So we want to use select() or poll().
-#
-# Now, how do we choose between select() and poll()? On traditional Unixes,
-# select() has a strange calling convention that makes it slow, or fail
-# altogether, for high-numbered file descriptors. The point of poll() is to fix
-# that, so on Unixes, we prefer poll().
-#
-# On Windows, there is no poll() (or at least Python doesn't provide a wrapper
-# for it), but that's OK, because on Windows, select() doesn't have this
-# strange calling convention; plain select() works fine.
-#
-# So: on Windows we use select(), and everywhere else we use poll(). We also
-# fall back to select() in case poll() is somehow broken or missing.
-
-if sys.version_info >= (3, 5):
- # Modern Python, that retries syscalls by default
- def _retry_on_intr(fn, timeout):
- return fn(timeout)
-
-
-else:
- # Old and broken Pythons.
- def _retry_on_intr(fn, timeout):
- if timeout is None:
- deadline = float("inf")
+import errno
+import select
+import sys
+from functools import partial
+
+try:
+ from time import monotonic
+except ImportError:
+ from time import time as monotonic
+
+__all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"]
+
+
+class NoWayToWaitForSocketError(Exception):
+ pass
+
+
+# How should we wait on sockets?
+#
+# There are two types of APIs you can use for waiting on sockets: the fancy
+# modern stateful APIs like epoll/kqueue, and the older stateless APIs like
+# select/poll. The stateful APIs are more efficient when you have a lot of
+# sockets to keep track of, because you can set them up once and then use them
+# lots of times. But we only ever want to wait on a single socket at a time
+# and don't want to keep track of state, so the stateless APIs are actually
+# more efficient. So we want to use select() or poll().
+#
+# Now, how do we choose between select() and poll()? On traditional Unixes,
+# select() has a strange calling convention that makes it slow, or fail
+# altogether, for high-numbered file descriptors. The point of poll() is to fix
+# that, so on Unixes, we prefer poll().
+#
+# On Windows, there is no poll() (or at least Python doesn't provide a wrapper
+# for it), but that's OK, because on Windows, select() doesn't have this
+# strange calling convention; plain select() works fine.
+#
+# So: on Windows we use select(), and everywhere else we use poll(). We also
+# fall back to select() in case poll() is somehow broken or missing.
+
+if sys.version_info >= (3, 5):
+ # Modern Python, that retries syscalls by default
+ def _retry_on_intr(fn, timeout):
+ return fn(timeout)
+
+
+else:
+ # Old and broken Pythons.
+ def _retry_on_intr(fn, timeout):
+ if timeout is None:
+ deadline = float("inf")
else:
- deadline = monotonic() + timeout
-
- while True:
- try:
- return fn(timeout)
- # OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7
- except (OSError, select.error) as e:
- # 'e.args[0]' incantation works for both OSError and select.error
- if e.args[0] != errno.EINTR:
- raise
- else:
- timeout = deadline - monotonic()
- if timeout < 0:
- timeout = 0
- if timeout == float("inf"):
- timeout = None
- continue
-
-
-def select_wait_for_socket(sock, read=False, write=False, timeout=None):
- if not read and not write:
- raise RuntimeError("must specify at least one of read=True, write=True")
- rcheck = []
- wcheck = []
- if read:
- rcheck.append(sock)
- if write:
- wcheck.append(sock)
- # When doing a non-blocking connect, most systems signal success by
-    # marking the socket writable. Windows, though, signals success by marking
- # it as "exceptional". We paper over the difference by checking the write
- # sockets for both conditions. (The stdlib selectors module does the same
- # thing.)
- fn = partial(select.select, rcheck, wcheck, wcheck)
- rready, wready, xready = _retry_on_intr(fn, timeout)
- return bool(rready or wready or xready)
-
-
-def poll_wait_for_socket(sock, read=False, write=False, timeout=None):
- if not read and not write:
- raise RuntimeError("must specify at least one of read=True, write=True")
- mask = 0
- if read:
- mask |= select.POLLIN
- if write:
- mask |= select.POLLOUT
- poll_obj = select.poll()
- poll_obj.register(sock, mask)
-
- # For some reason, poll() takes timeout in milliseconds
- def do_poll(t):
- if t is not None:
- t *= 1000
- return poll_obj.poll(t)
-
- return bool(_retry_on_intr(do_poll, timeout))
-
-
-def null_wait_for_socket(*args, **kwargs):
- raise NoWayToWaitForSocketError("no select-equivalent available")
-
-
-def _have_working_poll():
- # Apparently some systems have a select.poll that fails as soon as you try
- # to use it, either due to strange configuration or broken monkeypatching
- # from libraries like eventlet/greenlet.
- try:
- poll_obj = select.poll()
- _retry_on_intr(poll_obj.poll, 0)
- except (AttributeError, OSError):
- return False
- else:
- return True
-
-
-def wait_for_socket(*args, **kwargs):
- # We delay choosing which implementation to use until the first time we're
- # called. We could do it at import time, but then we might make the wrong
- # decision if someone goes wild with monkeypatching select.poll after
- # we're imported.
- global wait_for_socket
- if _have_working_poll():
- wait_for_socket = poll_wait_for_socket
- elif hasattr(select, "select"):
- wait_for_socket = select_wait_for_socket
- else: # Platform-specific: Appengine.
- wait_for_socket = null_wait_for_socket
- return wait_for_socket(*args, **kwargs)
-
-
-def wait_for_read(sock, timeout=None):
- """Waits for reading to be available on a given socket.
- Returns True if the socket is readable, or False if the timeout expired.
- """
- return wait_for_socket(sock, read=True, timeout=timeout)
-
-
-def wait_for_write(sock, timeout=None):
- """Waits for writing to be available on a given socket.
-    Returns True if the socket is writable, or False if the timeout expired.
- """
- return wait_for_socket(sock, write=True, timeout=timeout)
+ deadline = monotonic() + timeout
+
+ while True:
+ try:
+ return fn(timeout)
+ # OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7
+ except (OSError, select.error) as e:
+ # 'e.args[0]' incantation works for both OSError and select.error
+ if e.args[0] != errno.EINTR:
+ raise
+ else:
+ timeout = deadline - monotonic()
+ if timeout < 0:
+ timeout = 0
+ if timeout == float("inf"):
+ timeout = None
+ continue
+
+
+def select_wait_for_socket(sock, read=False, write=False, timeout=None):
+ if not read and not write:
+ raise RuntimeError("must specify at least one of read=True, write=True")
+ rcheck = []
+ wcheck = []
+ if read:
+ rcheck.append(sock)
+ if write:
+ wcheck.append(sock)
+ # When doing a non-blocking connect, most systems signal success by
+    # marking the socket writable. Windows, though, signals success by marking
+ # it as "exceptional". We paper over the difference by checking the write
+ # sockets for both conditions. (The stdlib selectors module does the same
+ # thing.)
+ fn = partial(select.select, rcheck, wcheck, wcheck)
+ rready, wready, xready = _retry_on_intr(fn, timeout)
+ return bool(rready or wready or xready)
+
+
+def poll_wait_for_socket(sock, read=False, write=False, timeout=None):
+ if not read and not write:
+ raise RuntimeError("must specify at least one of read=True, write=True")
+ mask = 0
+ if read:
+ mask |= select.POLLIN
+ if write:
+ mask |= select.POLLOUT
+ poll_obj = select.poll()
+ poll_obj.register(sock, mask)
+
+ # For some reason, poll() takes timeout in milliseconds
+ def do_poll(t):
+ if t is not None:
+ t *= 1000
+ return poll_obj.poll(t)
+
+ return bool(_retry_on_intr(do_poll, timeout))
+
+
+def null_wait_for_socket(*args, **kwargs):
+ raise NoWayToWaitForSocketError("no select-equivalent available")
+
+
+def _have_working_poll():
+ # Apparently some systems have a select.poll that fails as soon as you try
+ # to use it, either due to strange configuration or broken monkeypatching
+ # from libraries like eventlet/greenlet.
+ try:
+ poll_obj = select.poll()
+ _retry_on_intr(poll_obj.poll, 0)
+ except (AttributeError, OSError):
+ return False
+ else:
+ return True
+
+
+def wait_for_socket(*args, **kwargs):
+ # We delay choosing which implementation to use until the first time we're
+ # called. We could do it at import time, but then we might make the wrong
+ # decision if someone goes wild with monkeypatching select.poll after
+ # we're imported.
+ global wait_for_socket
+ if _have_working_poll():
+ wait_for_socket = poll_wait_for_socket
+ elif hasattr(select, "select"):
+ wait_for_socket = select_wait_for_socket
+ else: # Platform-specific: Appengine.
+ wait_for_socket = null_wait_for_socket
+ return wait_for_socket(*args, **kwargs)
+
+
+def wait_for_read(sock, timeout=None):
+ """Waits for reading to be available on a given socket.
+ Returns True if the socket is readable, or False if the timeout expired.
+ """
+ return wait_for_socket(sock, read=True, timeout=timeout)
+
+
+def wait_for_write(sock, timeout=None):
+ """Waits for writing to be available on a given socket.
+    Returns True if the socket is writable, or False if the timeout expired.
+ """
+ return wait_for_socket(sock, write=True, timeout=timeout)
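
Finally, a hedged sketch of the wait helpers above on a non-blocking socket; it assumes example.com is reachable and that the one-shot request fits in the send buffer.

    import socket
    from urllib3.util.wait import wait_for_read, wait_for_write

    s = socket.create_connection(("example.com", 80))
    s.setblocking(False)
    if wait_for_write(s, timeout=5.0):
        s.sendall(b"HEAD / HTTP/1.1\r\nHost: example.com\r\n\r\n")
    if wait_for_read(s, timeout=5.0):
        print(s.recv(4096))
    s.close()
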
diff --git a/contrib/python/urllib3/ya.make b/contrib/python/urllib3/ya.make
index e6844d1bb7..8dcd3c6a6b 100644
--- a/contrib/python/urllib3/ya.make
+++ b/contrib/python/urllib3/ya.make
@@ -1,66 +1,66 @@
-# Generated by devtools/yamaker (pypi).
-
+# Generated by devtools/yamaker (pypi).
+
PY23_LIBRARY()
OWNER(g:python-contrib)
-VERSION(1.26.8)
-
-LICENSE(MIT)
+VERSION(1.26.8)
+LICENSE(MIT)
+
NO_LINT()
-NO_CHECK_IMPORTS(
- urllib3.contrib.*
- urllib3.packages.backports.*
+NO_CHECK_IMPORTS(
+ urllib3.contrib.*
+ urllib3.packages.backports.*
)
PY_SRCS(
TOP_LEVEL
urllib3/__init__.py
urllib3/_collections.py
- urllib3/_version.py
+ urllib3/_version.py
urllib3/connection.py
urllib3/connectionpool.py
- urllib3/contrib/__init__.py
- urllib3/contrib/_appengine_environ.py
- #urllib3/contrib/_securetransport/__init__.py
- #urllib3/contrib/_securetransport/bindings.py
- #urllib3/contrib/_securetransport/low_level.py
+ urllib3/contrib/__init__.py
+ urllib3/contrib/_appengine_environ.py
+ #urllib3/contrib/_securetransport/__init__.py
+ #urllib3/contrib/_securetransport/bindings.py
+ #urllib3/contrib/_securetransport/low_level.py
urllib3/contrib/appengine.py
- #urllib3/contrib/ntlmpool.py
- #urllib3/contrib/pyopenssl.py
- #urllib3/contrib/securetransport.py
- urllib3/contrib/socks.py
+ #urllib3/contrib/ntlmpool.py
+ #urllib3/contrib/pyopenssl.py
+ #urllib3/contrib/securetransport.py
+ urllib3/contrib/socks.py
urllib3/exceptions.py
urllib3/fields.py
urllib3/filepost.py
urllib3/packages/__init__.py
- #urllib3/packages/backports/__init__.py
- #urllib3/packages/backports/makefile.py
+ #urllib3/packages/backports/__init__.py
+ #urllib3/packages/backports/makefile.py
urllib3/packages/six.py
urllib3/poolmanager.py
urllib3/request.py
urllib3/response.py
urllib3/util/__init__.py
urllib3/util/connection.py
- urllib3/util/proxy.py
- urllib3/util/queue.py
+ urllib3/util/proxy.py
+ urllib3/util/queue.py
urllib3/util/request.py
urllib3/util/response.py
urllib3/util/retry.py
urllib3/util/ssl_.py
- urllib3/util/ssl_match_hostname.py
- urllib3/util/ssltransport.py
+ urllib3/util/ssl_match_hostname.py
+ urllib3/util/ssltransport.py
urllib3/util/timeout.py
urllib3/util/url.py
urllib3/util/wait.py
)
-RESOURCE_FILES(
- PREFIX contrib/python/urllib3/
- .dist-info/METADATA
- .dist-info/top_level.txt
-)
-
+RESOURCE_FILES(
+ PREFIX contrib/python/urllib3/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+)
+
END()