author | orivej <orivej@yandex-team.ru> | 2022-02-10 16:44:49 +0300
committer | Daniil Cherednik <dcherednik@yandex-team.ru> | 2022-02-10 16:44:49 +0300
commit | 718c552901d703c502ccbefdfc3c9028d608b947 (patch)
tree | 46534a98bbefcd7b1f3faa5b52c138ab27db75b7 /contrib/python/requests
parent | e9656aae26e0358d5378e5b63dcac5c8dbe0e4d0 (diff)
download | ydb-718c552901d703c502ccbefdfc3c9028d608b947.tar.gz
Restoring authorship annotation for <orivej@yandex-team.ru>. Commit 1 of 2.
Diffstat (limited to 'contrib/python/requests')
21 files changed, 1462 insertions, 1462 deletions
diff --git a/contrib/python/requests/.dist-info/METADATA b/contrib/python/requests/.dist-info/METADATA index 6363b2098e..8ae4bd6bef 100644 --- a/contrib/python/requests/.dist-info/METADATA +++ b/contrib/python/requests/.dist-info/METADATA @@ -1,31 +1,31 @@ Metadata-Version: 2.1 -Name: requests +Name: requests Version: 2.27.1 -Summary: Python HTTP for Humans. +Summary: Python HTTP for Humans. Home-page: https://requests.readthedocs.io -Author: Kenneth Reitz -Author-email: me@kennethreitz.org -License: Apache 2.0 +Author: Kenneth Reitz +Author-email: me@kennethreitz.org +License: Apache 2.0 Project-URL: Documentation, https://requests.readthedocs.io Project-URL: Source, https://github.com/psf/requests -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Developers Classifier: License :: OSI Approved :: Apache Software License -Classifier: Natural Language :: English +Classifier: Natural Language :: English Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python +Classifier: Programming Language :: Python Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Topic :: Internet :: WWW/HTTP Classifier: Topic :: Software Development :: Libraries Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.* @@ -36,17 +36,17 @@ Requires-Dist: chardet (<5,>=3.0.2) ; python_version < "3" Requires-Dist: idna (<3,>=2.5) ; python_version < "3" Requires-Dist: charset-normalizer (~=2.0.0) ; python_version >= "3" Requires-Dist: idna (<4,>=2.5) ; python_version >= "3" -Provides-Extra: security -Provides-Extra: socks +Provides-Extra: security +Provides-Extra: socks Requires-Dist: PySocks (!=1.5.7,>=1.5.6) ; extra == 'socks' Requires-Dist: win-inet-pton ; (sys_platform == "win32" and python_version == "2.7") and extra == 'socks' Provides-Extra: use_chardet_on_py3 Requires-Dist: chardet (<5,>=3.0.2) ; extra == 'use_chardet_on_py3' - + # Requests - + **Requests** is a simple, yet elegant, HTTP library. - + ```python >>> import requests >>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass')) @@ -61,29 +61,29 @@ Requires-Dist: chardet (<5,>=3.0.2) ; extra == 'use_chardet_on_py3' >>> r.json() {'authenticated': True, ...} ``` - + Requests allows you to send HTTP/1.1 requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method! 
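The README paragraph above ends by pointing at the `json` keyword as the modern way to send a request body. A minimal sketch of both conveniences it mentions, automatic query-string building via `params` and JSON bodies via `json`; the httpbin.org URLs and payload values are illustrative only:

```python
import requests

# Query-string parameters are passed as a dict; Requests builds the URL.
r = requests.get('https://httpbin.org/get', params={'q': 'python', 'page': 2})
print(r.url)  # https://httpbin.org/get?q=python&page=2

# A JSON body: the dict is serialized and the Content-Type header is set automatically.
r = requests.post('https://httpbin.org/post', json={'name': 'example', 'count': 3})
print(r.status_code, r.json()['json'])
```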
- + Requests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`— according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. You may certainly put your trust in this code. - + [](https://pepy.tech/project/requests) [](https://pypi.org/project/requests) [](https://github.com/psf/requests/graphs/contributors) - + ## Installing Requests and Supported Versions - + Requests is available on PyPI: ```console $ python -m pip install requests ``` - + Requests officially supports Python 2.7 & 3.6+. - + ## Supported Features & Best–Practices - + Requests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today. - + - Keep-Alive & Connection Pooling - International Domains and URLs - Sessions with Cookie Persistence @@ -99,9 +99,9 @@ Requests is ready for the demands of building robust and reliable HTTP–speakin - Chunked HTTP Requests ## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io) - + [](https://requests.readthedocs.io) - + ## Cloning the repository When cloning the Requests repository, you may need to add the `-c @@ -119,7 +119,7 @@ git config --global fetch.fsck.badTimezone ignore ``` --- - + [](https://kennethreitz.org) [](https://www.python.org/psf) - + diff --git a/contrib/python/requests/.dist-info/top_level.txt b/contrib/python/requests/.dist-info/top_level.txt index f2293605cf..a80aae1f06 100644 --- a/contrib/python/requests/.dist-info/top_level.txt +++ b/contrib/python/requests/.dist-info/top_level.txt @@ -1 +1 @@ -requests +requests diff --git a/contrib/python/requests/requests/__init__.py b/contrib/python/requests/requests/__init__.py index 53a5b42af6..fcf7adebc3 100644 --- a/contrib/python/requests/requests/__init__.py +++ b/contrib/python/requests/requests/__init__.py @@ -6,7 +6,7 @@ # / """ -Requests HTTP Library +Requests HTTP Library ~~~~~~~~~~~~~~~~~~~~~ Requests is an HTTP library, written in Python, for human beings. @@ -36,40 +36,40 @@ Basic GET usage: The other HTTP methods are supported - see `requests.api`. Full documentation is at <https://requests.readthedocs.io>. -:copyright: (c) 2017 by Kenneth Reitz. +:copyright: (c) 2017 by Kenneth Reitz. :license: Apache 2.0, see LICENSE for more details. """ -import urllib3 -import warnings -from .exceptions import RequestsDependencyWarning +import urllib3 +import warnings +from .exceptions import RequestsDependencyWarning try: from charset_normalizer import __version__ as charset_normalizer_version except ImportError: charset_normalizer_version = None - + try: from chardet import __version__ as chardet_version except ImportError: chardet_version = None def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version): - urllib3_version = urllib3_version.split('.') - assert urllib3_version != ['dev'] # Verify urllib3 isn't installed from git. - - # Sometimes, urllib3 only reports its version as 16.1. - if len(urllib3_version) == 2: - urllib3_version.append('0') - - # Check urllib3 for compatibility. - major, minor, patch = urllib3_version # noqa: F811 - major, minor, patch = int(major), int(minor), int(patch) + urllib3_version = urllib3_version.split('.') + assert urllib3_version != ['dev'] # Verify urllib3 isn't installed from git. + + # Sometimes, urllib3 only reports its version as 16.1. 
+ if len(urllib3_version) == 2: + urllib3_version.append('0') + + # Check urllib3 for compatibility. + major, minor, patch = urllib3_version # noqa: F811 + major, minor, patch = int(major), int(minor), int(patch) # urllib3 >= 1.21.1, <= 1.26 - assert major == 1 - assert minor >= 21 + assert major == 1 + assert minor >= 21 assert minor <= 26 - + # Check charset_normalizer for compatibility. if chardet_version: major, minor, patch = chardet_version.split('.')[:3] @@ -83,7 +83,7 @@ def check_compatibility(urllib3_version, chardet_version, charset_normalizer_ver assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0) else: raise Exception("You need either charset_normalizer or chardet installed") - + def _check_cryptography(cryptography_version): # cryptography < 1.3.4 try: @@ -95,14 +95,14 @@ def _check_cryptography(cryptography_version): warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version) warnings.warn(warning, RequestsDependencyWarning) -# Check imported dependencies for compatibility. -try: +# Check imported dependencies for compatibility. +try: check_compatibility(urllib3.__version__, chardet_version, charset_normalizer_version) -except (AssertionError, ValueError): +except (AssertionError, ValueError): warnings.warn("urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported " "version!".format(urllib3.__version__, chardet_version, charset_normalizer_version), - RequestsDependencyWarning) - + RequestsDependencyWarning) + # Attempt to enable urllib3's fallback for SNI support # if the standard library doesn't support SNI or the # 'ssl' library isn't available. @@ -122,23 +122,23 @@ try: except ImportError: pass -# urllib3's DependencyWarnings should be silenced. -from urllib3.exceptions import DependencyWarning -warnings.simplefilter('ignore', DependencyWarning) - -from .__version__ import __title__, __description__, __url__, __version__ -from .__version__ import __build__, __author__, __author_email__, __license__ -from .__version__ import __copyright__, __cake__ - +# urllib3's DependencyWarnings should be silenced. +from urllib3.exceptions import DependencyWarning +warnings.simplefilter('ignore', DependencyWarning) + +from .__version__ import __title__, __description__, __url__, __version__ +from .__version__ import __build__, __author__, __author_email__, __license__ +from .__version__ import __copyright__, __cake__ + from . import utils -from . import packages +from . import packages from .models import Request, Response, PreparedRequest from .api import request, get, head, post, patch, put, delete, options from .sessions import session, Session from .status_codes import codes from .exceptions import ( RequestException, Timeout, URLRequired, - TooManyRedirects, HTTPError, ConnectionError, + TooManyRedirects, HTTPError, ConnectionError, FileModeWarning, ConnectTimeout, ReadTimeout, JSONDecodeError ) @@ -147,6 +147,6 @@ import logging from logging import NullHandler logging.getLogger(__name__).addHandler(NullHandler()) - -# FileModeWarnings go off per the default. -warnings.simplefilter('default', FileModeWarning, append=True) + +# FileModeWarnings go off per the default. +warnings.simplefilter('default', FileModeWarning, append=True) diff --git a/contrib/python/requests/requests/__version__.py b/contrib/python/requests/requests/__version__.py index e973b03b5f..892594f71a 100644 --- a/contrib/python/requests/requests/__version__.py +++ b/contrib/python/requests/requests/__version__.py @@ -1,14 +1,14 @@ -# .-. .-. .-. . . .-. 
.-. .-. .-. -# |( |- |.| | | |- `-. | `-. -# ' ' `-' `-`.`-' `-' `-' ' `-' - -__title__ = 'requests' -__description__ = 'Python HTTP for Humans.' +# .-. .-. .-. . . .-. .-. .-. .-. +# |( |- |.| | | |- `-. | `-. +# ' ' `-' `-`.`-' `-' `-' ' `-' + +__title__ = 'requests' +__description__ = 'Python HTTP for Humans.' __url__ = 'https://requests.readthedocs.io' __version__ = '2.27.1' __build__ = 0x022701 -__author__ = 'Kenneth Reitz' -__author_email__ = 'me@kennethreitz.org' -__license__ = 'Apache 2.0' +__author__ = 'Kenneth Reitz' +__author_email__ = 'me@kennethreitz.org' +__license__ = 'Apache 2.0' __copyright__ = 'Copyright 2022 Kenneth Reitz' -__cake__ = u'\u2728 \U0001f370 \u2728' +__cake__ = u'\u2728 \U0001f370 \u2728' diff --git a/contrib/python/requests/requests/_internal_utils.py b/contrib/python/requests/requests/_internal_utils.py index 759d9a56ba..5ed7e225ef 100644 --- a/contrib/python/requests/requests/_internal_utils.py +++ b/contrib/python/requests/requests/_internal_utils.py @@ -1,42 +1,42 @@ -# -*- coding: utf-8 -*- - -""" -requests._internal_utils -~~~~~~~~~~~~~~ - -Provides utility functions that are consumed internally by Requests -which depend on extremely few external helpers (such as compat) -""" - -from .compat import is_py2, builtin_str, str - - -def to_native_string(string, encoding='ascii'): - """Given a string object, regardless of type, returns a representation of - that string in the native string type, encoding and decoding where - necessary. This assumes ASCII unless told otherwise. - """ - if isinstance(string, builtin_str): - out = string - else: - if is_py2: - out = string.encode(encoding) - else: - out = string.decode(encoding) - - return out - - -def unicode_is_ascii(u_string): - """Determine if unicode string only contains ASCII characters. - - :param str u_string: unicode string to check. Must be unicode - and not Python 2 `str`. - :rtype: bool - """ - assert isinstance(u_string, str) - try: - u_string.encode('ascii') - return True - except UnicodeEncodeError: - return False +# -*- coding: utf-8 -*- + +""" +requests._internal_utils +~~~~~~~~~~~~~~ + +Provides utility functions that are consumed internally by Requests +which depend on extremely few external helpers (such as compat) +""" + +from .compat import is_py2, builtin_str, str + + +def to_native_string(string, encoding='ascii'): + """Given a string object, regardless of type, returns a representation of + that string in the native string type, encoding and decoding where + necessary. This assumes ASCII unless told otherwise. + """ + if isinstance(string, builtin_str): + out = string + else: + if is_py2: + out = string.encode(encoding) + else: + out = string.decode(encoding) + + return out + + +def unicode_is_ascii(u_string): + """Determine if unicode string only contains ASCII characters. + + :param str u_string: unicode string to check. Must be unicode + and not Python 2 `str`. + :rtype: bool + """ + assert isinstance(u_string, str) + try: + u_string.encode('ascii') + return True + except UnicodeEncodeError: + return False diff --git a/contrib/python/requests/requests/adapters.py b/contrib/python/requests/requests/adapters.py index 3a8463b7db..085f2459c8 100644 --- a/contrib/python/requests/requests/adapters.py +++ b/contrib/python/requests/requests/adapters.py @@ -8,27 +8,27 @@ This module contains the transport adapters that Requests uses to define and maintain connections. 
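The adapters module whose diff follows is the layer a `Session` talks to. As a point of reference, a minimal sketch (not part of this diff) of how a transport adapter is typically mounted, with custom pool sizing and a urllib3 `Retry` policy; all numeric values and URLs are illustrative:

```python
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

# Retry transient 5xx responses a few times with exponential backoff.
retries = Retry(total=3, backoff_factor=0.5, status_forcelist=[502, 503, 504])
adapter = HTTPAdapter(pool_connections=10, pool_maxsize=10, max_retries=retries)

session = requests.Session()
session.mount('https://', adapter)  # use this adapter for every https:// URL
session.mount('http://', adapter)

response = session.get('https://httpbin.org/get', timeout=5)
print(response.status_code)
```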
""" -import os.path +import os.path import socket -from urllib3.poolmanager import PoolManager, proxy_from_url -from urllib3.response import HTTPResponse +from urllib3.poolmanager import PoolManager, proxy_from_url +from urllib3.response import HTTPResponse from urllib3.util import parse_url -from urllib3.util import Timeout as TimeoutSauce -from urllib3.util.retry import Retry -from urllib3.exceptions import ClosedPoolError -from urllib3.exceptions import ConnectTimeoutError -from urllib3.exceptions import HTTPError as _HTTPError +from urllib3.util import Timeout as TimeoutSauce +from urllib3.util.retry import Retry +from urllib3.exceptions import ClosedPoolError +from urllib3.exceptions import ConnectTimeoutError +from urllib3.exceptions import HTTPError as _HTTPError from urllib3.exceptions import InvalidHeader as _InvalidHeader -from urllib3.exceptions import MaxRetryError -from urllib3.exceptions import NewConnectionError -from urllib3.exceptions import ProxyError as _ProxyError -from urllib3.exceptions import ProtocolError -from urllib3.exceptions import ReadTimeoutError -from urllib3.exceptions import SSLError as _SSLError -from urllib3.exceptions import ResponseError +from urllib3.exceptions import MaxRetryError +from urllib3.exceptions import NewConnectionError +from urllib3.exceptions import ProxyError as _ProxyError +from urllib3.exceptions import ProtocolError +from urllib3.exceptions import ReadTimeoutError +from urllib3.exceptions import SSLError as _SSLError +from urllib3.exceptions import ResponseError from urllib3.exceptions import LocationValueError - + from .models import Response from .compat import urlparse, basestring from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths, @@ -41,16 +41,16 @@ from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError, InvalidURL, InvalidHeader) from .auth import _basic_auth_str -try: - from urllib3.contrib.socks import SOCKSProxyManager -except ImportError: - def SOCKSProxyManager(*args, **kwargs): - raise InvalidSchema("Missing dependencies for SOCKS support.") - +try: + from urllib3.contrib.socks import SOCKSProxyManager +except ImportError: + def SOCKSProxyManager(*args, **kwargs): + raise InvalidSchema("Missing dependencies for SOCKS support.") + DEFAULT_POOLBLOCK = False DEFAULT_POOLSIZE = 10 DEFAULT_RETRIES = 0 -DEFAULT_POOL_TIMEOUT = None +DEFAULT_POOL_TIMEOUT = None class BaseAdapter(object): @@ -59,26 +59,26 @@ class BaseAdapter(object): def __init__(self): super(BaseAdapter, self).__init__() - def send(self, request, stream=False, timeout=None, verify=True, - cert=None, proxies=None): - """Sends PreparedRequest object. Returns Response object. - - :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. - :param stream: (optional) Whether to stream the request content. - :param timeout: (optional) How long to wait for the server to send - data before giving up, as a float, or a :ref:`(connect timeout, - read timeout) <timeouts>` tuple. - :type timeout: float or tuple - :param verify: (optional) Either a boolean, in which case it controls whether we verify - the server's TLS certificate, or a string, in which case it must be a path - to a CA bundle to use - :param cert: (optional) Any user-provided SSL certificate to be trusted. - :param proxies: (optional) The proxies dictionary to apply to the request. - """ + def send(self, request, stream=False, timeout=None, verify=True, + cert=None, proxies=None): + """Sends PreparedRequest object. Returns Response object. 
+ + :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. + :param stream: (optional) Whether to stream the request content. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) <timeouts>` tuple. + :type timeout: float or tuple + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use + :param cert: (optional) Any user-provided SSL certificate to be trusted. + :param proxies: (optional) The proxies dictionary to apply to the request. + """ raise NotImplementedError def close(self): - """Cleans up adapter specific items.""" + """Cleans up adapter specific items.""" raise NotImplementedError @@ -92,7 +92,7 @@ class HTTPAdapter(BaseAdapter): :param pool_connections: The number of urllib3 connection pools to cache. :param pool_maxsize: The maximum number of connections to save in the pool. - :param max_retries: The maximum number of retries each connection + :param max_retries: The maximum number of retries each connection should attempt. Note, this applies only to failed DNS lookups, socket connections and connection timeouts, never to requests where data has made it to the server. By default, Requests does not retry failed @@ -134,7 +134,7 @@ class HTTPAdapter(BaseAdapter): def __setstate__(self, state): # Can't handle by adding 'proxy_manager' to self.__attrs__ because - # self.poolmanager uses a lambda function, which isn't pickleable. + # self.poolmanager uses a lambda function, which isn't pickleable. self.proxy_manager = {} self.config = {} @@ -174,24 +174,24 @@ class HTTPAdapter(BaseAdapter): :param proxy: The proxy to return a urllib3 ProxyManager for. :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. :returns: ProxyManager - :rtype: urllib3.ProxyManager + :rtype: urllib3.ProxyManager """ - if proxy in self.proxy_manager: - manager = self.proxy_manager[proxy] - elif proxy.lower().startswith('socks'): - username, password = get_auth_from_url(proxy) - manager = self.proxy_manager[proxy] = SOCKSProxyManager( - proxy, - username=username, - password=password, - num_pools=self._pool_connections, - maxsize=self._pool_maxsize, - block=self._pool_block, - **proxy_kwargs - ) - else: + if proxy in self.proxy_manager: + manager = self.proxy_manager[proxy] + elif proxy.lower().startswith('socks'): + username, password = get_auth_from_url(proxy) + manager = self.proxy_manager[proxy] = SOCKSProxyManager( + proxy, + username=username, + password=password, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs + ) + else: proxy_headers = self.proxy_headers(proxy) - manager = self.proxy_manager[proxy] = proxy_from_url( + manager = self.proxy_manager[proxy] = proxy_from_url( proxy, proxy_headers=proxy_headers, num_pools=self._pool_connections, @@ -199,7 +199,7 @@ class HTTPAdapter(BaseAdapter): block=self._pool_block, **proxy_kwargs) - return manager + return manager def cert_verify(self, conn, url, verify, cert): """Verify a SSL certificate. This method should not be called from user @@ -208,9 +208,9 @@ class HTTPAdapter(BaseAdapter): :param conn: The urllib3 connection object associated with the cert. :param url: The requested URL. 
- :param verify: Either a boolean, in which case it controls whether we verify - the server's TLS certificate, or a string, in which case it must be a path - to a CA bundle to use + :param verify: Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use :param cert: The SSL certificate to verify. """ if url.lower().startswith('https') and verify: @@ -224,20 +224,20 @@ class HTTPAdapter(BaseAdapter): if not cert_loc: cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH) - if not cert_loc or isinstance(cert_loc, basestring) and not os.path.exists(cert_loc): - raise IOError("Could not find a suitable TLS CA certificate bundle, " + if not cert_loc or isinstance(cert_loc, basestring) and not os.path.exists(cert_loc): + raise IOError("Could not find a suitable TLS CA certificate bundle, " "invalid path: {}".format(cert_loc)) conn.cert_reqs = 'CERT_REQUIRED' - - if not isinstance(cert_loc, basestring) or not os.path.isdir(cert_loc): - conn.ca_certs = cert_loc - else: - conn.ca_cert_dir = cert_loc + + if not isinstance(cert_loc, basestring) or not os.path.isdir(cert_loc): + conn.ca_certs = cert_loc + else: + conn.ca_cert_dir = cert_loc else: conn.cert_reqs = 'CERT_NONE' conn.ca_certs = None - conn.ca_cert_dir = None + conn.ca_cert_dir = None if cert: if not isinstance(cert, basestring): @@ -245,12 +245,12 @@ class HTTPAdapter(BaseAdapter): conn.key_file = cert[1] else: conn.cert_file = cert - conn.key_file = None - if conn.cert_file and not os.path.exists(conn.cert_file): - raise IOError("Could not find the TLS certificate file, " + conn.key_file = None + if conn.cert_file and not os.path.exists(conn.cert_file): + raise IOError("Could not find the TLS certificate file, " "invalid path: {}".format(conn.cert_file)) - if conn.key_file and not os.path.exists(conn.key_file): - raise IOError("Could not find the TLS key file, " + if conn.key_file and not os.path.exists(conn.key_file): + raise IOError("Could not find the TLS key file, " "invalid path: {}".format(conn.key_file)) def build_response(self, req, resp): @@ -261,7 +261,7 @@ class HTTPAdapter(BaseAdapter): :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response. :param resp: The urllib3 response object. - :rtype: requests.Response + :rtype: requests.Response """ response = Response() @@ -297,9 +297,9 @@ class HTTPAdapter(BaseAdapter): :param url: The URL to connect to. :param proxies: (optional) A Requests-style dictionary of proxies used on this request. - :rtype: urllib3.ConnectionPool + :rtype: urllib3.ConnectionPool """ - proxy = select_proxy(url, proxies) + proxy = select_proxy(url, proxies) if proxy: proxy = prepend_scheme_if_needed(proxy, 'http') @@ -320,12 +320,12 @@ class HTTPAdapter(BaseAdapter): def close(self): """Disposes of any internal state. - Currently, this closes the PoolManager and any active ProxyManager, - which closes any pooled connections. + Currently, this closes the PoolManager and any active ProxyManager, + which closes any pooled connections. """ self.poolmanager.clear() - for proxy in self.proxy_manager.values(): - proxy.clear() + for proxy in self.proxy_manager.values(): + proxy.clear() def request_url(self, request, proxies): """Obtain the url to use when making the final request. @@ -338,20 +338,20 @@ class HTTPAdapter(BaseAdapter): :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. 
- :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs. - :rtype: str + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs. + :rtype: str """ - proxy = select_proxy(request.url, proxies) + proxy = select_proxy(request.url, proxies) scheme = urlparse(request.url).scheme - is_proxied_http_request = (proxy and scheme != 'https') - using_socks_proxy = False - if proxy: - proxy_scheme = urlparse(proxy).scheme.lower() - using_socks_proxy = proxy_scheme.startswith('socks') - - url = request.path_url - if is_proxied_http_request and not using_socks_proxy: + is_proxied_http_request = (proxy and scheme != 'https') + using_socks_proxy = False + if proxy: + proxy_scheme = urlparse(proxy).scheme.lower() + using_socks_proxy = proxy_scheme.startswith('socks') + + url = request.path_url + if is_proxied_http_request and not using_socks_proxy: url = urldefragauth(request.url) return url @@ -381,12 +381,12 @@ class HTTPAdapter(BaseAdapter): :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :param proxy: The url of the proxy being used for this request. - :rtype: dict + :rtype: dict """ headers = {} username, password = get_auth_from_url(proxy) - if username: + if username: headers['Proxy-Authorization'] = _basic_auth_str(username, password) @@ -398,15 +398,15 @@ class HTTPAdapter(BaseAdapter): :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send - data before giving up, as a float, or a :ref:`(connect timeout, - read timeout) <timeouts>` tuple. - :type timeout: float or tuple or urllib3 Timeout object - :param verify: (optional) Either a boolean, in which case it controls whether - we verify the server's TLS certificate, or a string, in which case it - must be a path to a CA bundle to use + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) <timeouts>` tuple. + :type timeout: float or tuple or urllib3 Timeout object + :param verify: (optional) Either a boolean, in which case it controls whether + we verify the server's TLS certificate, or a string, in which case it + must be a path to a CA bundle to use :param cert: (optional) Any user-provided SSL certificate to be trusted. :param proxies: (optional) The proxies dictionary to apply to the request. 
- :rtype: requests.Response + :rtype: requests.Response """ try: @@ -430,8 +430,8 @@ class HTTPAdapter(BaseAdapter): "timeout tuple, or a single float to set " "both timeouts to the same value".format(timeout)) raise ValueError(err) - elif isinstance(timeout, TimeoutSauce): - pass + elif isinstance(timeout, TimeoutSauce): + pass else: timeout = TimeoutSauce(connect=timeout, read=timeout) @@ -455,7 +455,7 @@ class HTTPAdapter(BaseAdapter): if hasattr(conn, 'proxy_pool'): conn = conn.proxy_pool - low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT) + low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT) try: skip_host = 'Host' in request.headers @@ -476,14 +476,14 @@ class HTTPAdapter(BaseAdapter): low_conn.send(b'\r\n') low_conn.send(b'0\r\n\r\n') - # Receive the response from the server - try: + # Receive the response from the server + try: # For Python 2.7, use buffering of HTTP responses - r = low_conn.getresponse(buffering=True) - except TypeError: + r = low_conn.getresponse(buffering=True) + except TypeError: # For compatibility with Python 3.3+ - r = low_conn.getresponse() - + r = low_conn.getresponse() + resp = HTTPResponse.from_httplib( r, pool=conn, @@ -502,31 +502,31 @@ class HTTPAdapter(BaseAdapter): except MaxRetryError as e: if isinstance(e.reason, ConnectTimeoutError): - # TODO: Remove this in 3.0.0: see #2811 - if not isinstance(e.reason, NewConnectionError): - raise ConnectTimeout(e, request=request) + # TODO: Remove this in 3.0.0: see #2811 + if not isinstance(e.reason, NewConnectionError): + raise ConnectTimeout(e, request=request) if isinstance(e.reason, ResponseError): raise RetryError(e, request=request) - if isinstance(e.reason, _ProxyError): - raise ProxyError(e, request=request) - - if isinstance(e.reason, _SSLError): - # This branch is for urllib3 v1.22 and later. - raise SSLError(e, request=request) - - raise ConnectionError(e, request=request) - - except ClosedPoolError as e: + if isinstance(e.reason, _ProxyError): + raise ProxyError(e, request=request) + + if isinstance(e.reason, _SSLError): + # This branch is for urllib3 v1.22 and later. + raise SSLError(e, request=request) + raise ConnectionError(e, request=request) + except ClosedPoolError as e: + raise ConnectionError(e, request=request) + except _ProxyError as e: raise ProxyError(e) except (_SSLError, _HTTPError) as e: if isinstance(e, _SSLError): - # This branch is for urllib3 versions earlier than v1.22 + # This branch is for urllib3 versions earlier than v1.22 raise SSLError(e, request=request) elif isinstance(e, ReadTimeoutError): raise ReadTimeout(e, request=request) diff --git a/contrib/python/requests/requests/api.py b/contrib/python/requests/requests/api.py index 4cba90eefe..d9b290e3e3 100644 --- a/contrib/python/requests/requests/api.py +++ b/contrib/python/requests/requests/api.py @@ -25,26 +25,26 @@ def request(method, url, **kwargs): :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. - :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. 
- ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` - or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string - defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers - to add for the file. + :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. + ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` + or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string + defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers + to add for the file. :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. - :param timeout: (optional) How many seconds to wait for the server to send data - before giving up, as a float, or a :ref:`(connect timeout, read - timeout) <timeouts>` tuple. + :param timeout: (optional) How many seconds to wait for the server to send data + before giving up, as a float, or a :ref:`(connect timeout, read + timeout) <timeouts>` tuple. :type timeout: float or tuple - :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. + :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. :type allow_redirects: bool :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. - :param verify: (optional) Either a boolean, in which case it controls whether we verify - the server's TLS certificate, or a string, in which case it must be a path - to a CA bundle to use. Defaults to ``True``. + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. :param stream: (optional) if ``False``, the response content will be immediately downloaded. :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. - :return: :class:`Response <Response>` object - :rtype: requests.Response + :return: :class:`Response <Response>` object + :rtype: requests.Response Usage:: @@ -54,48 +54,48 @@ def request(method, url, **kwargs): <Response [200]> """ - # By using the 'with' statement we are sure the session is closed, thus we - # avoid leaving sockets open which can trigger a ResourceWarning in some - # cases, and look like a memory leak in others. - with sessions.Session() as session: - return session.request(method=method, url=url, **kwargs) + # By using the 'with' statement we are sure the session is closed, thus we + # avoid leaving sockets open which can trigger a ResourceWarning in some + # cases, and look like a memory leak in others. + with sessions.Session() as session: + return session.request(method=method, url=url, **kwargs) -def get(url, params=None, **kwargs): - r"""Sends a GET request. +def get(url, params=None, **kwargs): + r"""Sends a GET request. :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary, list of tuples or bytes to send in the query string for the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. 
- :return: :class:`Response <Response>` object - :rtype: requests.Response + :return: :class:`Response <Response>` object + :rtype: requests.Response """ - return request('get', url, params=params, **kwargs) + return request('get', url, params=params, **kwargs) def options(url, **kwargs): - r"""Sends an OPTIONS request. + r"""Sends an OPTIONS request. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response <Response>` object - :rtype: requests.Response + :return: :class:`Response <Response>` object + :rtype: requests.Response """ return request('options', url, **kwargs) def head(url, **kwargs): - r"""Sends a HEAD request. + r"""Sends a HEAD request. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. If `allow_redirects` is not provided, it will be set to `False` (as opposed to the default :meth:`request` behavior). - :return: :class:`Response <Response>` object - :rtype: requests.Response + :return: :class:`Response <Response>` object + :rtype: requests.Response """ kwargs.setdefault('allow_redirects', False) @@ -103,57 +103,57 @@ def head(url, **kwargs): def post(url, data=None, json=None, **kwargs): - r"""Sends a POST request. + r"""Sends a POST request. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) json data to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response <Response>` object - :rtype: requests.Response + :return: :class:`Response <Response>` object + :rtype: requests.Response """ return request('post', url, data=data, json=json, **kwargs) def put(url, data=None, **kwargs): - r"""Sends a PUT request. + r"""Sends a PUT request. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. - :param json: (optional) json data to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response <Response>` object - :rtype: requests.Response + :return: :class:`Response <Response>` object + :rtype: requests.Response """ return request('put', url, data=data, **kwargs) def patch(url, data=None, **kwargs): - r"""Sends a PATCH request. + r"""Sends a PATCH request. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. - :param json: (optional) json data to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. - :return: :class:`Response <Response>` object - :rtype: requests.Response + :return: :class:`Response <Response>` object + :rtype: requests.Response """ - return request('patch', url, data=data, **kwargs) + return request('patch', url, data=data, **kwargs) def delete(url, **kwargs): - r"""Sends a DELETE request. + r"""Sends a DELETE request. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. 
- :return: :class:`Response <Response>` object - :rtype: requests.Response + :return: :class:`Response <Response>` object + :rtype: requests.Response """ return request('delete', url, **kwargs) diff --git a/contrib/python/requests/requests/auth.py b/contrib/python/requests/requests/auth.py index eeface39ae..5aa777412a 100644 --- a/contrib/python/requests/requests/auth.py +++ b/contrib/python/requests/requests/auth.py @@ -11,15 +11,15 @@ import os import re import time import hashlib -import threading -import warnings +import threading +import warnings from base64 import b64encode -from .compat import urlparse, str, basestring +from .compat import urlparse, str, basestring from .cookies import extract_cookies_to_jar -from ._internal_utils import to_native_string -from .utils import parse_dict_header +from ._internal_utils import to_native_string +from .utils import parse_dict_header CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' CONTENT_TYPE_MULTI_PART = 'multipart/form-data' @@ -28,42 +28,42 @@ CONTENT_TYPE_MULTI_PART = 'multipart/form-data' def _basic_auth_str(username, password): """Returns a Basic Auth string.""" - # "I want us to put a big-ol' comment on top of it that - # says that this behaviour is dumb but we need to preserve - # it because people are relying on it." - # - Lukasa - # - # These are here solely to maintain backwards compatibility - # for things like ints. This will be removed in 3.0.0. - if not isinstance(username, basestring): - warnings.warn( - "Non-string usernames will no longer be supported in Requests " + # "I want us to put a big-ol' comment on top of it that + # says that this behaviour is dumb but we need to preserve + # it because people are relying on it." + # - Lukasa + # + # These are here solely to maintain backwards compatibility + # for things like ints. This will be removed in 3.0.0. + if not isinstance(username, basestring): + warnings.warn( + "Non-string usernames will no longer be supported in Requests " "3.0.0. Please convert the object you've passed in ({!r}) to " - "a string or bytes object in the near future to avoid " - "problems.".format(username), - category=DeprecationWarning, - ) - username = str(username) - - if not isinstance(password, basestring): - warnings.warn( - "Non-string passwords will no longer be supported in Requests " + "a string or bytes object in the near future to avoid " + "problems.".format(username), + category=DeprecationWarning, + ) + username = str(username) + + if not isinstance(password, basestring): + warnings.warn( + "Non-string passwords will no longer be supported in Requests " "3.0.0. 
Please convert the object you've passed in ({!r}) to " - "a string or bytes object in the near future to avoid " + "a string or bytes object in the near future to avoid " "problems.".format(type(password)), - category=DeprecationWarning, - ) - password = str(password) - # -- End Removal -- - - if isinstance(username, str): - username = username.encode('latin1') - - if isinstance(password, str): - password = password.encode('latin1') - + category=DeprecationWarning, + ) + password = str(password) + # -- End Removal -- + + if isinstance(username, str): + username = username.encode('latin1') + + if isinstance(password, str): + password = password.encode('latin1') + authstr = 'Basic ' + to_native_string( - b64encode(b':'.join((username, password))).strip() + b64encode(b':'.join((username, password))).strip() ) return authstr @@ -78,20 +78,20 @@ class AuthBase(object): class HTTPBasicAuth(AuthBase): """Attaches HTTP Basic Authentication to the given Request object.""" - + def __init__(self, username, password): self.username = username self.password = password - def __eq__(self, other): - return all([ - self.username == getattr(other, 'username', None), - self.password == getattr(other, 'password', None) - ]) - - def __ne__(self, other): - return not self == other - + def __eq__(self, other): + return all([ + self.username == getattr(other, 'username', None), + self.password == getattr(other, 'password', None) + ]) + + def __ne__(self, other): + return not self == other + def __call__(self, r): r.headers['Authorization'] = _basic_auth_str(self.username, self.password) return r @@ -99,7 +99,7 @@ class HTTPBasicAuth(AuthBase): class HTTPProxyAuth(HTTPBasicAuth): """Attaches HTTP Proxy Authentication to a given Request object.""" - + def __call__(self, r): r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password) return r @@ -107,34 +107,34 @@ class HTTPProxyAuth(HTTPBasicAuth): class HTTPDigestAuth(AuthBase): """Attaches HTTP Digest Authentication to the given Request object.""" - + def __init__(self, username, password): self.username = username self.password = password - # Keep state in per-thread local storage - self._thread_local = threading.local() - - def init_per_thread_state(self): - # Ensure state is initialized just once per-thread - if not hasattr(self._thread_local, 'init'): - self._thread_local.init = True - self._thread_local.last_nonce = '' - self._thread_local.nonce_count = 0 - self._thread_local.chal = {} - self._thread_local.pos = None - self._thread_local.num_401_calls = None - + # Keep state in per-thread local storage + self._thread_local = threading.local() + + def init_per_thread_state(self): + # Ensure state is initialized just once per-thread + if not hasattr(self._thread_local, 'init'): + self._thread_local.init = True + self._thread_local.last_nonce = '' + self._thread_local.nonce_count = 0 + self._thread_local.chal = {} + self._thread_local.pos = None + self._thread_local.num_401_calls = None + def build_digest_header(self, method, url): - """ - :rtype: str - """ + """ + :rtype: str + """ - realm = self._thread_local.chal['realm'] - nonce = self._thread_local.chal['nonce'] - qop = self._thread_local.chal.get('qop') - algorithm = self._thread_local.chal.get('algorithm') - opaque = self._thread_local.chal.get('opaque') - hash_utf8 = None + realm = self._thread_local.chal['realm'] + nonce = self._thread_local.chal['nonce'] + qop = self._thread_local.chal.get('qop') + algorithm = self._thread_local.chal.get('algorithm') + opaque = 
self._thread_local.chal.get('opaque') + hash_utf8 = None if algorithm is None: _algorithm = 'MD5' @@ -174,8 +174,8 @@ class HTTPDigestAuth(AuthBase): # XXX not implemented yet entdig = None p_parsed = urlparse(url) - #: path is request-uri defined in RFC 2616 which should not be empty - path = p_parsed.path or "/" + #: path is request-uri defined in RFC 2616 which should not be empty + path = p_parsed.path or "/" if p_parsed.query: path += '?' + p_parsed.query @@ -185,12 +185,12 @@ class HTTPDigestAuth(AuthBase): HA1 = hash_utf8(A1) HA2 = hash_utf8(A2) - if nonce == self._thread_local.last_nonce: - self._thread_local.nonce_count += 1 + if nonce == self._thread_local.last_nonce: + self._thread_local.nonce_count += 1 else: - self._thread_local.nonce_count = 1 - ncvalue = '%08x' % self._thread_local.nonce_count - s = str(self._thread_local.nonce_count).encode('utf-8') + self._thread_local.nonce_count = 1 + ncvalue = '%08x' % self._thread_local.nonce_count + s = str(self._thread_local.nonce_count).encode('utf-8') s += nonce.encode('utf-8') s += time.ctime().encode('utf-8') s += os.urandom(8) @@ -199,18 +199,18 @@ class HTTPDigestAuth(AuthBase): if _algorithm == 'MD5-SESS': HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce)) - if not qop: + if not qop: respdig = KD(HA1, "%s:%s" % (nonce, HA2)) elif qop == 'auth' or 'auth' in qop.split(','): - noncebit = "%s:%s:%s:%s:%s" % ( - nonce, ncvalue, cnonce, 'auth', HA2 - ) + noncebit = "%s:%s:%s:%s:%s" % ( + nonce, ncvalue, cnonce, 'auth', HA2 + ) respdig = KD(HA1, noncebit) else: # XXX handle auth-int. return None - self._thread_local.last_nonce = nonce + self._thread_local.last_nonce = nonce # XXX should the partial digests be encoded too? base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ @@ -229,37 +229,37 @@ class HTTPDigestAuth(AuthBase): def handle_redirect(self, r, **kwargs): """Reset num_401_calls counter on redirects.""" if r.is_redirect: - self._thread_local.num_401_calls = 1 + self._thread_local.num_401_calls = 1 def handle_401(self, r, **kwargs): - """ - Takes the given response and tries digest-auth, if needed. + """ + Takes the given response and tries digest-auth, if needed. - :rtype: requests.Response - """ - - # If response is not 4xx, do not auth + :rtype: requests.Response + """ + + # If response is not 4xx, do not auth # See https://github.com/psf/requests/issues/3772 - if not 400 <= r.status_code < 500: - self._thread_local.num_401_calls = 1 - return r - - if self._thread_local.pos is not None: + if not 400 <= r.status_code < 500: + self._thread_local.num_401_calls = 1 + return r + + if self._thread_local.pos is not None: # Rewind the file position indicator of the body to where # it was to resend the request. - r.request.body.seek(self._thread_local.pos) + r.request.body.seek(self._thread_local.pos) s_auth = r.headers.get('www-authenticate', '') - if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2: + if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2: - self._thread_local.num_401_calls += 1 + self._thread_local.num_401_calls += 1 pat = re.compile(r'digest ', flags=re.IGNORECASE) - self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1)) + self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1)) # Consume content and release the original connection # to allow our new request to reuse the same one. 
r.content - r.close() + r.close() prep = r.request.copy() extract_cookies_to_jar(prep._cookies, r.request, r.raw) prep.prepare_cookies(prep._cookies) @@ -272,34 +272,34 @@ class HTTPDigestAuth(AuthBase): return _r - self._thread_local.num_401_calls = 1 + self._thread_local.num_401_calls = 1 return r def __call__(self, r): - # Initialize per-thread state, if needed - self.init_per_thread_state() + # Initialize per-thread state, if needed + self.init_per_thread_state() # If we have a saved nonce, skip the 401 - if self._thread_local.last_nonce: + if self._thread_local.last_nonce: r.headers['Authorization'] = self.build_digest_header(r.method, r.url) try: - self._thread_local.pos = r.body.tell() + self._thread_local.pos = r.body.tell() except AttributeError: # In the case of HTTPDigestAuth being reused and the body of # the previous request was a file-like object, pos has the # file position of the previous body. Ensure it's set to # None. - self._thread_local.pos = None + self._thread_local.pos = None r.register_hook('response', self.handle_401) r.register_hook('response', self.handle_redirect) - self._thread_local.num_401_calls = 1 - + self._thread_local.num_401_calls = 1 + return r - - def __eq__(self, other): - return all([ - self.username == getattr(other, 'username', None), - self.password == getattr(other, 'password', None) - ]) - - def __ne__(self, other): - return not self == other + + def __eq__(self, other): + return all([ + self.username == getattr(other, 'username', None), + self.password == getattr(other, 'password', None) + ]) + + def __ne__(self, other): + return not self == other diff --git a/contrib/python/requests/requests/certs.py b/contrib/python/requests/requests/certs.py index d1a378d787..7f5162dfe2 100644 --- a/contrib/python/requests/requests/certs.py +++ b/contrib/python/requests/requests/certs.py @@ -2,17 +2,17 @@ # -*- coding: utf-8 -*- """ -requests.certs -~~~~~~~~~~~~~~ +requests.certs +~~~~~~~~~~~~~~ -This module returns the preferred default CA certificate bundle. There is -only one — the one from the certifi package. +This module returns the preferred default CA certificate bundle. There is +only one — the one from the certifi package. If you are packaging Requests, e.g., for a Linux distribution or a managed environment, you can change the definition of where() to return a separately packaged CA bundle. """ -from certifi import where +from certifi import where -if __name__ == '__main__': - print(where()) +if __name__ == '__main__': + print(where()) diff --git a/contrib/python/requests/requests/compat.py b/contrib/python/requests/requests/compat.py index 029ae62ac3..fc2ffca36b 100644 --- a/contrib/python/requests/requests/compat.py +++ b/contrib/python/requests/requests/compat.py @@ -1,11 +1,11 @@ # -*- coding: utf-8 -*- """ -requests.compat -~~~~~~~~~~~~~~~ - -This module handles import compatibility issues between Python 2 and -Python 3. +requests.compat +~~~~~~~~~~~~~~~ + +This module handles import compatibility issues between Python 2 and +Python 3. 
""" try: @@ -32,7 +32,7 @@ has_simplejson = False try: import simplejson as json has_simplejson = True -except ImportError: +except ImportError: import json # --------- @@ -40,9 +40,9 @@ except ImportError: # --------- if is_py2: - from urllib import ( - quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, - proxy_bypass, proxy_bypass_environment, getproxies_environment) + from urllib import ( + quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, + proxy_bypass, proxy_bypass_environment, getproxies_environment) from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag from urllib2 import parse_http_list import cookielib @@ -56,12 +56,12 @@ if is_py2: str = unicode basestring = basestring numeric_types = (int, long, float) - integer_types = (int, long) + integer_types = (int, long) JSONDecodeError = ValueError elif is_py3: from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag - from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment + from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment from http import cookiejar as cookielib from http.cookies import Morsel from io import StringIO @@ -78,4 +78,4 @@ elif is_py3: bytes = bytes basestring = (str, bytes) numeric_types = (int, float) - integer_types = (int,) + integer_types = (int,) diff --git a/contrib/python/requests/requests/cookies.py b/contrib/python/requests/requests/cookies.py index 56fccd9c25..4280752437 100644 --- a/contrib/python/requests/requests/cookies.py +++ b/contrib/python/requests/requests/cookies.py @@ -1,19 +1,19 @@ # -*- coding: utf-8 -*- """ -requests.cookies -~~~~~~~~~~~~~~~~ - +requests.cookies +~~~~~~~~~~~~~~~~ + Compatibility code to be able to use `cookielib.CookieJar` with requests. requests.utils imports from here, so be careful with imports. """ -import copy +import copy import time -import calendar - -from ._internal_utils import to_native_string +import calendar + +from ._internal_utils import to_native_string from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping try: @@ -54,7 +54,7 @@ class MockRequest(object): if not self._r.headers.get('Host'): return self._r.url # If they did set it, retrieve it and reconstruct the expected domain - host = to_native_string(self._r.headers['Host'], encoding='utf-8') + host = to_native_string(self._r.headers['Host'], encoding='utf-8') parsed = urlparse(self._r.url) # Reconstruct the URL as we expect it return urlunparse([ @@ -133,11 +133,11 @@ def extract_cookies_to_jar(jar, request, response): def get_cookie_header(jar, request): - """ - Produce an appropriate Cookie header string to be sent with `request`, or None. - - :rtype: str - """ + """ + Produce an appropriate Cookie header string to be sent with `request`, or None. 
+ + :rtype: str + """ r = MockRequest(request) jar.add_cookie_header(r) return r.get_new_headers().get('Cookie') @@ -150,13 +150,13 @@ def remove_cookie_by_name(cookiejar, name, domain=None, path=None): """ clearables = [] for cookie in cookiejar: - if cookie.name != name: - continue - if domain is not None and domain != cookie.domain: - continue - if path is not None and path != cookie.path: - continue - clearables.append((cookie.domain, cookie.path, cookie.name)) + if cookie.name != name: + continue + if domain is not None and domain != cookie.domain: + continue + if path is not None and path != cookie.path: + continue + clearables.append((cookie.domain, cookie.path, cookie.name)) for domain, path, name in clearables: cookiejar.clear(domain, path, name) @@ -164,35 +164,35 @@ def remove_cookie_by_name(cookiejar, name, domain=None, path=None): class CookieConflictError(RuntimeError): """There are two cookies that meet the criteria specified in the cookie jar. - Use .get and .set and include domain and path args in order to be more specific. - """ + Use .get and .set and include domain and path args in order to be more specific. + """ class RequestsCookieJar(cookielib.CookieJar, MutableMapping): - """Compatibility class; is a cookielib.CookieJar, but exposes a dict - interface. + """Compatibility class; is a cookielib.CookieJar, but exposes a dict + interface. This is the CookieJar we create by default for requests and sessions that don't specify one, since some clients may expect response.cookies and session.cookies to support dict operations. - Requests does not use the dict interface internally; it's just for - compatibility with external client code. All requests code should work - out of the box with externally provided instances of ``CookieJar``, e.g. - ``LWPCookieJar`` and ``FileCookieJar``. + Requests does not use the dict interface internally; it's just for + compatibility with external client code. All requests code should work + out of the box with externally provided instances of ``CookieJar``, e.g. + ``LWPCookieJar`` and ``FileCookieJar``. - Unlike a regular CookieJar, this class is pickleable. + Unlike a regular CookieJar, this class is pickleable. - .. warning:: dictionary operations that are normally O(1) may be O(n). + .. warning:: dictionary operations that are normally O(1) may be O(n). """ def get(self, name, default=None, domain=None, path=None): """Dict-like get() that also supports optional domain and path args in order to resolve naming collisions from using one cookie jar over - multiple domains. - - .. warning:: operation is O(n), not O(1). - """ + multiple domains. + + .. warning:: operation is O(n), not O(1). + """ try: return self._find_no_duplicates(name, domain, path) except KeyError: @@ -201,8 +201,8 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping): def set(self, name, value, **kwargs): """Dict-like set() that also supports optional domain and path args in order to resolve naming collisions from using one cookie jar over - multiple domains. - """ + multiple domains. + """ # support client code that unsets cookies by assignment of a None value: if value is None: remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path')) @@ -216,55 +216,55 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping): return c def iterkeys(self): - """Dict-like iterkeys() that returns an iterator of names of cookies - from the jar. - - .. seealso:: itervalues() and iteritems(). 
- """ + """Dict-like iterkeys() that returns an iterator of names of cookies + from the jar. + + .. seealso:: itervalues() and iteritems(). + """ for cookie in iter(self): yield cookie.name def keys(self): - """Dict-like keys() that returns a list of names of cookies from the - jar. - - .. seealso:: values() and items(). - """ + """Dict-like keys() that returns a list of names of cookies from the + jar. + + .. seealso:: values() and items(). + """ return list(self.iterkeys()) def itervalues(self): - """Dict-like itervalues() that returns an iterator of values of cookies - from the jar. - - .. seealso:: iterkeys() and iteritems(). - """ + """Dict-like itervalues() that returns an iterator of values of cookies + from the jar. + + .. seealso:: iterkeys() and iteritems(). + """ for cookie in iter(self): yield cookie.value def values(self): - """Dict-like values() that returns a list of values of cookies from the - jar. - - .. seealso:: keys() and items(). - """ + """Dict-like values() that returns a list of values of cookies from the + jar. + + .. seealso:: keys() and items(). + """ return list(self.itervalues()) def iteritems(self): - """Dict-like iteritems() that returns an iterator of name-value tuples - from the jar. - - .. seealso:: iterkeys() and itervalues(). - """ + """Dict-like iteritems() that returns an iterator of name-value tuples + from the jar. + + .. seealso:: iterkeys() and itervalues(). + """ for cookie in iter(self): yield cookie.name, cookie.value def items(self): - """Dict-like items() that returns a list of name-value tuples from the - jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a - vanilla python dict of key value pairs. - - .. seealso:: keys() and values(). - """ + """Dict-like items() that returns a list of name-value tuples from the + jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a + vanilla python dict of key value pairs. + + .. seealso:: keys() and values(). + """ return list(self.iteritems()) def list_domains(self): @@ -285,10 +285,10 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping): def multiple_domains(self): """Returns True if there are multiple domains in the jar. - Returns False otherwise. - - :rtype: bool - """ + Returns False otherwise. + + :rtype: bool + """ domains = [] for cookie in iter(self): if cookie.domain is not None and cookie.domain in domains: @@ -297,47 +297,47 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping): return False # there is only one domain in jar def get_dict(self, domain=None, path=None): - """Takes as an argument an optional domain and path and returns a plain - old Python dict of name-value pairs of cookies that meet the - requirements. - - :rtype: dict - """ + """Takes as an argument an optional domain and path and returns a plain + old Python dict of name-value pairs of cookies that meet the + requirements. 
+ + :rtype: dict + """ dictionary = {} for cookie in iter(self): - if ( - (domain is None or cookie.domain == domain) and - (path is None or cookie.path == path) - ): + if ( + (domain is None or cookie.domain == domain) and + (path is None or cookie.path == path) + ): dictionary[cookie.name] = cookie.value return dictionary - def __contains__(self, name): - try: - return super(RequestsCookieJar, self).__contains__(name) - except CookieConflictError: - return True - + def __contains__(self, name): + try: + return super(RequestsCookieJar, self).__contains__(name) + except CookieConflictError: + return True + def __getitem__(self, name): - """Dict-like __getitem__() for compatibility with client code. Throws - exception if there are more than one cookie with name. In that case, - use the more explicit get() method instead. + """Dict-like __getitem__() for compatibility with client code. Throws + exception if there are more than one cookie with name. In that case, + use the more explicit get() method instead. - .. warning:: operation is O(n), not O(1). - """ + .. warning:: operation is O(n), not O(1). + """ return self._find_no_duplicates(name) def __setitem__(self, name, value): - """Dict-like __setitem__ for compatibility with client code. Throws - exception if there is already a cookie of that name in the jar. In that - case, use the more explicit set() method instead. - """ + """Dict-like __setitem__ for compatibility with client code. Throws + exception if there is already a cookie of that name in the jar. In that + case, use the more explicit set() method instead. + """ self.set(name, value) def __delitem__(self, name): - """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s - ``remove_cookie_by_name()``. - """ + """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s + ``remove_cookie_by_name()``. + """ remove_cookie_by_name(self, name) def set_cookie(self, cookie, *args, **kwargs): @@ -349,22 +349,22 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping): """Updates this jar with cookies from another CookieJar or dict-like""" if isinstance(other, cookielib.CookieJar): for cookie in other: - self.set_cookie(copy.copy(cookie)) + self.set_cookie(copy.copy(cookie)) else: super(RequestsCookieJar, self).update(other) def _find(self, name, domain=None, path=None): - """Requests uses this method internally to get cookie values. - - If there are conflicting cookies, _find arbitrarily chooses one. - See _find_no_duplicates if you want an exception thrown if there are - conflicting cookies. - - :param name: a string containing name of cookie - :param domain: (optional) string containing domain of cookie - :param path: (optional) string containing path of cookie - :return: cookie.value - """ + """Requests uses this method internally to get cookie values. + + If there are conflicting cookies, _find arbitrarily chooses one. + See _find_no_duplicates if you want an exception thrown if there are + conflicting cookies. 
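A minimal usage sketch of the dict-like RequestsCookieJar behaviour these docstrings describe; the cookie names and `example.com`/`example.org` domains are illustrative only, not taken from the patch:

```python
from requests.cookies import RequestsCookieJar, CookieConflictError

jar = RequestsCookieJar()
# Two cookies share a name but live on different domains.
jar.set('lang', 'en', domain='example.com', path='/')
jar.set('lang', 'fr', domain='example.org', path='/')

print(jar.keys())                              # ['lang', 'lang']
print(jar.get_dict(domain='example.com'))      # {'lang': 'en'}
print(jar.get('lang', domain='example.org'))   # 'fr'

# Plain __getitem__ cannot resolve the collision; get()/set() with
# domain/path arguments are the documented way around it.
try:
    jar['lang']
except CookieConflictError:
    print('ambiguous cookie name, pass domain and/or path')
```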
+ + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :return: cookie.value + """ for cookie in iter(self): if cookie.name == name: if domain is None or cookie.domain == domain: @@ -374,17 +374,17 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping): raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) def _find_no_duplicates(self, name, domain=None, path=None): - """Both ``__get_item__`` and ``get`` call this function: it's never - used elsewhere in Requests. - - :param name: a string containing name of cookie - :param domain: (optional) string containing domain of cookie - :param path: (optional) string containing path of cookie - :raises KeyError: if cookie is not found - :raises CookieConflictError: if there are multiple cookies - that match name and optionally domain and path - :return: cookie.value - """ + """Both ``__get_item__`` and ``get`` call this function: it's never + used elsewhere in Requests. + + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :raises KeyError: if cookie is not found + :raises CookieConflictError: if there are multiple cookies + that match name and optionally domain and path + :return: cookie.value + """ toReturn = None for cookie in iter(self): if cookie.name == name: @@ -423,21 +423,21 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping): return self._policy -def _copy_cookie_jar(jar): - if jar is None: - return None - - if hasattr(jar, 'copy'): - # We're dealing with an instance of RequestsCookieJar - return jar.copy() - # We're dealing with a generic CookieJar instance - new_jar = copy.copy(jar) - new_jar.clear() - for cookie in jar: - new_jar.set_cookie(copy.copy(cookie)) - return new_jar - - +def _copy_cookie_jar(jar): + if jar is None: + return None + + if hasattr(jar, 'copy'): + # We're dealing with an instance of RequestsCookieJar + return jar.copy() + # We're dealing with a generic CookieJar instance + new_jar = copy.copy(jar) + new_jar.clear() + for cookie in jar: + new_jar.set_cookie(copy.copy(cookie)) + return new_jar + + def create_cookie(name, value, **kwargs): """Make a cookie from underspecified parameters. 
@@ -479,15 +479,15 @@ def morsel_to_cookie(morsel): expires = None if morsel['max-age']: - try: - expires = int(time.time() + int(morsel['max-age'])) - except ValueError: - raise TypeError('max-age: %s must be integer' % morsel['max-age']) + try: + expires = int(time.time() + int(morsel['max-age'])) + except ValueError: + raise TypeError('max-age: %s must be integer' % morsel['max-age']) elif morsel['expires']: time_template = '%a, %d-%b-%Y %H:%M:%S GMT' - expires = calendar.timegm( - time.strptime(morsel['expires'], time_template) - ) + expires = calendar.timegm( + time.strptime(morsel['expires'], time_template) + ) return create_cookie( comment=morsel['comment'], comment_url=bool(morsel['comment']), @@ -535,7 +535,7 @@ def merge_cookies(cookiejar, cookies): """ if not isinstance(cookiejar, cookielib.CookieJar): raise ValueError('You can only merge into CookieJar') - + if isinstance(cookies, dict): cookiejar = cookiejar_from_dict( cookies, cookiejar=cookiejar, overwrite=False) diff --git a/contrib/python/requests/requests/exceptions.py b/contrib/python/requests/requests/exceptions.py index 79697635a5..9ad96cee5a 100644 --- a/contrib/python/requests/requests/exceptions.py +++ b/contrib/python/requests/requests/exceptions.py @@ -6,18 +6,18 @@ requests.exceptions This module contains the set of Requests' exceptions. """ -from urllib3.exceptions import HTTPError as BaseHTTPError +from urllib3.exceptions import HTTPError as BaseHTTPError from .compat import JSONDecodeError as CompatJSONDecodeError class RequestException(IOError): """There was an ambiguous exception that occurred while handling your - request. - """ + request. + """ def __init__(self, *args, **kwargs): - """Initialize RequestException with `request` and `response` objects.""" + """Initialize RequestException with `request` and `response` objects.""" response = kwargs.pop('response', None) self.response = response self.request = kwargs.pop('request', None) @@ -88,13 +88,13 @@ class InvalidSchema(RequestException, ValueError): class InvalidURL(RequestException, ValueError): - """The URL provided was somehow invalid.""" - - -class InvalidHeader(RequestException, ValueError): - """The header value provided was somehow invalid.""" + """The URL provided was somehow invalid.""" +class InvalidHeader(RequestException, ValueError): + """The header value provided was somehow invalid.""" + + class InvalidProxyURL(InvalidURL): """The proxy URL provided is invalid.""" @@ -113,21 +113,21 @@ class StreamConsumedError(RequestException, TypeError): class RetryError(RequestException): """Custom retries logic failed""" - - -class UnrewindableBodyError(RequestException): + + +class UnrewindableBodyError(RequestException): """Requests encountered an error when trying to rewind a body.""" - -# Warnings - - -class RequestsWarning(Warning): - """Base warning for Requests.""" - - -class FileModeWarning(RequestsWarning, DeprecationWarning): - """A file was opened in text mode, but Requests determined its binary length.""" - - -class RequestsDependencyWarning(RequestsWarning): - """An imported dependency doesn't match the expected version range.""" + +# Warnings + + +class RequestsWarning(Warning): + """Base warning for Requests.""" + + +class FileModeWarning(RequestsWarning, DeprecationWarning): + """A file was opened in text mode, but Requests determined its binary length.""" + + +class RequestsDependencyWarning(RequestsWarning): + """An imported dependency doesn't match the expected version range.""" diff --git 
a/contrib/python/requests/requests/help.py b/contrib/python/requests/requests/help.py index 4cd6389f55..3e71f79c73 100644 --- a/contrib/python/requests/requests/help.py +++ b/contrib/python/requests/requests/help.py @@ -1,17 +1,17 @@ -"""Module containing bug report helper(s).""" -from __future__ import print_function - -import json -import platform -import sys -import ssl - -import idna -import urllib3 - -from . import __version__ as requests_version - -try: +"""Module containing bug report helper(s).""" +from __future__ import print_function + +import json +import platform +import sys +import ssl + +import idna +import urllib3 + +from . import __version__ as requests_version + +try: import charset_normalizer except ImportError: charset_normalizer = None @@ -23,113 +23,113 @@ except ImportError: try: from urllib3.contrib import pyopenssl -except ImportError: - pyopenssl = None - OpenSSL = None - cryptography = None -else: - import OpenSSL - import cryptography - - -def _implementation(): - """Return a dict with the Python implementation and version. - - Provide both the name and the version of the Python implementation - currently running. For example, on CPython 2.7.5 it will return - {'name': 'CPython', 'version': '2.7.5'}. - - This function works best on CPython and PyPy: in particular, it probably - doesn't work for Jython or IronPython. Future investigation should be done - to work out the correct shape of the code for those platforms. - """ - implementation = platform.python_implementation() - - if implementation == 'CPython': - implementation_version = platform.python_version() - elif implementation == 'PyPy': - implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, - sys.pypy_version_info.minor, - sys.pypy_version_info.micro) - if sys.pypy_version_info.releaselevel != 'final': - implementation_version = ''.join([ - implementation_version, sys.pypy_version_info.releaselevel - ]) - elif implementation == 'Jython': - implementation_version = platform.python_version() # Complete Guess - elif implementation == 'IronPython': - implementation_version = platform.python_version() # Complete Guess - else: - implementation_version = 'Unknown' - - return {'name': implementation, 'version': implementation_version} - - -def info(): - """Generate information for a bug report.""" - try: - platform_info = { - 'system': platform.system(), - 'release': platform.release(), - } - except IOError: - platform_info = { - 'system': 'Unknown', - 'release': 'Unknown', - } - - implementation_info = _implementation() - urllib3_info = {'version': urllib3.__version__} +except ImportError: + pyopenssl = None + OpenSSL = None + cryptography = None +else: + import OpenSSL + import cryptography + + +def _implementation(): + """Return a dict with the Python implementation and version. + + Provide both the name and the version of the Python implementation + currently running. For example, on CPython 2.7.5 it will return + {'name': 'CPython', 'version': '2.7.5'}. + + This function works best on CPython and PyPy: in particular, it probably + doesn't work for Jython or IronPython. Future investigation should be done + to work out the correct shape of the code for those platforms. 
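A hedged sketch of how the bug-report helper in requests/help.py is typically invoked; the exact keys and version strings in the output depend on the local environment:

```python
import json
import requests.help

# info() gathers interpreter, platform, urllib3, chardet/charset_normalizer
# and OpenSSL details into a plain dict.
report = requests.help.info()
print(report['implementation'])   # e.g. {'name': 'CPython', 'version': '3.9.7'}

# `python -m requests.help` prints the same structure via main().
print(json.dumps(report, sort_keys=True, indent=2))
```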
+ """ + implementation = platform.python_implementation() + + if implementation == 'CPython': + implementation_version = platform.python_version() + elif implementation == 'PyPy': + implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, + sys.pypy_version_info.minor, + sys.pypy_version_info.micro) + if sys.pypy_version_info.releaselevel != 'final': + implementation_version = ''.join([ + implementation_version, sys.pypy_version_info.releaselevel + ]) + elif implementation == 'Jython': + implementation_version = platform.python_version() # Complete Guess + elif implementation == 'IronPython': + implementation_version = platform.python_version() # Complete Guess + else: + implementation_version = 'Unknown' + + return {'name': implementation, 'version': implementation_version} + + +def info(): + """Generate information for a bug report.""" + try: + platform_info = { + 'system': platform.system(), + 'release': platform.release(), + } + except IOError: + platform_info = { + 'system': 'Unknown', + 'release': 'Unknown', + } + + implementation_info = _implementation() + urllib3_info = {'version': urllib3.__version__} charset_normalizer_info = {'version': None} chardet_info = {'version': None} if charset_normalizer: charset_normalizer_info = {'version': charset_normalizer.__version__} if chardet: chardet_info = {'version': chardet.__version__} - - pyopenssl_info = { - 'version': None, - 'openssl_version': '', - } - if OpenSSL: - pyopenssl_info = { - 'version': OpenSSL.__version__, - 'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER, - } - cryptography_info = { - 'version': getattr(cryptography, '__version__', ''), - } - idna_info = { - 'version': getattr(idna, '__version__', ''), - } - + + pyopenssl_info = { + 'version': None, + 'openssl_version': '', + } + if OpenSSL: + pyopenssl_info = { + 'version': OpenSSL.__version__, + 'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER, + } + cryptography_info = { + 'version': getattr(cryptography, '__version__', ''), + } + idna_info = { + 'version': getattr(idna, '__version__', ''), + } + system_ssl = ssl.OPENSSL_VERSION_NUMBER - system_ssl_info = { - 'version': '%x' % system_ssl if system_ssl is not None else '' - } - - return { - 'platform': platform_info, - 'implementation': implementation_info, - 'system_ssl': system_ssl_info, - 'using_pyopenssl': pyopenssl is not None, + system_ssl_info = { + 'version': '%x' % system_ssl if system_ssl is not None else '' + } + + return { + 'platform': platform_info, + 'implementation': implementation_info, + 'system_ssl': system_ssl_info, + 'using_pyopenssl': pyopenssl is not None, 'using_charset_normalizer': chardet is None, - 'pyOpenSSL': pyopenssl_info, - 'urllib3': urllib3_info, - 'chardet': chardet_info, + 'pyOpenSSL': pyopenssl_info, + 'urllib3': urllib3_info, + 'chardet': chardet_info, 'charset_normalizer': charset_normalizer_info, - 'cryptography': cryptography_info, - 'idna': idna_info, - 'requests': { - 'version': requests_version, - }, - } - - -def main(): - """Pretty-print the bug information as JSON.""" - print(json.dumps(info(), sort_keys=True, indent=2)) - - -if __name__ == '__main__': - main() + 'cryptography': cryptography_info, + 'idna': idna_info, + 'requests': { + 'version': requests_version, + }, + } + + +def main(): + """Pretty-print the bug information as JSON.""" + print(json.dumps(info(), sort_keys=True, indent=2)) + + +if __name__ == '__main__': + main() diff --git a/contrib/python/requests/requests/hooks.py b/contrib/python/requests/requests/hooks.py index 
7a51f212c8..2117d0a82a 100644 --- a/contrib/python/requests/requests/hooks.py +++ b/contrib/python/requests/requests/hooks.py @@ -23,8 +23,8 @@ def default_hooks(): def dispatch_hook(key, hooks, hook_data, **kwargs): """Dispatches a hook dictionary on a given piece of data.""" hooks = hooks or {} - hooks = hooks.get(key) - if hooks: + hooks = hooks.get(key) + if hooks: if hasattr(hooks, '__call__'): hooks = [hooks] for hook in hooks: diff --git a/contrib/python/requests/requests/models.py b/contrib/python/requests/requests/models.py index dfbea854f9..e6c79f6506 100644 --- a/contrib/python/requests/requests/models.py +++ b/contrib/python/requests/requests/models.py @@ -8,52 +8,52 @@ This module contains the primary objects that power Requests. """ import datetime -import sys +import sys -# Import encoding now, to avoid implicit import later. -# Implicit import within threads may cause LookupError when standard library is in a ZIP, +# Import encoding now, to avoid implicit import later. +# Implicit import within threads may cause LookupError when standard library is in a ZIP, # such as in Embedded Python. See https://github.com/psf/requests/issues/3578. -import encodings.idna - -from urllib3.fields import RequestField -from urllib3.filepost import encode_multipart_formdata -from urllib3.util import parse_url -from urllib3.exceptions import ( - DecodeError, ReadTimeoutError, ProtocolError, LocationParseError) - -from io import UnsupportedOperation +import encodings.idna + +from urllib3.fields import RequestField +from urllib3.filepost import encode_multipart_formdata +from urllib3.util import parse_url +from urllib3.exceptions import ( + DecodeError, ReadTimeoutError, ProtocolError, LocationParseError) + +from io import UnsupportedOperation from .hooks import default_hooks from .structures import CaseInsensitiveDict from .auth import HTTPBasicAuth -from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar +from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar from .exceptions import ( HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError, ContentDecodingError, ConnectionError, StreamConsumedError, InvalidJSONError) from .exceptions import JSONDecodeError as RequestsJSONDecodeError -from ._internal_utils import to_native_string, unicode_is_ascii +from ._internal_utils import to_native_string, unicode_is_ascii from .utils import ( guess_filename, get_auth_from_url, requote_uri, stream_decode_response_unicode, to_key_val_list, parse_header_links, - iter_slices, guess_json_utf, super_len, check_header_validity) + iter_slices, guess_json_utf, super_len, check_header_validity) from .compat import ( Callable, Mapping, - cookielib, urlunparse, urlsplit, urlencode, str, bytes, + cookielib, urlunparse, urlsplit, urlencode, str, bytes, is_py2, chardet, builtin_str, basestring, JSONDecodeError) -from .compat import json as complexjson +from .compat import json as complexjson from .status_codes import codes #: The set of HTTP status codes that indicate an automatically #: processable redirect. 
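The dispatch_hook() change in the hooks.py hunk above concerns the per-request hook dictionary; a small sketch of the public hook interface it serves (the httpbin.org URL is purely illustrative):

```python
import requests

def log_response(response, *args, **kwargs):
    # A 'response' hook receives the Response; returning it (or None)
    # leaves the response that the caller sees unchanged.
    print(response.status_code, response.url)
    return response

r = requests.get('https://httpbin.org/get', hooks={'response': log_response})
```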
REDIRECT_STATI = ( - codes.moved, # 301 - codes.found, # 302 - codes.other, # 303 - codes.temporary_redirect, # 307 - codes.permanent_redirect, # 308 + codes.moved, # 301 + codes.found, # 302 + codes.other, # 303 + codes.temporary_redirect, # 307 + codes.permanent_redirect, # 308 ) - + DEFAULT_REDIRECT_LIMIT = 30 CONTENT_CHUNK_SIZE = 10 * 1024 ITER_CHUNK_SIZE = 512 @@ -113,10 +113,10 @@ class RequestEncodingMixin(object): """Build the body for a multipart/form-data request. Will successfully encode files when passed as a dict or a list of - tuples. Order is retained if data is a list of tuples but arbitrary + tuples. Order is retained if data is a list of tuples but arbitrary if parameters are supplied as a dict. - The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) - or 4-tuples (filename, fileobj, contentype, custom_headers). + The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) + or 4-tuples (filename, fileobj, contentype, custom_headers). """ if (not files): raise ValueError("Files must be provided.") @@ -155,16 +155,16 @@ class RequestEncodingMixin(object): fn = guess_filename(v) or k fp = v - if isinstance(fp, (str, bytes, bytearray)): - fdata = fp + if isinstance(fp, (str, bytes, bytearray)): + fdata = fp elif hasattr(fp, 'read'): fdata = fp.read() elif fp is None: continue - else: + else: fdata = fp - - rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) + + rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) rf.make_multipart(content_type=ft) new_fields.append(rf) @@ -209,7 +209,7 @@ class Request(RequestHooksMixin): :param data: the body to attach to the request. If a dictionary or list of tuples ``[(key, value)]`` is provided, form-encoding will take place. - :param json: json for the body to attach to the request (if files or data is not specified). + :param json: json for the body to attach to the request (if files or data is not specified). :param params: URL parameters to append to the URL. If a dictionary or list of tuples ``[(key, value)]`` is provided, form-encoding will take place. @@ -223,11 +223,11 @@ class Request(RequestHooksMixin): >>> req = requests.Request('GET', 'https://httpbin.org/get') >>> req.prepare() <PreparedRequest [GET]> - """ + """ def __init__(self, - method=None, url=None, headers=None, files=None, data=None, - params=None, auth=None, cookies=None, hooks=None, json=None): + method=None, url=None, headers=None, files=None, data=None, + params=None, auth=None, cookies=None, hooks=None, json=None): # Default empty dicts for dict params. data = [] if data is None else data @@ -306,12 +306,12 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): self.body = None #: dictionary of callback hooks, for internal usage. self.hooks = default_hooks() - #: integer denoting starting position of a readable file-like body. - self._body_position = None + #: integer denoting starting position of a readable file-like body. 
+ self._body_position = None - def prepare(self, - method=None, url=None, headers=None, files=None, data=None, - params=None, auth=None, cookies=None, hooks=None, json=None): + def prepare(self, + method=None, url=None, headers=None, files=None, data=None, + params=None, auth=None, cookies=None, hooks=None, json=None): """Prepares the entire request with the given parameters.""" self.prepare_method(method) @@ -320,7 +320,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): self.prepare_cookies(cookies) self.prepare_body(data, files, json) self.prepare_auth(auth, url) - + # Note that prepare_auth must be last to enable authentication schemes # such as OAuth to work on a fully prepared request. @@ -335,32 +335,32 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): p.method = self.method p.url = self.url p.headers = self.headers.copy() if self.headers is not None else None - p._cookies = _copy_cookie_jar(self._cookies) + p._cookies = _copy_cookie_jar(self._cookies) p.body = self.body p.hooks = self.hooks - p._body_position = self._body_position + p._body_position = self._body_position return p def prepare_method(self, method): """Prepares the given HTTP method.""" self.method = method if self.method is not None: - self.method = to_native_string(self.method.upper()) - - @staticmethod - def _get_idna_encoded_host(host): - import idna - - try: - host = idna.encode(host, uts46=True).decode('utf-8') - except idna.IDNAError: - raise UnicodeError - return host - + self.method = to_native_string(self.method.upper()) + + @staticmethod + def _get_idna_encoded_host(host): + import idna + + try: + host = idna.encode(host, uts46=True).decode('utf-8') + except idna.IDNAError: + raise UnicodeError + return host + def prepare_url(self, url, params): """Prepares the given HTTP URL.""" #: Accept objects that have string representations. - #: We're unable to blindly call unicode/str functions + #: We're unable to blindly call unicode/str functions #: as this will include the bytestring indicator (b'') #: on python 3.x. #: https://github.com/psf/requests/pull/2238 @@ -369,9 +369,9 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): else: url = unicode(url) if is_py2 else str(url) - # Remove leading whitespaces from url - url = url.lstrip() - + # Remove leading whitespaces from url + url = url.lstrip() + # Don't do any URL preparation for non-HTTP schemes like `mailto`, # `data` etc to work around exceptions from `url_parse`, which # handles RFC 3986 only. @@ -387,22 +387,22 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): if not scheme: error = ("Invalid URL {0!r}: No scheme supplied. Perhaps you meant http://{0}?") - error = error.format(to_native_string(url, 'utf8')) - - raise MissingSchema(error) + error = error.format(to_native_string(url, 'utf8')) + raise MissingSchema(error) + if not host: raise InvalidURL("Invalid URL %r: No host supplied" % url) - # In general, we want to try IDNA encoding the hostname if the string contains - # non-ASCII characters. This allows users to automatically get the correct IDNA - # behaviour. For strings containing only ASCII characters, we need to also verify - # it doesn't start with a wildcard (*), before allowing the unencoded hostname. - if not unicode_is_ascii(host): - try: - host = self._get_idna_encoded_host(host) - except UnicodeError: - raise InvalidURL('URL has an invalid label.') + # In general, we want to try IDNA encoding the hostname if the string contains + # non-ASCII characters. 
This allows users to automatically get the correct IDNA + # behaviour. For strings containing only ASCII characters, we need to also verify + # it doesn't start with a wildcard (*), before allowing the unencoded hostname. + if not unicode_is_ascii(host): + try: + host = self._get_idna_encoded_host(host) + except UnicodeError: + raise InvalidURL('URL has an invalid label.') elif host.startswith((u'*', u'.')): raise InvalidURL('URL has an invalid label.') @@ -430,9 +430,9 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): if isinstance(fragment, str): fragment = fragment.encode('utf-8') - if isinstance(params, (str, bytes)): - params = to_native_string(params) - + if isinstance(params, (str, bytes)): + params = to_native_string(params) + enc_params = self._encode_params(params) if enc_params: if query: @@ -446,13 +446,13 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): def prepare_headers(self, headers): """Prepares the given HTTP headers.""" - self.headers = CaseInsensitiveDict() + self.headers = CaseInsensitiveDict() if headers: - for header in headers.items(): - # Raise exception on invalid header value. - check_header_validity(header) - name, value = header - self.headers[to_native_string(name)] = value + for header in headers.items(): + # Raise exception on invalid header value. + check_header_validity(header) + name, value = header + self.headers[to_native_string(name)] = value def prepare_body(self, data, files, json=None): """Prepares the given HTTP body data.""" @@ -464,9 +464,9 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): body = None content_type = None - if not data and json is not None: - # urllib3 requires a bytes-like body. Python 2's json.dumps - # provides this natively, but Python 3 gives a Unicode string. + if not data and json is not None: + # urllib3 requires a bytes-like body. Python 2's json.dumps + # provides this natively, but Python 3 gives a Unicode string. content_type = 'application/json' try: @@ -474,8 +474,8 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): except ValueError as ve: raise InvalidJSONError(ve, request=self) - if not isinstance(body, bytes): - body = body.encode('utf-8') + if not isinstance(body, bytes): + body = body.encode('utf-8') is_stream = all([ hasattr(data, '__iter__'), @@ -490,21 +490,21 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): body = data - if getattr(body, 'tell', None) is not None: - # Record the current file position before reading. - # This will allow us to rewind a file in the event - # of a redirect. - try: - self._body_position = body.tell() - except (IOError, OSError): - # This differentiates from None, allowing us to catch - # a failed `tell()` later when trying to rewind the body - self._body_position = object() - + if getattr(body, 'tell', None) is not None: + # Record the current file position before reading. + # This will allow us to rewind a file in the event + # of a redirect. 
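A short sketch of the `json=` path through prepare_body() shown in this hunk; httpbin.org is only an illustrative target and the request is never actually sent:

```python
from requests import Request

# With json= (and no data/files), prepare_body() serialises the object,
# encodes it to bytes and sets the Content-Type header.
p = Request('POST', 'https://httpbin.org/post', json={'a': 1}).prepare()
print(p.headers['Content-Type'])   # 'application/json'
print(p.body)                      # b'{"a": 1}'
```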
+ try: + self._body_position = body.tell() + except (IOError, OSError): + # This differentiates from None, allowing us to catch + # a failed `tell()` later when trying to rewind the body + self._body_position = object() + if files: raise NotImplementedError('Streamed bodies and files are mutually exclusive.') - if length: + if length: self.headers['Content-Length'] = builtin_str(length) else: self.headers['Transfer-Encoding'] = 'chunked' @@ -513,7 +513,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): if files: (body, content_type) = self._encode_files(files, data) else: - if data: + if data: body = self._encode_params(data) if isinstance(data, basestring) or hasattr(data, 'read'): content_type = None @@ -529,16 +529,16 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): self.body = body def prepare_content_length(self, body): - """Prepare Content-Length header based on request method and body""" - if body is not None: - length = super_len(body) - if length: - # If length exists, set it. Otherwise, we fallback - # to Transfer-Encoding: chunked. - self.headers['Content-Length'] = builtin_str(length) - elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None: - # Set Content-Length to 0 for methods that can have a body - # but don't provide one. (i.e. not GET or HEAD) + """Prepare Content-Length header based on request method and body""" + if body is not None: + length = super_len(body) + if length: + # If length exists, set it. Otherwise, we fallback + # to Transfer-Encoding: chunked. + self.headers['Content-Length'] = builtin_str(length) + elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None: + # Set Content-Length to 0 for methods that can have a body + # but don't provide one. (i.e. not GET or HEAD) self.headers['Content-Length'] = '0' def prepare_auth(self, auth, url=''): @@ -564,16 +564,16 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): self.prepare_content_length(self.body) def prepare_cookies(self, cookies): - """Prepares the given HTTP cookie data. - - This function eventually generates a ``Cookie`` header from the - given cookies using cookielib. Due to cookielib's design, the header - will not be regenerated if it already exists, meaning this function - can only be called once for the life of the - :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls - to ``prepare_cookies`` will have no actual effect, unless the "Cookie" - header is removed beforehand. - """ + """Prepares the given HTTP cookie data. + + This function eventually generates a ``Cookie`` header from the + given cookies using cookielib. Due to cookielib's design, the header + will not be regenerated if it already exists, meaning this function + can only be called once for the life of the + :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls + to ``prepare_cookies`` will have no actual effect, unless the "Cookie" + header is removed beforehand. + """ if isinstance(cookies, cookielib.CookieJar): self._cookies = cookies else: @@ -585,10 +585,10 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): def prepare_hooks(self, hooks): """Prepares the given hooks.""" - # hooks can be passed as None to the prepare method and to this - # method. To prevent iterating over None, simply use an empty list - # if hooks is False-y - hooks = hooks or [] + # hooks can be passed as None to the prepare method and to this + # method. 
To prevent iterating over None, simply use an empty list + # if hooks is False-y + hooks = hooks or [] for event in hooks: self.register_hook(event, hooks[event]) @@ -599,14 +599,14 @@ class Response(object): """ __attrs__ = [ - '_content', 'status_code', 'headers', 'url', 'history', - 'encoding', 'reason', 'cookies', 'elapsed', 'request' + '_content', 'status_code', 'headers', 'url', 'history', + 'encoding', 'reason', 'cookies', 'elapsed', 'request' ] def __init__(self): self._content = False self._content_consumed = False - self._next = None + self._next = None #: Integer Code of responded HTTP Status, e.g. 404 or 200. self.status_code = None @@ -639,23 +639,23 @@ class Response(object): self.cookies = cookiejar_from_dict({}) #: The amount of time elapsed between sending the request - #: and the arrival of the response (as a timedelta). - #: This property specifically measures the time taken between sending - #: the first byte of the request and finishing parsing the headers. It - #: is therefore unaffected by consuming the response content or the - #: value of the ``stream`` keyword argument. + #: and the arrival of the response (as a timedelta). + #: This property specifically measures the time taken between sending + #: the first byte of the request and finishing parsing the headers. It + #: is therefore unaffected by consuming the response content or the + #: value of the ``stream`` keyword argument. self.elapsed = datetime.timedelta(0) #: The :class:`PreparedRequest <PreparedRequest>` object to which this #: is a response. self.request = None - def __enter__(self): - return self - - def __exit__(self, *args): - self.close() - + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + def __getstate__(self): # Consume everything; accessing the content attribute makes # sure the content has been fully read. @@ -676,23 +676,23 @@ class Response(object): return '<Response [%s]>' % (self.status_code) def __bool__(self): - """Returns True if :attr:`status_code` is less than 400. - - This attribute checks if the status code of the response is between - 400 and 600 to see if there was a client error or a server error. If - the status code, is between 200 and 400, this will return True. This - is **not** a check to see if the response code is ``200 OK``. - """ + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ return self.ok def __nonzero__(self): - """Returns True if :attr:`status_code` is less than 400. - - This attribute checks if the status code of the response is between - 400 and 600 to see if there was a client error or a server error. If - the status code, is between 200 and 400, this will return True. This - is **not** a check to see if the response code is ``200 OK``. - """ + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. 
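A brief illustration of the truthiness semantics described in `__bool__`/`__nonzero__`, together with the context-manager support added by `__enter__`/`__exit__`; the status endpoint is an illustrative assumption:

```python
import requests

with requests.get('https://httpbin.org/status/204', stream=True) as r:
    print(bool(r))          # True: 204 is below 400, even though it is not 200 OK
    r.raise_for_status()    # no-op here; raises HTTPError only for 4xx/5xx
```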
+ """ return self.ok def __iter__(self): @@ -702,12 +702,12 @@ class Response(object): @property def ok(self): """Returns True if :attr:`status_code` is less than 400, False if not. - - This attribute checks if the status code of the response is between - 400 and 600 to see if there was a client error or a server error. If + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If the status code is between 200 and 400, this will return True. This - is **not** a check to see if the response code is ``200 OK``. - """ + is **not** a check to see if the response code is ``200 OK``. + """ try: self.raise_for_status() except HTTPError: @@ -723,15 +723,15 @@ class Response(object): @property def is_permanent_redirect(self): - """True if this Response one of the permanent versions of redirect.""" + """True if this Response one of the permanent versions of redirect.""" return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect)) @property - def next(self): - """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" - return self._next - - @property + def next(self): + """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" + return self._next + + @property def apparent_encoding(self): """The apparent encoding, provided by the charset_normalizer or chardet libraries.""" return chardet.detect(self.content)['encoding'] @@ -743,19 +743,19 @@ class Response(object): read into memory. This is not necessarily the length of each item returned as decoding can take place. - chunk_size must be of type int or None. A value of None will - function differently depending on the value of `stream`. - stream=True will read data as it arrives in whatever size the - chunks are received. If stream=False, data is returned as - a single chunk. - + chunk_size must be of type int or None. A value of None will + function differently depending on the value of `stream`. + stream=True will read data as it arrives in whatever size the + chunks are received. If stream=False, data is returned as + a single chunk. + If decode_unicode is True, content will be decoded using the best available encoding based on the response. """ - + def generate(): - # Special case for urllib3. - if hasattr(self.raw, 'stream'): + # Special case for urllib3. + if hasattr(self.raw, 'stream'): try: for chunk in self.raw.stream(chunk_size, decode_content=True): yield chunk @@ -765,7 +765,7 @@ class Response(object): raise ContentDecodingError(e) except ReadTimeoutError as e: raise ConnectionError(e) - else: + else: # Standard file-like object. while True: chunk = self.raw.read(chunk_size) @@ -777,8 +777,8 @@ class Response(object): if self._content_consumed and isinstance(self._content, bool): raise StreamConsumedError() - elif chunk_size is not None and not isinstance(chunk_size, int): - raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size)) + elif chunk_size is not None and not isinstance(chunk_size, int): + raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size)) # simulate reading small chunks of the content reused_chunks = iter_slices(self._content, chunk_size) @@ -795,8 +795,8 @@ class Response(object): """Iterates over the response data, one line at a time. When stream=True is set on the request, this avoids reading the content at once into memory for large responses. - - .. 
note:: This method is not reentrant safe. + + .. note:: This method is not reentrant safe. """ pending = None @@ -828,13 +828,13 @@ class Response(object): if self._content is False: # Read the contents. - if self._content_consumed: - raise RuntimeError( - 'The content for this response was already consumed') + if self._content_consumed: + raise RuntimeError( + 'The content for this response was already consumed') - if self.status_code == 0 or self.raw is None: + if self.status_code == 0 or self.raw is None: self._content = None - else: + else: self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b'' self._content_consumed = True @@ -881,14 +881,14 @@ class Response(object): return content def json(self, **kwargs): - r"""Returns the json-encoded content of a response, if any. + r"""Returns the json-encoded content of a response, if any. :param \*\*kwargs: Optional arguments that ``json.loads`` takes. :raises requests.exceptions.JSONDecodeError: If the response body does not contain valid json. """ - if not self.encoding and self.content and len(self.content) > 3: + if not self.encoding and self.content and len(self.content) > 3: # No encoding set. JSON RFC 4627 section 3 states we should expect # UTF-8, -16 or -32. Detect which one to use; If the detection or # decoding fails, fall back to `self.text` (using charset_normalizer to make @@ -896,9 +896,9 @@ class Response(object): encoding = guess_json_utf(self.content) if encoding is not None: try: - return complexjson.loads( - self.content.decode(encoding), **kwargs - ) + return complexjson.loads( + self.content.decode(encoding), **kwargs + ) except UnicodeDecodeError: # Wrong UTF codec detected; usually because it's not UTF-8 # but some other 8-bit codec. This is an RFC violation, @@ -938,23 +938,23 @@ class Response(object): """Raises :class:`HTTPError`, if one occurred.""" http_error_msg = '' - if isinstance(self.reason, bytes): - # We attempt to decode utf-8 first because some servers - # choose to localize their reason strings. If the string - # isn't utf-8, we fall back to iso-8859-1 for all other - # encodings. (See PR #3538) - try: - reason = self.reason.decode('utf-8') - except UnicodeDecodeError: - reason = self.reason.decode('iso-8859-1') - else: - reason = self.reason + if isinstance(self.reason, bytes): + # We attempt to decode utf-8 first because some servers + # choose to localize their reason strings. If the string + # isn't utf-8, we fall back to iso-8859-1 for all other + # encodings. 
(See PR #3538) + try: + reason = self.reason.decode('utf-8') + except UnicodeDecodeError: + reason = self.reason.decode('iso-8859-1') + else: + reason = self.reason if 400 <= self.status_code < 500: - http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url) + http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url) elif 500 <= self.status_code < 600: - http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url) + http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url) if http_error_msg: raise HTTPError(http_error_msg, response=self) @@ -965,9 +965,9 @@ class Response(object): *Note: Should not normally need to be called explicitly.* """ - if not self._content_consumed: - self.raw.close() - - release_conn = getattr(self.raw, 'release_conn', None) - if release_conn is not None: - release_conn() + if not self._content_consumed: + self.raw.close() + + release_conn = getattr(self.raw, 'release_conn', None) + if release_conn is not None: + release_conn() diff --git a/contrib/python/requests/requests/packages.py b/contrib/python/requests/requests/packages.py index 00196bff25..2430e8e082 100644 --- a/contrib/python/requests/requests/packages.py +++ b/contrib/python/requests/requests/packages.py @@ -1,5 +1,5 @@ -import sys - +import sys + try: import chardet except ImportError: @@ -8,19 +8,19 @@ except ImportError: warnings.filterwarnings('ignore', 'Trying to detect', module='charset_normalizer') -# This code exists for backwards compatibility reasons. -# I don't like it either. Just look the other way. :) - +# This code exists for backwards compatibility reasons. +# I don't like it either. Just look the other way. :) + for package in ('urllib3', 'idna'): - locals()[package] = __import__(package) - # This traversal is apparently necessary such that the identities are - # preserved (requests.packages.urllib3.* is urllib3.*) - for mod in list(sys.modules): - if mod == package or mod.startswith(package + '.'): - sys.modules['requests.packages.' + mod] = sys.modules[mod] - + locals()[package] = __import__(package) + # This traversal is apparently necessary such that the identities are + # preserved (requests.packages.urllib3.* is urllib3.*) + for mod in list(sys.modules): + if mod == package or mod.startswith(package + '.'): + sys.modules['requests.packages.' + mod] = sys.modules[mod] + target = chardet.__name__ for mod in list(sys.modules): if mod == target or mod.startswith(target + '.'): sys.modules['requests.packages.' + target.replace(target, 'chardet')] = sys.modules[mod] -# Kinda cool, though, right? +# Kinda cool, though, right? diff --git a/contrib/python/requests/requests/sessions.py b/contrib/python/requests/requests/sessions.py index 3f59cab922..0c2ed2c6d7 100644 --- a/contrib/python/requests/requests/sessions.py +++ b/contrib/python/requests/requests/sessions.py @@ -9,8 +9,8 @@ requests (cookies, auth, proxies). 
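The raise_for_status() hunk above decodes the reason phrase and formats the error text; a hedged usage sketch, where the endpoint and the exact reason phrase depend on the server:

```python
import requests
from requests.exceptions import HTTPError

r = requests.get('https://httpbin.org/status/404')
try:
    r.raise_for_status()
except HTTPError as exc:
    # e.g. "404 Client Error: NOT FOUND for url: https://httpbin.org/status/404"
    print(exc)
```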
""" import os import sys -import time -from datetime import timedelta +import time +from datetime import timedelta from collections import OrderedDict from .auth import _basic_auth_str @@ -19,11 +19,11 @@ from .cookies import ( cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT from .hooks import default_hooks, dispatch_hook -from ._internal_utils import to_native_string +from ._internal_utils import to_native_string from .utils import to_key_val_list, default_headers, DEFAULT_PORTS from .exceptions import ( TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) - + from .structures import CaseInsensitiveDict from .adapters import HTTPAdapter @@ -37,19 +37,19 @@ from .status_codes import codes # formerly defined here, reexposed here for backward compatibility from .models import REDIRECT_STATI -# Preferred clock, based on which one is more accurate on a given system. +# Preferred clock, based on which one is more accurate on a given system. if sys.platform == 'win32': try: # Python 3.4+ - preferred_clock = time.perf_counter - except AttributeError: # Earlier than Python 3. - preferred_clock = time.clock -else: - preferred_clock = time.time + preferred_clock = time.perf_counter + except AttributeError: # Earlier than Python 3. + preferred_clock = time.clock +else: + preferred_clock = time.time def merge_setting(request_setting, session_setting, dict_class=OrderedDict): - """Determines appropriate setting for a given request, taking into account - the explicit setting on that request, and the setting in the session. If a + """Determines appropriate setting for a given request, taking into account + the explicit setting on that request, and the setting in the session. If a setting is a dictionary, they will be merged together using `dict_class` """ @@ -69,17 +69,17 @@ def merge_setting(request_setting, session_setting, dict_class=OrderedDict): merged_setting = dict_class(to_key_val_list(session_setting)) merged_setting.update(to_key_val_list(request_setting)) - # Remove keys that are set to None. Extract keys first to avoid altering - # the dictionary during iteration. - none_keys = [k for (k, v) in merged_setting.items() if v is None] - for key in none_keys: - del merged_setting[key] + # Remove keys that are set to None. Extract keys first to avoid altering + # the dictionary during iteration. + none_keys = [k for (k, v) in merged_setting.items() if v is None] + for key in none_keys: + del merged_setting[key] return merged_setting def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): - """Properly merges both requests and session hooks. + """Properly merges both requests and session hooks. This is necessary because when request_hooks == {'response': []}, the merge breaks Session hooks entirely. @@ -94,28 +94,28 @@ def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): class SessionRedirectMixin(object): - - def get_redirect_target(self, resp): - """Receives a Response. Returns a redirect URI or ``None``""" - # Due to the nature of how requests processes redirects this method will - # be called at least once upon the original response and at least twice - # on each subsequent redirect response (if any). - # If a custom mixin is used to handle this logic, it may be advantageous - # to cache the redirect location onto the response object as a private - # attribute. 
- if resp.is_redirect: - location = resp.headers['location'] - # Currently the underlying http module on py3 decode headers - # in latin1, but empirical evidence suggests that latin1 is very - # rarely used with non-ASCII characters in HTTP headers. - # It is more likely to get UTF8 header rather than latin1. - # This causes incorrect handling of UTF8 encoded location headers. - # To solve this, we re-encode the location in latin1. - if is_py3: - location = location.encode('latin1') - return to_native_string(location, 'utf8') - return None - + + def get_redirect_target(self, resp): + """Receives a Response. Returns a redirect URI or ``None``""" + # Due to the nature of how requests processes redirects this method will + # be called at least once upon the original response and at least twice + # on each subsequent redirect response (if any). + # If a custom mixin is used to handle this logic, it may be advantageous + # to cache the redirect location onto the response object as a private + # attribute. + if resp.is_redirect: + location = resp.headers['location'] + # Currently the underlying http module on py3 decode headers + # in latin1, but empirical evidence suggests that latin1 is very + # rarely used with non-ASCII characters in HTTP headers. + # It is more likely to get UTF8 header rather than latin1. + # This causes incorrect handling of UTF8 encoded location headers. + # To solve this, we re-encode the location in latin1. + if is_py3: + location = location.encode('latin1') + return to_native_string(location, 'utf8') + return None + def should_strip_auth(self, old_url, new_url): """Decide whether Authorization header should be removed when redirecting""" old_parsed = urlparse(old_url) @@ -142,27 +142,27 @@ class SessionRedirectMixin(object): return changed_port or changed_scheme def resolve_redirects(self, resp, req, stream=False, timeout=None, - verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs): - """Receives a Response. Returns a generator of Responses or Requests.""" + verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs): + """Receives a Response. Returns a generator of Responses or Requests.""" - hist = [] # keep track of history + hist = [] # keep track of history - url = self.get_redirect_target(resp) + url = self.get_redirect_target(resp) previous_fragment = urlparse(req.url).fragment - while url: + while url: prepared_request = req.copy() - # Update history and keep track of redirects. - # resp.history must ignore the original request in this loop - hist.append(resp) - resp.history = hist[1:] + # Update history and keep track of redirects. + # resp.history must ignore the original request in this loop + hist.append(resp) + resp.history = hist[1:] try: resp.content # Consume socket so it can be released except (ChunkedEncodingError, ContentDecodingError, RuntimeError): resp.raw.read(decode_content=False) - if len(resp.history) >= self.max_redirects: + if len(resp.history) >= self.max_redirects: raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp) # Release the connection back into the pool. 
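A small sketch of observing this redirect machinery from the outside by disabling automatic redirects; the redirect endpoint and the printed values are illustrative assumptions:

```python
import requests

r = requests.get('https://httpbin.org/redirect/1', allow_redirects=False)
print(r.status_code, r.is_redirect)   # e.g. 302 True
print(r.headers['Location'])          # e.g. '/get'
print(r.next)                         # next hop as a PreparedRequest, built via resolve_redirects()
```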
@@ -191,68 +191,68 @@ class SessionRedirectMixin(object): prepared_request.url = to_native_string(url) - self.rebuild_method(prepared_request, resp) + self.rebuild_method(prepared_request, resp) # https://github.com/psf/requests/issues/1084 if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): # https://github.com/psf/requests/issues/3490 - purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding') - for header in purged_headers: - prepared_request.headers.pop(header, None) + purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding') + for header in purged_headers: + prepared_request.headers.pop(header, None) prepared_request.body = None headers = prepared_request.headers headers.pop('Cookie', None) - # Extract any cookies sent on the response to the cookiejar - # in the new request. Because we've mutated our copied prepared - # request, use the old one that we haven't yet touched. - extract_cookies_to_jar(prepared_request._cookies, req, resp.raw) - merge_cookies(prepared_request._cookies, self.cookies) + # Extract any cookies sent on the response to the cookiejar + # in the new request. Because we've mutated our copied prepared + # request, use the old one that we haven't yet touched. + extract_cookies_to_jar(prepared_request._cookies, req, resp.raw) + merge_cookies(prepared_request._cookies, self.cookies) prepared_request.prepare_cookies(prepared_request._cookies) # Rebuild auth and proxy information. proxies = self.rebuild_proxies(prepared_request, proxies) self.rebuild_auth(prepared_request, resp) - # A failed tell() sets `_body_position` to `object()`. This non-None - # value ensures `rewindable` will be True, allowing us to raise an - # UnrewindableBodyError, instead of hanging the connection. - rewindable = ( - prepared_request._body_position is not None and - ('Content-Length' in headers or 'Transfer-Encoding' in headers) - ) - - # Attempt to rewind consumed file-like object. - if rewindable: - rewind_body(prepared_request) - + # A failed tell() sets `_body_position` to `object()`. This non-None + # value ensures `rewindable` will be True, allowing us to raise an + # UnrewindableBodyError, instead of hanging the connection. + rewindable = ( + prepared_request._body_position is not None and + ('Content-Length' in headers or 'Transfer-Encoding' in headers) + ) + + # Attempt to rewind consumed file-like object. + if rewindable: + rewind_body(prepared_request) + # Override the original request. req = prepared_request - if yield_requests: - yield req - else: - - resp = self.send( - req, - stream=stream, - timeout=timeout, - verify=verify, - cert=cert, - proxies=proxies, - allow_redirects=False, - **adapter_kwargs - ) - - extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) - - # extract redirect url, if any, for the next loop - url = self.get_redirect_target(resp) - yield resp - + if yield_requests: + yield req + else: + + resp = self.send( + req, + stream=stream, + timeout=timeout, + verify=verify, + cert=cert, + proxies=proxies, + allow_redirects=False, + **adapter_kwargs + ) + + extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) + + # extract redirect url, if any, for the next loop + url = self.get_redirect_target(resp) + yield resp + def rebuild_auth(self, prepared_request, response): - """When being redirected we may want to strip authentication from the + """When being redirected we may want to strip authentication from the request to avoid leaking credentials. 
This method intelligently removes and reapplies authentication where possible to avoid credential loss. """ @@ -261,7 +261,7 @@ class SessionRedirectMixin(object): if 'Authorization' in headers and self.should_strip_auth(response.request.url, url): # If we get redirected to a new host, we should strip out any - # authentication headers. + # authentication headers. del headers['Authorization'] # .netrc might have more auth for us on our new host. @@ -270,7 +270,7 @@ class SessionRedirectMixin(object): prepared_request.prepare_auth(new_auth) def rebuild_proxies(self, prepared_request, proxies): - """This method re-evaluates the proxy configuration by considering the + """This method re-evaluates the proxy configuration by considering the environment variables. If we are redirected to a URL covered by NO_PROXY, we strip the proxy configuration. Otherwise, we set missing proxy keys for this URL (in case they were stripped by a previous @@ -278,8 +278,8 @@ class SessionRedirectMixin(object): This method also replaces the Proxy-Authorization header where necessary. - - :rtype: dict + + :rtype: dict """ headers = prepared_request.headers scheme = urlparse(prepared_request.url).scheme @@ -298,29 +298,29 @@ class SessionRedirectMixin(object): return new_proxies - def rebuild_method(self, prepared_request, response): - """When being redirected we may want to change the method of the request - based on certain specs or browser behavior. - """ - method = prepared_request.method + def rebuild_method(self, prepared_request, response): + """When being redirected we may want to change the method of the request + based on certain specs or browser behavior. + """ + method = prepared_request.method # https://tools.ietf.org/html/rfc7231#section-6.4.4 - if response.status_code == codes.see_other and method != 'HEAD': - method = 'GET' - - # Do what the browsers do, despite standards... - # First, turn 302s into GETs. - if response.status_code == codes.found and method != 'HEAD': - method = 'GET' - - # Second, if a POST is responded to with a 301, turn it into a GET. - # This bizarre behaviour is explained in Issue 1704. - if response.status_code == codes.moved and method == 'POST': - method = 'GET' - - prepared_request.method = method - - + if response.status_code == codes.see_other and method != 'HEAD': + method = 'GET' + + # Do what the browsers do, despite standards... + # First, turn 302s into GETs. + if response.status_code == codes.found and method != 'HEAD': + method = 'GET' + + # Second, if a POST is responded to with a 301, turn it into a GET. + # This bizarre behaviour is explained in Issue 1704. + if response.status_code == codes.moved and method == 'POST': + method = 'GET' + + prepared_request.method = method + + class Session(SessionRedirectMixin): """A Requests session. @@ -331,13 +331,13 @@ class Session(SessionRedirectMixin): >>> import requests >>> s = requests.Session() >>> s.get('https://httpbin.org/get') - <Response [200]> - - Or as a context manager:: - - >>> with requests.Session() as s: + <Response [200]> + + Or as a context manager:: + + >>> with requests.Session() as s: ... s.get('https://httpbin.org/get') - <Response [200]> + <Response [200]> """ __attrs__ = [ @@ -357,9 +357,9 @@ class Session(SessionRedirectMixin): #: :class:`Request <Request>`. self.auth = None - #: Dictionary mapping protocol or protocol and host to the URL of the proxy - #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to - #: be used on each :class:`Request <Request>`. 
+ #: Dictionary mapping protocol or protocol and host to the URL of the proxy + #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to + #: be used on each :class:`Request <Request>`. self.proxies = {} #: Event-handling hooks. @@ -383,18 +383,18 @@ class Session(SessionRedirectMixin): #: Only set this to `False` for testing. self.verify = True - #: SSL client certificate default, if String, path to ssl client - #: cert file (.pem). If Tuple, ('cert', 'key') pair. + #: SSL client certificate default, if String, path to ssl client + #: cert file (.pem). If Tuple, ('cert', 'key') pair. self.cert = None #: Maximum number of redirects allowed. If the request exceeds this #: limit, a :class:`TooManyRedirects` exception is raised. - #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is - #: 30. + #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is + #: 30. self.max_redirects = DEFAULT_REDIRECT_LIMIT - #: Trust environment settings for proxy configuration, default - #: authentication and similar. + #: Trust environment settings for proxy configuration, default + #: authentication and similar. self.trust_env = True #: A CookieJar containing all currently outstanding cookies set on this @@ -422,7 +422,7 @@ class Session(SessionRedirectMixin): :param request: :class:`Request` instance to prepare with this session's settings. - :rtype: requests.PreparedRequest + :rtype: requests.PreparedRequest """ cookies = request.cookies or {} @@ -455,9 +455,9 @@ class Session(SessionRedirectMixin): return p def request(self, method, url, - params=None, data=None, headers=None, cookies=None, files=None, - auth=None, timeout=None, allow_redirects=True, proxies=None, - hooks=None, stream=None, verify=None, cert=None, json=None): + params=None, data=None, headers=None, cookies=None, files=None, + auth=None, timeout=None, allow_redirects=True, proxies=None, + hooks=None, stream=None, verify=None, cert=None, json=None): """Constructs a :class:`Request <Request>`, prepares it and sends it. Returns :class:`Response <Response>` object. @@ -478,17 +478,17 @@ class Session(SessionRedirectMixin): :param auth: (optional) Auth tuple or callable to enable Basic/Digest/Custom HTTP Auth. :param timeout: (optional) How long to wait for the server to send - data before giving up, as a float, or a :ref:`(connect timeout, - read timeout) <timeouts>` tuple. + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) <timeouts>` tuple. :type timeout: float or tuple :param allow_redirects: (optional) Set to True by default. :type allow_redirects: bool - :param proxies: (optional) Dictionary mapping protocol or protocol and - hostname to the URL of the proxy. + :param proxies: (optional) Dictionary mapping protocol or protocol and + hostname to the URL of the proxy. :param stream: (optional) whether to immediately download the response content. Defaults to ``False``. - :param verify: (optional) Either a boolean, in which case it controls whether we verify - the server's TLS certificate, or a string, in which case it must be a path + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use. Defaults to ``True``. 
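A short configuration sketch for the session attributes documented above; the proxy URL, CA-bundle path and client-certificate pair are placeholders rather than values taken from the code:

```python
import requests

s = requests.Session()
s.proxies = {'http': 'http://proxy.local:3128'}     # placeholder proxy mapping
s.verify = '/etc/ssl/certs/ca-certificates.crt'     # or simply True / False
s.cert = ('client.pem', 'client.key')               # placeholder client cert pair
s.max_redirects = 5                                 # TooManyRedirects past this limit
s.trust_env = False                                 # ignore env proxies and netrc

# timeout may be a float or a (connect, read) tuple, per the docstring above:
# r = s.get('https://example.org/', timeout=(3.05, 27))
```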
When set to ``False``, requests will accept any TLS certificate presented by the server, and will ignore hostname mismatches and/or expired @@ -497,20 +497,20 @@ class Session(SessionRedirectMixin): may be useful during local development or testing. :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. - :rtype: requests.Response + :rtype: requests.Response """ # Create the Request. req = Request( - method=method.upper(), - url=url, - headers=headers, - files=files, - data=data or {}, - json=json, - params=params or {}, - auth=auth, - cookies=cookies, - hooks=hooks, + method=method.upper(), + url=url, + headers=headers, + files=files, + data=data or {}, + json=json, + params=params or {}, + auth=auth, + cookies=cookies, + hooks=hooks, ) prep = self.prepare_request(req) @@ -531,90 +531,90 @@ class Session(SessionRedirectMixin): return resp def get(self, url, **kwargs): - r"""Sends a GET request. Returns :class:`Response` object. + r"""Sends a GET request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. - :rtype: requests.Response + :rtype: requests.Response """ kwargs.setdefault('allow_redirects', True) return self.request('GET', url, **kwargs) def options(self, url, **kwargs): - r"""Sends a OPTIONS request. Returns :class:`Response` object. + r"""Sends a OPTIONS request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. - :rtype: requests.Response + :rtype: requests.Response """ kwargs.setdefault('allow_redirects', True) return self.request('OPTIONS', url, **kwargs) def head(self, url, **kwargs): - r"""Sends a HEAD request. Returns :class:`Response` object. + r"""Sends a HEAD request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. - :rtype: requests.Response + :rtype: requests.Response """ kwargs.setdefault('allow_redirects', False) return self.request('HEAD', url, **kwargs) def post(self, url, data=None, json=None, **kwargs): - r"""Sends a POST request. Returns :class:`Response` object. + r"""Sends a POST request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) json to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. - :rtype: requests.Response + :rtype: requests.Response """ return self.request('POST', url, data=data, json=json, **kwargs) def put(self, url, data=None, **kwargs): - r"""Sends a PUT request. Returns :class:`Response` object. + r"""Sends a PUT request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. - :rtype: requests.Response + :rtype: requests.Response """ return self.request('PUT', url, data=data, **kwargs) def patch(self, url, data=None, **kwargs): - r"""Sends a PATCH request. Returns :class:`Response` object. + r"""Sends a PATCH request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. 
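The verb helpers shown above are thin wrappers around `Session.request`; a small usage sketch (httpbin.org assumed reachable):

```python
import requests

s = requests.Session()

# GET/OPTIONS default allow_redirects to True, HEAD defaults it to False.
r1 = s.get('https://httpbin.org/get', params={'q': 'requests'})
r2 = s.request('GET', 'https://httpbin.org/get', params={'q': 'requests'})  # equivalent
r3 = s.post('https://httpbin.org/post', json={'hello': 'world'})
r4 = s.head('https://httpbin.org/redirect/1')

print(r1.url == r2.url)   # True
print(r3.status_code)     # 200
print(r4.status_code)     # 302 - HEAD did not follow the redirect
```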
:param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. - :rtype: requests.Response + :rtype: requests.Response """ - return self.request('PATCH', url, data=data, **kwargs) + return self.request('PATCH', url, data=data, **kwargs) def delete(self, url, **kwargs): - r"""Sends a DELETE request. Returns :class:`Response` object. + r"""Sends a DELETE request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. - :rtype: requests.Response + :rtype: requests.Response """ return self.request('DELETE', url, **kwargs) def send(self, request, **kwargs): - """Send a given PreparedRequest. - - :rtype: requests.Response - """ + """Send a given PreparedRequest. + + :rtype: requests.Response + """ # Set defaults that the hooks can utilize to ensure they always have # the correct parameters to reproduce the previous request. kwargs.setdefault('stream', self.stream) @@ -627,7 +627,7 @@ class Session(SessionRedirectMixin): # It's possible that users might accidentally send a Request object. # Guard against that specific failure case. - if isinstance(request, Request): + if isinstance(request, Request): raise ValueError('You can only send PreparedRequests.') # Set up variables needed for resolve_redirects and dispatching of hooks @@ -639,14 +639,14 @@ class Session(SessionRedirectMixin): adapter = self.get_adapter(url=request.url) # Start time (approximately) of the request - start = preferred_clock() + start = preferred_clock() # Send the request r = adapter.send(request, **kwargs) # Total elapsed time of the request (approximately) - elapsed = preferred_clock() - start - r.elapsed = timedelta(seconds=elapsed) + elapsed = preferred_clock() - start + r.elapsed = timedelta(seconds=elapsed) # Response manipulation hooks r = dispatch_hook('response', hooks, r, **kwargs) @@ -676,29 +676,29 @@ class Session(SessionRedirectMixin): r = history.pop() r.history = history - # If redirects aren't being followed, store the response on the Request for Response.next(). - if not allow_redirects: - try: - r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs)) - except StopIteration: - pass - + # If redirects aren't being followed, store the response on the Request for Response.next(). + if not allow_redirects: + try: + r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs)) + except StopIteration: + pass + if not stream: r.content return r def merge_environment_settings(self, url, proxies, stream, verify, cert): - """ - Check the environment and merge it with some settings. - - :rtype: dict - """ + """ + Check the environment and merge it with some settings. + + :rtype: dict + """ # Gather clues from the surrounding environment. if self.trust_env: # Set environment's proxies. - no_proxy = proxies.get('no_proxy') if proxies is not None else None - env_proxies = get_environ_proxies(url, no_proxy=no_proxy) + no_proxy = proxies.get('no_proxy') if proxies is not None else None + env_proxies = get_environ_proxies(url, no_proxy=no_proxy) for (k, v) in env_proxies.items(): proxies.setdefault(k, v) @@ -718,11 +718,11 @@ class Session(SessionRedirectMixin): 'cert': cert} def get_adapter(self, url): - """ - Returns the appropriate connection adapter for the given URL. 
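A sketch of the prepare/send split used by `Session.send` above; note the guard that rejects an unprepared `Request`:

```python
import requests

s = requests.Session()
req = requests.Request('GET', 'https://httpbin.org/get', headers={'X-Demo': '1'})

prep = s.prepare_request(req)    # merges session headers, cookies and auth
resp = s.send(prep, timeout=5)   # elapsed is measured around adapter.send()

print(resp.status_code, resp.elapsed)

try:
    s.send(req)                  # a plain Request, not a PreparedRequest
except ValueError as exc:
    print(exc)                   # 'You can only send PreparedRequests.'
```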
- - :rtype: requests.adapters.BaseAdapter - """ + """ + Returns the appropriate connection adapter for the given URL. + + :rtype: requests.adapters.BaseAdapter + """ for (prefix, adapter) in self.adapters.items(): if url.lower().startswith(prefix.lower()): @@ -739,8 +739,8 @@ class Session(SessionRedirectMixin): def mount(self, prefix, adapter): """Registers a connection adapter to a prefix. - Adapters are sorted in descending order by prefix length. - """ + Adapters are sorted in descending order by prefix length. + """ self.adapters[prefix] = adapter keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] @@ -757,8 +757,8 @@ class Session(SessionRedirectMixin): def session(): - """ - Returns a :class:`Session` for context-management. + """ + Returns a :class:`Session` for context-management. .. deprecated:: 1.0.0 @@ -766,6 +766,6 @@ def session(): backwards compatibility. New code should use :class:`~requests.sessions.Session` to create a session. This may be removed at a future date. - :rtype: Session - """ + :rtype: Session + """ return Session() diff --git a/contrib/python/requests/requests/status_codes.py b/contrib/python/requests/requests/status_codes.py index d80a7cd4dd..2d7cd695b1 100644 --- a/contrib/python/requests/requests/status_codes.py +++ b/contrib/python/requests/requests/status_codes.py @@ -51,7 +51,7 @@ _codes = { 306: ('switch_proxy',), 307: ('temporary_redirect', 'temporary_moved', 'temporary'), 308: ('permanent_redirect', - 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0 + 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0 # Client Error. 400: ('bad_request', 'bad'), @@ -73,7 +73,7 @@ _codes = { 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'), 417: ('expectation_failed',), 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'), - 421: ('misdirected_request',), + 421: ('misdirected_request',), 422: ('unprocessable_entity', 'unprocessable'), 423: ('locked',), 424: ('failed_dependency', 'dependency'), @@ -99,7 +99,7 @@ _codes = { 507: ('insufficient_storage',), 509: ('bandwidth_limit_exceeded', 'bandwidth'), 510: ('not_extended',), - 511: ('network_authentication_required', 'network_auth', 'network_authentication'), + 511: ('network_authentication_required', 'network_auth', 'network_authentication'), } codes = LookupDict(name='status_codes') diff --git a/contrib/python/requests/requests/structures.py b/contrib/python/requests/requests/structures.py index 8ee0ba7a08..9836a8779d 100644 --- a/contrib/python/requests/requests/structures.py +++ b/contrib/python/requests/requests/structures.py @@ -13,7 +13,7 @@ from .compat import Mapping, MutableMapping class CaseInsensitiveDict(MutableMapping): - """A case-insensitive ``dict``-like object. + """A case-insensitive ``dict``-like object. Implements all methods and operations of ``MutableMapping`` as well as dict's ``copy``. Also @@ -37,10 +37,10 @@ class CaseInsensitiveDict(MutableMapping): If the constructor, ``.update``, or equality comparison operations are given keys that have equal ``.lower()``s, the behavior is undefined. 
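A sketch of `Session.mount` and `get_adapter`: because registered adapters are kept sorted by prefix length, a host-specific prefix wins over the generic `'https://'` one. The API hostname is hypothetical:

```python
import requests
from requests.adapters import HTTPAdapter

s = requests.Session()
s.mount('https://', HTTPAdapter(pool_maxsize=10))
s.mount('https://api.example.com', HTTPAdapter(max_retries=3))  # hypothetical host

adapter = s.get_adapter('https://api.example.com/v1/users')
print(adapter.max_retries.total)   # 3 - the longer, host-specific prefix matched

# The status-code registry built in status_codes.py:
print(requests.codes.temporary_redirect, requests.codes.permanent_redirect)  # 307 308
```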
- """ + """ def __init__(self, data=None, **kwargs): - self._store = OrderedDict() + self._store = OrderedDict() if data is None: data = {} self.update(data, **kwargs) @@ -85,7 +85,7 @@ class CaseInsensitiveDict(MutableMapping): def __repr__(self): return str(dict(self.items())) - + class LookupDict(dict): """Dictionary lookup object.""" diff --git a/contrib/python/requests/requests/utils.py b/contrib/python/requests/requests/utils.py index 611bdbe290..28a12cfb9b 100644 --- a/contrib/python/requests/requests/utils.py +++ b/contrib/python/requests/requests/utils.py @@ -9,7 +9,7 @@ that are also useful for external consumption. """ import codecs -import contextlib +import contextlib import io import os import re @@ -23,19 +23,19 @@ from collections import OrderedDict from urllib3.util import make_headers from urllib3.util import parse_url -from .__version__ import __version__ +from .__version__ import __version__ from . import certs -# to_native_string is unused here, but imported here for backwards compatibility -from ._internal_utils import to_native_string +# to_native_string is unused here, but imported here for backwards compatibility +from ._internal_utils import to_native_string from .compat import parse_http_list as _parse_list_header -from .compat import ( +from .compat import ( quote, urlparse, bytes, str, unquote, getproxies, - proxy_bypass, urlunparse, basestring, integer_types, is_py3, + proxy_bypass, urlunparse, basestring, integer_types, is_py3, proxy_bypass_environment, getproxies_environment, Mapping) -from .cookies import cookiejar_from_dict +from .cookies import cookiejar_from_dict from .structures import CaseInsensitiveDict -from .exceptions import ( - InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError) +from .exceptions import ( + InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError) NETRC_FILES = ('.netrc', '_netrc') @@ -50,10 +50,10 @@ DEFAULT_ACCEPT_ENCODING = ", ".join( if sys.platform == 'win32': - # provide a proxy_bypass version on Windows without DNS lookups - - def proxy_bypass_registry(host): - try: + # provide a proxy_bypass version on Windows without DNS lookups + + def proxy_bypass_registry(host): + try: if is_py3: import winreg else: @@ -62,47 +62,47 @@ if sys.platform == 'win32': return False try: - internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER, - r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') + internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER, + r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it proxyEnable = int(winreg.QueryValueEx(internetSettings, 'ProxyEnable')[0]) # ProxyOverride is almost always a string - proxyOverride = winreg.QueryValueEx(internetSettings, - 'ProxyOverride')[0] - except OSError: - return False - if not proxyEnable or not proxyOverride: - return False - - # make a check value list from the registry entry: replace the - # '<local>' string by the localhost entry and the corresponding - # canonical entry. - proxyOverride = proxyOverride.split(';') - # now check if we match one of the registry values. - for test in proxyOverride: - if test == '<local>': - if '.' not in host: - return True - test = test.replace(".", r"\.") # mask dots - test = test.replace("*", r".*") # change glob sequence - test = test.replace("?", r".") # change glob char - if re.match(test, host, re.I): - return True - return False - - def proxy_bypass(host): # noqa - """Return True, if the host should be bypassed. 
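The `CaseInsensitiveDict` described above behaves as in this short sketch:

```python
from requests.structures import CaseInsensitiveDict

cid = CaseInsensitiveDict()
cid['Accept'] = 'application/json'

print(cid['aCCEPT'])               # 'application/json' - lookup ignores case
print('ACCEPT' in cid)             # True
print(list(cid))                   # ['Accept'] - iteration keeps the last-set casing
print(dict(cid.lower_items()))     # {'accept': 'application/json'} - for comparisons
```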
- - Checks proxy settings gathered from the environment, if specified, - or the registry. - """ - if getproxies_environment(): - return proxy_bypass_environment(host) - else: - return proxy_bypass_registry(host) - - + proxyOverride = winreg.QueryValueEx(internetSettings, + 'ProxyOverride')[0] + except OSError: + return False + if not proxyEnable or not proxyOverride: + return False + + # make a check value list from the registry entry: replace the + # '<local>' string by the localhost entry and the corresponding + # canonical entry. + proxyOverride = proxyOverride.split(';') + # now check if we match one of the registry values. + for test in proxyOverride: + if test == '<local>': + if '.' not in host: + return True + test = test.replace(".", r"\.") # mask dots + test = test.replace("*", r".*") # change glob sequence + test = test.replace("?", r".") # change glob char + if re.match(test, host, re.I): + return True + return False + + def proxy_bypass(host): # noqa + """Return True, if the host should be bypassed. + + Checks proxy settings gathered from the environment, if specified, + or the registry. + """ + if getproxies_environment(): + return proxy_bypass_environment(host) + else: + return proxy_bypass_registry(host) + + def dict_to_sequence(d): """Returns an internal sequence dictionary update.""" @@ -113,16 +113,16 @@ def dict_to_sequence(d): def super_len(o): - total_length = None - current_position = 0 - + total_length = None + current_position = 0 + if hasattr(o, '__len__'): - total_length = len(o) + total_length = len(o) - elif hasattr(o, 'len'): - total_length = o.len + elif hasattr(o, 'len'): + total_length = o.len - elif hasattr(o, 'fileno'): + elif hasattr(o, 'fileno'): try: fileno = o.fileno() except (io.UnsupportedOperation, AttributeError): @@ -131,52 +131,52 @@ def super_len(o): # `Tarfile.extractfile()`, per issue 5229. pass else: - total_length = os.fstat(fileno).st_size - - # Having used fstat to determine the file length, we need to - # confirm that this file was opened up in binary mode. - if 'b' not in o.mode: - warnings.warn(( - "Requests has determined the content-length for this " - "request using the binary size of the file: however, the " - "file has been opened in text mode (i.e. without the 'b' " - "flag in the mode). This may lead to an incorrect " - "content-length. In Requests 3.0, support will be removed " - "for files in text mode."), - FileModeWarning - ) - - if hasattr(o, 'tell'): - try: - current_position = o.tell() - except (OSError, IOError): - # This can happen in some weird situations, such as when the file - # is actually a special file descriptor like stdin. In this - # instance, we don't know what the length is, so set it to zero and - # let requests chunk it instead. - if total_length is not None: - current_position = total_length - else: - if hasattr(o, 'seek') and total_length is None: + total_length = os.fstat(fileno).st_size + + # Having used fstat to determine the file length, we need to + # confirm that this file was opened up in binary mode. + if 'b' not in o.mode: + warnings.warn(( + "Requests has determined the content-length for this " + "request using the binary size of the file: however, the " + "file has been opened in text mode (i.e. without the 'b' " + "flag in the mode). This may lead to an incorrect " + "content-length. 
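`super_len` above is how requests derives a Content-Length: the object's total size minus its current read position. A minimal sketch with an in-memory buffer:

```python
import io
from requests.utils import super_len

buf = io.BytesIO(b'0123456789')
buf.seek(4)              # pretend four bytes were already consumed

print(super_len(buf))    # 6 - only the unread remainder is counted
print(super_len(b'abc')) # 3 - objects with __len__ fall back to len()
```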
In Requests 3.0, support will be removed " + "for files in text mode."), + FileModeWarning + ) + + if hasattr(o, 'tell'): + try: + current_position = o.tell() + except (OSError, IOError): + # This can happen in some weird situations, such as when the file + # is actually a special file descriptor like stdin. In this + # instance, we don't know what the length is, so set it to zero and + # let requests chunk it instead. + if total_length is not None: + current_position = total_length + else: + if hasattr(o, 'seek') and total_length is None: # StringIO and BytesIO have seek but no usable fileno - try: - # seek to end of file - o.seek(0, 2) - total_length = o.tell() - - # seek back to current position to support - # partially read file-like objects - o.seek(current_position or 0) - except (OSError, IOError): - total_length = 0 - - if total_length is None: - total_length = 0 - - return max(0, total_length - current_position) - - -def get_netrc_auth(url, raise_errors=False): + try: + # seek to end of file + o.seek(0, 2) + total_length = o.tell() + + # seek back to current position to support + # partially read file-like objects + o.seek(current_position or 0) + except (OSError, IOError): + total_length = 0 + + if total_length is None: + total_length = 0 + + return max(0, total_length - current_position) + + +def get_netrc_auth(url, raise_errors=False): """Returns the Requests tuple auth for a given url from netrc.""" netrc_file = os.environ.get('NETRC') @@ -209,12 +209,12 @@ def get_netrc_auth(url, raise_errors=False): ri = urlparse(url) - # Strip port numbers from netloc. This weird `if...encode`` dance is - # used for Python 3.2, which doesn't support unicode literals. - splitstr = b':' - if isinstance(url, str): - splitstr = splitstr.decode('ascii') - host = ri.netloc.split(splitstr)[0] + # Strip port numbers from netloc. This weird `if...encode`` dance is + # used for Python 3.2, which doesn't support unicode literals. + splitstr = b':' + if isinstance(url, str): + splitstr = splitstr.decode('ascii') + host = ri.netloc.split(splitstr)[0] try: _netrc = netrc(netrc_path).authenticators(host) @@ -224,9 +224,9 @@ def get_netrc_auth(url, raise_errors=False): return (_netrc[login_i], _netrc[2]) except (NetrcParseError, IOError): # If there was a parsing error or a permissions issue reading the file, - # we'll just skip netrc auth unless explicitly asked to raise errors. - if raise_errors: - raise + # we'll just skip netrc auth unless explicitly asked to raise errors. + if raise_errors: + raise # App Engine hackiness. except (ImportError, AttributeError): @@ -236,8 +236,8 @@ def get_netrc_auth(url, raise_errors=False): def guess_filename(obj): """Tries to guess the filename of the given object.""" name = getattr(obj, 'name', None) - if (name and isinstance(name, basestring) and name[0] != '<' and - name[-1] != '>'): + if (name and isinstance(name, basestring) and name[0] != '<' and + name[-1] != '>'): return os.path.basename(name) @@ -307,8 +307,8 @@ def from_key_val_list(value): ValueError: cannot encode objects that are not 2-tuples >>> from_key_val_list({'key': 'val'}) OrderedDict([('key', 'val')]) - - :rtype: OrderedDict + + :rtype: OrderedDict """ if value is None: return None @@ -333,8 +333,8 @@ def to_key_val_list(value): Traceback (most recent call last): ... ValueError: cannot encode objects that are not 2-tuples - - :rtype: list + + :rtype: list """ if value is None: return None @@ -370,7 +370,7 @@ def parse_list_header(value): :param value: a string with a list header. 
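Two of the helpers above in use; the `.netrc` lookup simply returns `None` when no matching machine entry exists (example.org is a placeholder host):

```python
from requests.utils import from_key_val_list, get_netrc_auth, to_key_val_list

# Returns (login, password) when ~/.netrc (or the file named by $NETRC)
# has an entry for the URL's host; otherwise None.
print(get_netrc_auth('https://example.org/resource'))

print(from_key_val_list([('key', 'val')]))   # OrderedDict([('key', 'val')])
print(to_key_val_list({'key': 'val'}))       # [('key', 'val')]
```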
:return: :class:`list` - :rtype: list + :rtype: list """ result = [] for item in _parse_list_header(value): @@ -401,7 +401,7 @@ def parse_dict_header(value): :param value: a string with a dict header. :return: :class:`dict` - :rtype: dict + :rtype: dict """ result = {} for item in _parse_list_header(value): @@ -422,7 +422,7 @@ def unquote_header_value(value, is_filename=False): using for quoting. :param value: the header value to unquote. - :rtype: str + :rtype: str """ if value and value[0] == value[-1] == '"': # this is not the real unquoting, but fixing this so that the @@ -445,7 +445,7 @@ def dict_from_cookiejar(cj): """Returns a key/value dictionary from a CookieJar. :param cj: CookieJar object to extract cookies from. - :rtype: dict + :rtype: dict """ cookie_dict = {} @@ -461,10 +461,10 @@ def add_dict_to_cookiejar(cj, cookie_dict): :param cj: CookieJar to insert cookies into. :param cookie_dict: Dict of key/values to insert into CookieJar. - :rtype: CookieJar + :rtype: CookieJar """ - return cookiejar_from_dict(cookie_dict, cj) + return cookiejar_from_dict(cookie_dict, cj) def get_encodings_from_content(content): @@ -516,7 +516,7 @@ def get_encoding_from_headers(headers): """Returns encodings from given HTTP Header Dict. :param headers: dictionary to extract encoding from. - :rtype: str + :rtype: str """ content_type = headers.get('content-type') @@ -558,8 +558,8 @@ def stream_decode_response_unicode(iterator, r): def iter_slices(string, slice_length): """Iterate over slices of a string.""" pos = 0 - if slice_length is None or slice_length <= 0: - slice_length = len(string) + if slice_length is None or slice_length <= 0: + slice_length = len(string) while pos < len(string): yield string[pos:pos + slice_length] pos += slice_length @@ -575,7 +575,7 @@ def get_unicode_from_response(r): 1. charset from content-type 2. fall back and replace all unicode characters - :rtype: str + :rtype: str """ warnings.warn(( 'In requests 3.0, get_unicode_from_response will be removed. For ' @@ -603,14 +603,14 @@ def get_unicode_from_response(r): # The unreserved URI characters (RFC 3986) UNRESERVED_SET = frozenset( - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~") + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~") def unquote_unreserved(uri): """Un-escape any percent-escape sequences in a URI that are unreserved characters. This leaves all reserved, illegal and non-ASCII bytes encoded. - - :rtype: str + + :rtype: str """ parts = uri.split('%') for i in range(1, len(parts)): @@ -635,30 +635,30 @@ def requote_uri(uri): This function passes the given URI through an unquote/quote cycle to ensure that it is fully and consistently quoted. - - :rtype: str + + :rtype: str """ - safe_with_percent = "!#$%&'()*+,/:;=?@[]~" - safe_without_percent = "!#$&'()*+,/:;=?@[]~" - try: - # Unquote only the unreserved characters - # Then quote only illegal characters (do not quote reserved, - # unreserved, or '%') - return quote(unquote_unreserved(uri), safe=safe_with_percent) - except InvalidURL: - # We couldn't unquote the given URI, so let's try quoting it, but - # there may be unquoted '%'s in the URI. We need to make sure they're - # properly quoted so they do not cause issues elsewhere. 
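The cookie and header helpers above, sketched briefly:

```python
from requests.cookies import RequestsCookieJar
from requests.utils import (add_dict_to_cookiejar, dict_from_cookiejar,
                            get_encoding_from_headers)

jar = RequestsCookieJar()
add_dict_to_cookiejar(jar, {'session': 'abc123'})   # cookiejar_from_dict(cookie_dict, cj)
print(dict_from_cookiejar(jar))                     # {'session': 'abc123'}

print(get_encoding_from_headers(
    {'content-type': 'text/html; charset=ISO-8859-1'}))   # 'ISO-8859-1'
```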
- return quote(uri, safe=safe_without_percent) + safe_with_percent = "!#$%&'()*+,/:;=?@[]~" + safe_without_percent = "!#$&'()*+,/:;=?@[]~" + try: + # Unquote only the unreserved characters + # Then quote only illegal characters (do not quote reserved, + # unreserved, or '%') + return quote(unquote_unreserved(uri), safe=safe_with_percent) + except InvalidURL: + # We couldn't unquote the given URI, so let's try quoting it, but + # there may be unquoted '%'s in the URI. We need to make sure they're + # properly quoted so they do not cause issues elsewhere. + return quote(uri, safe=safe_without_percent) def address_in_network(ip, net): - """This function allows you to check if an IP belongs to a network subnet - + """This function allows you to check if an IP belongs to a network subnet + Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24 returns False if ip = 192.168.1.1 and net = 192.168.100.0/24 - - :rtype: bool + + :rtype: bool """ ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0] netaddr, bits = net.split('/') @@ -668,20 +668,20 @@ def address_in_network(ip, net): def dotted_netmask(mask): - """Converts mask from /xx format to xxx.xxx.xxx.xxx - + """Converts mask from /xx format to xxx.xxx.xxx.xxx + Example: if mask is 24 function returns 255.255.255.0 - - :rtype: str + + :rtype: str """ bits = 0xffffffff ^ (1 << 32 - mask) - 1 return socket.inet_ntoa(struct.pack('>I', bits)) def is_ipv4_address(string_ip): - """ - :rtype: bool - """ + """ + :rtype: bool + """ try: socket.inet_aton(string_ip) except socket.error: @@ -690,11 +690,11 @@ def is_ipv4_address(string_ip): def is_valid_cidr(string_network): - """ - Very simple check of the cidr format in no_proxy variable. - - :rtype: bool - """ + """ + Very simple check of the cidr format in no_proxy variable. + + :rtype: bool + """ if string_network.count('/') == 1: try: mask = int(string_network.split('/')[1]) @@ -713,33 +713,33 @@ def is_valid_cidr(string_network): return True -@contextlib.contextmanager -def set_environ(env_name, value): - """Set the environment variable 'env_name' to 'value' - - Save previous value, yield, and then restore the previous value stored in - the environment variable 'env_name'. - - If 'value' is None, do nothing""" - value_changed = value is not None - if value_changed: - old_value = os.environ.get(env_name) - os.environ[env_name] = value - try: - yield - finally: - if value_changed: - if old_value is None: - del os.environ[env_name] - else: - os.environ[env_name] = old_value - - -def should_bypass_proxies(url, no_proxy): +@contextlib.contextmanager +def set_environ(env_name, value): + """Set the environment variable 'env_name' to 'value' + + Save previous value, yield, and then restore the previous value stored in + the environment variable 'env_name'. + + If 'value' is None, do nothing""" + value_changed = value is not None + if value_changed: + old_value = os.environ.get(env_name) + os.environ[env_name] = value + try: + yield + finally: + if value_changed: + if old_value is None: + del os.environ[env_name] + else: + os.environ[env_name] = old_value + + +def should_bypass_proxies(url, no_proxy): """ Returns whether we should bypass proxies or not. - - :rtype: bool + + :rtype: bool """ # Prioritize lowercase environment variables over uppercase # to keep a consistent behaviour with other http projects (curl, wget). @@ -747,9 +747,9 @@ def should_bypass_proxies(url, no_proxy): # First check whether no_proxy is defined. If it is, check that the URL # we're getting isn't in the no_proxy list. 
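The network-mask helpers and `requote_uri` above, following the examples given in their own docstrings:

```python
from requests.utils import (address_in_network, dotted_netmask,
                            is_valid_cidr, requote_uri)

print(dotted_netmask(24))                                     # '255.255.255.0'
print(address_in_network('192.168.1.1', '192.168.1.0/24'))    # True
print(address_in_network('192.168.1.1', '192.168.100.0/24'))  # False
print(is_valid_cidr('192.168.100.0/24'))                      # True

# Re-quotes only what needs quoting; existing %-escapes are left untouched.
print(requote_uri('https://example.org/a path?q=a%20b'))
```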
- no_proxy_arg = no_proxy - if no_proxy is None: - no_proxy = get_proxy('no_proxy') + no_proxy_arg = no_proxy + if no_proxy is None: + no_proxy = get_proxy('no_proxy') parsed = urlparse(url) if parsed.hostname is None: @@ -759,9 +759,9 @@ def should_bypass_proxies(url, no_proxy): if no_proxy: # We need to check whether we match here. We need to see if we match # the end of the hostname, both with and without the port. - no_proxy = ( - host for host in no_proxy.replace(' ', '').split(',') if host - ) + no_proxy = ( + host for host in no_proxy.replace(' ', '').split(',') if host + ) if is_ipv4_address(parsed.hostname): for proxy_ip in no_proxy: @@ -769,9 +769,9 @@ def should_bypass_proxies(url, no_proxy): if address_in_network(parsed.hostname, proxy_ip): return True elif parsed.hostname == proxy_ip: - # If no_proxy ip was defined in plain IP notation instead of cidr notation & - # matches the IP of the index - return True + # If no_proxy ip was defined in plain IP notation instead of cidr notation & + # matches the IP of the index + return True else: host_with_port = parsed.hostname if parsed.port: @@ -783,55 +783,55 @@ def should_bypass_proxies(url, no_proxy): # to apply the proxies on this URL. return True - with set_environ('no_proxy', no_proxy_arg): + with set_environ('no_proxy', no_proxy_arg): # parsed.hostname can be `None` in cases such as a file URI. - try: + try: bypass = proxy_bypass(parsed.hostname) - except (TypeError, socket.gaierror): - bypass = False + except (TypeError, socket.gaierror): + bypass = False if bypass: return True return False - -def get_environ_proxies(url, no_proxy=None): - """ - Return a dict of environment proxies. - - :rtype: dict - """ - if should_bypass_proxies(url, no_proxy=no_proxy): + +def get_environ_proxies(url, no_proxy=None): + """ + Return a dict of environment proxies. + + :rtype: dict + """ + if should_bypass_proxies(url, no_proxy=no_proxy): return {} else: return getproxies() -def select_proxy(url, proxies): - """Select a proxy for the url, if applicable. +def select_proxy(url, proxies): + """Select a proxy for the url, if applicable. - :param url: The url being for the request - :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs - """ - proxies = proxies or {} - urlparts = urlparse(url) - if urlparts.hostname is None: - return proxies.get(urlparts.scheme, proxies.get('all')) - - proxy_keys = [ - urlparts.scheme + '://' + urlparts.hostname, - urlparts.scheme, - 'all://' + urlparts.hostname, - 'all', - ] - proxy = None - for proxy_key in proxy_keys: - if proxy_key in proxies: - proxy = proxies[proxy_key] - break + :param url: The url being for the request + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs + """ + proxies = proxies or {} + urlparts = urlparse(url) + if urlparts.hostname is None: + return proxies.get(urlparts.scheme, proxies.get('all')) + + proxy_keys = [ + urlparts.scheme + '://' + urlparts.hostname, + urlparts.scheme, + 'all://' + urlparts.hostname, + 'all', + ] + proxy = None + for proxy_key in proxy_keys: + if proxy_key in proxies: + proxy = proxies[proxy_key] + break - return proxy + return proxy def resolve_proxies(request, proxies, trust_env=True): @@ -861,19 +861,19 @@ def resolve_proxies(request, proxies, trust_env=True): return new_proxies -def default_user_agent(name="python-requests"): - """ - Return a string representing the default user agent. 
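A sketch of the proxy-selection precedence implied by `select_proxy` above ('scheme://host' beats 'scheme', which beats 'all://host' and 'all'); all proxy URLs are placeholders:

```python
from requests.utils import get_environ_proxies, select_proxy

proxies = {
    'http': 'http://proxy.local:3128',
    'https://internal.example.com': 'http://dmz-proxy.local:3128',
    'all': 'http://fallback.local:3128',
}

print(select_proxy('https://internal.example.com/x', proxies))  # host-specific entry
print(select_proxy('http://example.org/', proxies))              # the 'http' entry
print(select_proxy('ftp://example.org/', proxies))               # falls back to 'all'

# NO_PROXY handling: hosts covered by no_proxy get an empty proxy dict back.
print(get_environ_proxies('http://localhost:8000/', no_proxy='localhost'))  # {}
```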
- - :rtype: str - """ - return '%s/%s' % (name, __version__) - - +def default_user_agent(name="python-requests"): + """ + Return a string representing the default user agent. + + :rtype: str + """ + return '%s/%s' % (name, __version__) + + def default_headers(): - """ - :rtype: requests.structures.CaseInsensitiveDict - """ + """ + :rtype: requests.structures.CaseInsensitiveDict + """ return CaseInsensitiveDict({ 'User-Agent': default_user_agent(), 'Accept-Encoding': DEFAULT_ACCEPT_ENCODING, @@ -887,28 +887,28 @@ def parse_header_links(value): i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg" - :rtype: list + :rtype: list """ links = [] - replace_chars = ' \'"' + replace_chars = ' \'"' value = value.strip(replace_chars) if not value: return links - for val in re.split(', *<', value): + for val in re.split(', *<', value): try: - url, params = val.split(';', 1) + url, params = val.split(';', 1) except ValueError: url, params = val, '' - link = {'url': url.strip('<> \'"')} + link = {'url': url.strip('<> \'"')} - for param in params.split(';'): + for param in params.split(';'): try: - key, value = param.split('=') + key, value = param.split('=') except ValueError: break @@ -926,14 +926,14 @@ _null3 = _null * 3 def guess_json_utf(data): - """ - :rtype: str - """ + """ + :rtype: str + """ # JSON always starts with two ASCII characters, so detection is as # easy as counting the nulls and from their location and count # determine the encoding. Also detect a BOM, if present. sample = data[:4] - if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): + if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): return 'utf-32' # BOM included if sample[:3] == codecs.BOM_UTF8: return 'utf-8-sig' # BOM included, MS style (discouraged) @@ -958,11 +958,11 @@ def guess_json_utf(data): def prepend_scheme_if_needed(url, new_scheme): - """Given a URL that may or may not have a scheme, prepend the given scheme. - Does not replace a present scheme with the one provided as an argument. - - :rtype: str - """ + """Given a URL that may or may not have a scheme, prepend the given scheme. + Does not replace a present scheme with the one provided as an argument. + + :rtype: str + """ parsed = parse_url(url) scheme, auth, host, port, path, query, fragment = parsed @@ -988,10 +988,10 @@ def prepend_scheme_if_needed(url, new_scheme): def get_auth_from_url(url): """Given a url with authentication components, extract them into a tuple of - username,password. - - :rtype: (str,str) - """ + username,password. + + :rtype: (str,str) + """ parsed = urlparse(url) try: @@ -1002,37 +1002,37 @@ def get_auth_from_url(url): return auth -# Moved outside of function to avoid recompile every call -_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$') -_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$') - - -def check_header_validity(header): - """Verifies that header value is a string which doesn't contain - leading whitespace or return characters. This prevents unintended - header injection. - - :param header: tuple, in the format (name, value). +# Moved outside of function to avoid recompile every call +_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$') +_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$') + + +def check_header_validity(header): + """Verifies that header value is a string which doesn't contain + leading whitespace or return characters. This prevents unintended + header injection. 
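A few of the header and encoding helpers above in use:

```python
from requests.utils import (default_headers, get_auth_from_url,
                            guess_json_utf, parse_header_links)

print(default_headers()['User-Agent'])     # e.g. 'python-requests/2.27.1'

links = parse_header_links('<https://example.org/page2>; rel="next"; type="text/html"')
print(links[0]['url'], links[0]['rel'])    # https://example.org/page2 next

# Encoding of a JSON payload, detected from the BOM / null-byte pattern.
print(guess_json_utf('{"a": 1}'.encode('utf-16')))   # 'utf-16'

print(get_auth_from_url('https://user:pass@example.org/'))   # ('user', 'pass')
```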
+ + :param header: tuple, in the format (name, value). """ - name, value = header + name, value = header - if isinstance(value, bytes): - pat = _CLEAN_HEADER_REGEX_BYTE + if isinstance(value, bytes): + pat = _CLEAN_HEADER_REGEX_BYTE else: - pat = _CLEAN_HEADER_REGEX_STR - try: - if not pat.match(value): - raise InvalidHeader("Invalid return character or leading space in header: %s" % name) - except TypeError: - raise InvalidHeader("Value for header {%s: %s} must be of type str or " - "bytes, not %s" % (name, value, type(value))) + pat = _CLEAN_HEADER_REGEX_STR + try: + if not pat.match(value): + raise InvalidHeader("Invalid return character or leading space in header: %s" % name) + except TypeError: + raise InvalidHeader("Value for header {%s: %s} must be of type str or " + "bytes, not %s" % (name, value, type(value))) def urldefragauth(url): """ - Given a url remove the fragment and the authentication part. - - :rtype: str + Given a url remove the fragment and the authentication part. + + :rtype: str """ scheme, netloc, path, params, query, fragment = urlparse(url) @@ -1043,18 +1043,18 @@ def urldefragauth(url): netloc = netloc.rsplit('@', 1)[-1] return urlunparse((scheme, netloc, path, params, query, '')) - - -def rewind_body(prepared_request): - """Move file pointer back to its recorded starting position - so it can be read again on redirect. - """ - body_seek = getattr(prepared_request.body, 'seek', None) - if body_seek is not None and isinstance(prepared_request._body_position, integer_types): - try: - body_seek(prepared_request._body_position) - except (IOError, OSError): - raise UnrewindableBodyError("An error occurred when rewinding request " - "body for redirect.") - else: - raise UnrewindableBodyError("Unable to rewind request body for redirect.") + + +def rewind_body(prepared_request): + """Move file pointer back to its recorded starting position + so it can be read again on redirect. 
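The header-injection guard and URL clean-up helpers above, sketched:

```python
from requests.exceptions import InvalidHeader
from requests.utils import check_header_validity, urldefragauth

check_header_validity(('X-Ok', 'value'))    # passes silently

try:
    check_header_validity(('X-Bad', 'value\r\nInjected: 1'))
except InvalidHeader as exc:
    print('rejected:', exc)                 # CR/LF in the value is refused

# Drops both the fragment and the userinfo component of the URL.
print(urldefragauth('https://user:pass@example.org/path#frag'))  # 'https://example.org/path'
```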
+ """ + body_seek = getattr(prepared_request.body, 'seek', None) + if body_seek is not None and isinstance(prepared_request._body_position, integer_types): + try: + body_seek(prepared_request._body_position) + except (IOError, OSError): + raise UnrewindableBodyError("An error occurred when rewinding request " + "body for redirect.") + else: + raise UnrewindableBodyError("Unable to rewind request body for redirect.") diff --git a/contrib/python/requests/ya.make b/contrib/python/requests/ya.make index f971752d75..9298642cb2 100644 --- a/contrib/python/requests/ya.make +++ b/contrib/python/requests/ya.make @@ -2,18 +2,18 @@ PY23_LIBRARY() -OWNER(g:python-contrib) +OWNER(g:python-contrib) VERSION(2.27.1) LICENSE(Apache-2.0) PEERDIR( - contrib/python/certifi - contrib/python/idna - contrib/python/urllib3 -) - + contrib/python/certifi + contrib/python/idna + contrib/python/urllib3 +) + IF (PYTHON2) PEERDIR( contrib/python/chardet @@ -24,28 +24,28 @@ ELSE() ) ENDIF() -NO_LINT() - +NO_LINT() + PY_SRCS( TOP_LEVEL - requests/__init__.py - requests/__version__.py - requests/_internal_utils.py + requests/__init__.py + requests/__version__.py + requests/_internal_utils.py requests/adapters.py requests/api.py requests/auth.py requests/certs.py - requests/compat.py - requests/cookies.py - requests/exceptions.py - requests/help.py + requests/compat.py + requests/cookies.py + requests/exceptions.py + requests/help.py requests/hooks.py requests/models.py - requests/packages.py + requests/packages.py requests/sessions.py - requests/status_codes.py - requests/structures.py - requests/utils.py + requests/status_codes.py + requests/structures.py + requests/utils.py ) RESOURCE_FILES( |