author      shadchin <shadchin@yandex-team.ru>          2022-02-10 16:44:39 +0300
committer   Daniil Cherednik <dcherednik@yandex-team.ru> 2022-02-10 16:44:39 +0300
commit      e9656aae26e0358d5378e5b63dcac5c8dbe0e4d0 (patch)
tree        64175d5cadab313b3e7039ebaa06c5bc3295e274 /contrib/python/requests
parent      2598ef1d0aee359b4b6d5fdd1758916d5907d04f (diff)
download    ydb-e9656aae26e0358d5378e5b63dcac5c8dbe0e4d0.tar.gz
Restoring authorship annotation for <shadchin@yandex-team.ru>. Commit 2 of 2.
Diffstat (limited to 'contrib/python/requests')
-rw-r--r--  contrib/python/requests/.dist-info/METADATA        216
-rw-r--r--  contrib/python/requests/LICENSE                     350
-rw-r--r--  contrib/python/requests/NOTICE                        4
-rw-r--r--  contrib/python/requests/README.md                   156
-rw-r--r--  contrib/python/requests/patches/01-arcadia.patch     58
-rw-r--r--  contrib/python/requests/requests/__init__.py        132
-rw-r--r--  contrib/python/requests/requests/__version__.py       8
-rw-r--r--  contrib/python/requests/requests/adapters.py         62
-rw-r--r--  contrib/python/requests/requests/api.py              38
-rw-r--r--  contrib/python/requests/requests/auth.py             32
-rw-r--r--  contrib/python/requests/requests/compat.py           30
-rw-r--r--  contrib/python/requests/requests/cookies.py          48
-rw-r--r--  contrib/python/requests/requests/exceptions.py       38
-rw-r--r--  contrib/python/requests/requests/help.py             40
-rw-r--r--  contrib/python/requests/requests/hooks.py             4
-rw-r--r--  contrib/python/requests/requests/models.py          124
-rw-r--r--  contrib/python/requests/requests/packages.py         26
-rw-r--r--  contrib/python/requests/requests/sessions.py        186
-rw-r--r--  contrib/python/requests/requests/status_codes.py     74
-rw-r--r--  contrib/python/requests/requests/structures.py       12
-rw-r--r--  contrib/python/requests/requests/utils.py           402
-rw-r--r--  contrib/python/requests/ya.make                      42
22 files changed, 1041 insertions(+), 1041 deletions(-)
diff --git a/contrib/python/requests/.dist-info/METADATA b/contrib/python/requests/.dist-info/METADATA
index 944103b04a..6363b2098e 100644
--- a/contrib/python/requests/.dist-info/METADATA
+++ b/contrib/python/requests/.dist-info/METADATA
@@ -1,125 +1,125 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.1
Name: requests
-Version: 2.27.1
+Version: 2.27.1
Summary: Python HTTP for Humans.
-Home-page: https://requests.readthedocs.io
+Home-page: https://requests.readthedocs.io
Author: Kenneth Reitz
Author-email: me@kennethreitz.org
License: Apache 2.0
-Project-URL: Documentation, https://requests.readthedocs.io
-Project-URL: Source, https://github.com/psf/requests
+Project-URL: Documentation, https://requests.readthedocs.io
+Project-URL: Source, https://github.com/psf/requests
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
-Classifier: Environment :: Web Environment
+Classifier: Environment :: Web Environment
Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: Apache Software License
+Classifier: License :: OSI Approved :: Apache Software License
Classifier: Natural Language :: English
-Classifier: Operating System :: OS Independent
+Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
-Classifier: Programming Language :: Python :: 3.7
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Programming Language :: Python :: Implementation :: PyPy
-Classifier: Topic :: Internet :: WWW/HTTP
-Classifier: Topic :: Software Development :: Libraries
-Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*
-Description-Content-Type: text/markdown
-Requires-Dist: urllib3 (<1.27,>=1.21.1)
-Requires-Dist: certifi (>=2017.4.17)
-Requires-Dist: chardet (<5,>=3.0.2) ; python_version < "3"
-Requires-Dist: idna (<3,>=2.5) ; python_version < "3"
-Requires-Dist: charset-normalizer (~=2.0.0) ; python_version >= "3"
-Requires-Dist: idna (<4,>=2.5) ; python_version >= "3"
+Classifier: Topic :: Internet :: WWW/HTTP
+Classifier: Topic :: Software Development :: Libraries
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*
+Description-Content-Type: text/markdown
+Requires-Dist: urllib3 (<1.27,>=1.21.1)
+Requires-Dist: certifi (>=2017.4.17)
+Requires-Dist: chardet (<5,>=3.0.2) ; python_version < "3"
+Requires-Dist: idna (<3,>=2.5) ; python_version < "3"
+Requires-Dist: charset-normalizer (~=2.0.0) ; python_version >= "3"
+Requires-Dist: idna (<4,>=2.5) ; python_version >= "3"
Provides-Extra: security
Provides-Extra: socks
-Requires-Dist: PySocks (!=1.5.7,>=1.5.6) ; extra == 'socks'
-Requires-Dist: win-inet-pton ; (sys_platform == "win32" and python_version == "2.7") and extra == 'socks'
-Provides-Extra: use_chardet_on_py3
-Requires-Dist: chardet (<5,>=3.0.2) ; extra == 'use_chardet_on_py3'
-
-# Requests
-
-**Requests** is a simple, yet elegant, HTTP library.
-
-```python
->>> import requests
->>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass'))
->>> r.status_code
-200
->>> r.headers['content-type']
-'application/json; charset=utf8'
->>> r.encoding
-'utf-8'
->>> r.text
-'{"authenticated": true, ...'
->>> r.json()
-{'authenticated': True, ...}
-```
-
-Requests allows you to send HTTP/1.1 requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method!
-
-Requests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`— according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. You may certainly put your trust in this code.
-
-[![Downloads](https://pepy.tech/badge/requests/month)](https://pepy.tech/project/requests)
-[![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests)
-[![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors)
-
-## Installing Requests and Supported Versions
-
-Requests is available on PyPI:
-
-```console
-$ python -m pip install requests
-```
-
-Requests officially supports Python 2.7 & 3.6+.
-
-## Supported Features & Best–Practices
-
-Requests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today.
-
-- Keep-Alive & Connection Pooling
-- International Domains and URLs
-- Sessions with Cookie Persistence
-- Browser-style TLS/SSL Verification
-- Basic & Digest Authentication
-- Familiar `dict`–like Cookies
-- Automatic Content Decompression and Decoding
-- Multi-part File Uploads
-- SOCKS Proxy Support
-- Connection Timeouts
-- Streaming Downloads
-- Automatic honoring of `.netrc`
-- Chunked HTTP Requests
-
-## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)
-
-[![Read the Docs](https://raw.githubusercontent.com/psf/requests/main/ext/ss.png)](https://requests.readthedocs.io)
-
-## Cloning the repository
-
-When cloning the Requests repository, you may need to add the `-c
-fetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit (see
-[this issue](https://github.com/psf/requests/issues/2690) for more background):
-
-```shell
-git clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git
-```
-
-You can also apply this setting to your global Git config:
-
-```shell
-git config --global fetch.fsck.badTimezone ignore
-```
-
----
-
-[![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/main/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/main/ext/psf.png)](https://www.python.org/psf)
-
-
+Requires-Dist: PySocks (!=1.5.7,>=1.5.6) ; extra == 'socks'
+Requires-Dist: win-inet-pton ; (sys_platform == "win32" and python_version == "2.7") and extra == 'socks'
+Provides-Extra: use_chardet_on_py3
+Requires-Dist: chardet (<5,>=3.0.2) ; extra == 'use_chardet_on_py3'
+
+# Requests
+
+**Requests** is a simple, yet elegant, HTTP library.
+
+```python
+>>> import requests
+>>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass'))
+>>> r.status_code
+200
+>>> r.headers['content-type']
+'application/json; charset=utf8'
+>>> r.encoding
+'utf-8'
+>>> r.text
+'{"authenticated": true, ...'
+>>> r.json()
+{'authenticated': True, ...}
+```
+
+Requests allows you to send HTTP/1.1 requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method!
+
+Requests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`— according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. You may certainly put your trust in this code.
+
+[![Downloads](https://pepy.tech/badge/requests/month)](https://pepy.tech/project/requests)
+[![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests)
+[![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors)
+
+## Installing Requests and Supported Versions
+
+Requests is available on PyPI:
+
+```console
+$ python -m pip install requests
+```
+
+Requests officially supports Python 2.7 & 3.6+.
+
+## Supported Features & Best–Practices
+
+Requests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today.
+
+- Keep-Alive & Connection Pooling
+- International Domains and URLs
+- Sessions with Cookie Persistence
+- Browser-style TLS/SSL Verification
+- Basic & Digest Authentication
+- Familiar `dict`–like Cookies
+- Automatic Content Decompression and Decoding
+- Multi-part File Uploads
+- SOCKS Proxy Support
+- Connection Timeouts
+- Streaming Downloads
+- Automatic honoring of `.netrc`
+- Chunked HTTP Requests
+
+## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)
+
+[![Read the Docs](https://raw.githubusercontent.com/psf/requests/main/ext/ss.png)](https://requests.readthedocs.io)
+
+## Cloning the repository
+
+When cloning the Requests repository, you may need to add the `-c
+fetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit (see
+[this issue](https://github.com/psf/requests/issues/2690) for more background):
+
+```shell
+git clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git
+```
+
+You can also apply this setting to your global Git config:
+
+```shell
+git config --global fetch.fsck.badTimezone ignore
+```
+
+---
+
+[![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/main/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/main/ext/psf.png)](https://www.python.org/psf)
+
+
diff --git a/contrib/python/requests/LICENSE b/contrib/python/requests/LICENSE
index a81c6e3602..67db858821 100644
--- a/contrib/python/requests/LICENSE
+++ b/contrib/python/requests/LICENSE
@@ -1,175 +1,175 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
diff --git a/contrib/python/requests/NOTICE b/contrib/python/requests/NOTICE
index 9ce9d3da4e..1ff62db688 100644
--- a/contrib/python/requests/NOTICE
+++ b/contrib/python/requests/NOTICE
@@ -1,2 +1,2 @@
-Requests
-Copyright 2019 Kenneth Reitz
+Requests
+Copyright 2019 Kenneth Reitz
diff --git a/contrib/python/requests/README.md b/contrib/python/requests/README.md
index ec81d353d7..807215ac56 100644
--- a/contrib/python/requests/README.md
+++ b/contrib/python/requests/README.md
@@ -1,78 +1,78 @@
-# Requests
-
-**Requests** is a simple, yet elegant, HTTP library.
-
-```python
->>> import requests
->>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass'))
->>> r.status_code
-200
->>> r.headers['content-type']
-'application/json; charset=utf8'
->>> r.encoding
-'utf-8'
->>> r.text
-'{"authenticated": true, ...'
->>> r.json()
-{'authenticated': True, ...}
-```
-
-Requests allows you to send HTTP/1.1 requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method!
-
-Requests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`— according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. You may certainly put your trust in this code.
-
-[![Downloads](https://pepy.tech/badge/requests/month)](https://pepy.tech/project/requests)
-[![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests)
-[![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors)
-
-## Installing Requests and Supported Versions
-
-Requests is available on PyPI:
-
-```console
-$ python -m pip install requests
-```
-
-Requests officially supports Python 2.7 & 3.6+.
-
-## Supported Features & Best–Practices
-
-Requests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today.
-
-- Keep-Alive & Connection Pooling
-- International Domains and URLs
-- Sessions with Cookie Persistence
-- Browser-style TLS/SSL Verification
-- Basic & Digest Authentication
-- Familiar `dict`–like Cookies
-- Automatic Content Decompression and Decoding
-- Multi-part File Uploads
-- SOCKS Proxy Support
-- Connection Timeouts
-- Streaming Downloads
-- Automatic honoring of `.netrc`
-- Chunked HTTP Requests
-
-## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)
-
-[![Read the Docs](https://raw.githubusercontent.com/psf/requests/main/ext/ss.png)](https://requests.readthedocs.io)
-
-## Cloning the repository
-
-When cloning the Requests repository, you may need to add the `-c
-fetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit (see
-[this issue](https://github.com/psf/requests/issues/2690) for more background):
-
-```shell
-git clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git
-```
-
-You can also apply this setting to your global Git config:
-
-```shell
-git config --global fetch.fsck.badTimezone ignore
-```
-
----
-
-[![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/main/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/main/ext/psf.png)](https://www.python.org/psf)
+# Requests
+
+**Requests** is a simple, yet elegant, HTTP library.
+
+```python
+>>> import requests
+>>> r = requests.get('https://httpbin.org/basic-auth/user/pass', auth=('user', 'pass'))
+>>> r.status_code
+200
+>>> r.headers['content-type']
+'application/json; charset=utf8'
+>>> r.encoding
+'utf-8'
+>>> r.text
+'{"authenticated": true, ...'
+>>> r.json()
+{'authenticated': True, ...}
+```
+
+Requests allows you to send HTTP/1.1 requests extremely easily. There’s no need to manually add query strings to your URLs, or to form-encode your `PUT` & `POST` data — but nowadays, just use the `json` method!
+
+Requests is one of the most downloaded Python packages today, pulling in around `30M downloads / week`— according to GitHub, Requests is currently [depended upon](https://github.com/psf/requests/network/dependents?package_id=UGFja2FnZS01NzA4OTExNg%3D%3D) by `1,000,000+` repositories. You may certainly put your trust in this code.
+
+[![Downloads](https://pepy.tech/badge/requests/month)](https://pepy.tech/project/requests)
+[![Supported Versions](https://img.shields.io/pypi/pyversions/requests.svg)](https://pypi.org/project/requests)
+[![Contributors](https://img.shields.io/github/contributors/psf/requests.svg)](https://github.com/psf/requests/graphs/contributors)
+
+## Installing Requests and Supported Versions
+
+Requests is available on PyPI:
+
+```console
+$ python -m pip install requests
+```
+
+Requests officially supports Python 2.7 & 3.6+.
+
+## Supported Features & Best–Practices
+
+Requests is ready for the demands of building robust and reliable HTTP–speaking applications, for the needs of today.
+
+- Keep-Alive & Connection Pooling
+- International Domains and URLs
+- Sessions with Cookie Persistence
+- Browser-style TLS/SSL Verification
+- Basic & Digest Authentication
+- Familiar `dict`–like Cookies
+- Automatic Content Decompression and Decoding
+- Multi-part File Uploads
+- SOCKS Proxy Support
+- Connection Timeouts
+- Streaming Downloads
+- Automatic honoring of `.netrc`
+- Chunked HTTP Requests
+
+## API Reference and User Guide available on [Read the Docs](https://requests.readthedocs.io)
+
+[![Read the Docs](https://raw.githubusercontent.com/psf/requests/main/ext/ss.png)](https://requests.readthedocs.io)
+
+## Cloning the repository
+
+When cloning the Requests repository, you may need to add the `-c
+fetch.fsck.badTimezone=ignore` flag to avoid an error about a bad commit (see
+[this issue](https://github.com/psf/requests/issues/2690) for more background):
+
+```shell
+git clone -c fetch.fsck.badTimezone=ignore https://github.com/psf/requests.git
+```
+
+You can also apply this setting to your global Git config:
+
+```shell
+git config --global fetch.fsck.badTimezone ignore
+```
+
+---
+
+[![Kenneth Reitz](https://raw.githubusercontent.com/psf/requests/main/ext/kr.png)](https://kennethreitz.org) [![Python Software Foundation](https://raw.githubusercontent.com/psf/requests/main/ext/psf.png)](https://www.python.org/psf)
diff --git a/contrib/python/requests/patches/01-arcadia.patch b/contrib/python/requests/patches/01-arcadia.patch
index f7e8351851..c622d7b578 100644
--- a/contrib/python/requests/patches/01-arcadia.patch
+++ b/contrib/python/requests/patches/01-arcadia.patch
@@ -1,29 +1,29 @@
---- contrib/python/requests/requests/adapters.py (index)
-+++ contrib/python/requests/requests/adapters.py (working tree)
-@@ -224,13 +224,13 @@ class HTTPAdapter(BaseAdapter):
- if not cert_loc:
- cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
-
-- if not cert_loc or not os.path.exists(cert_loc):
-+ if not cert_loc or isinstance(cert_loc, basestring) and not os.path.exists(cert_loc):
- raise IOError("Could not find a suitable TLS CA certificate bundle, "
- "invalid path: {}".format(cert_loc))
-
- conn.cert_reqs = 'CERT_REQUIRED'
-
-- if not os.path.isdir(cert_loc):
-+ if not isinstance(cert_loc, basestring) or not os.path.isdir(cert_loc):
- conn.ca_certs = cert_loc
- else:
- conn.ca_cert_dir = cert_loc
---- contrib/python/requests/requests/utils.py (index)
-+++ contrib/python/requests/requests/utils.py (working tree)
-@@ -246,7 +246,7 @@ def extract_zipped_paths(path):
- archive with the location of an extracted copy of the target, or else
- just return the provided path unchanged.
- """
-- if os.path.exists(path):
-+ if callable(path) or os.path.exists(path):
- # this is already a valid path, no need to do anything further
- return path
-
+--- contrib/python/requests/requests/adapters.py (index)
++++ contrib/python/requests/requests/adapters.py (working tree)
+@@ -224,13 +224,13 @@ class HTTPAdapter(BaseAdapter):
+ if not cert_loc:
+ cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
+
+- if not cert_loc or not os.path.exists(cert_loc):
++ if not cert_loc or isinstance(cert_loc, basestring) and not os.path.exists(cert_loc):
+ raise IOError("Could not find a suitable TLS CA certificate bundle, "
+ "invalid path: {}".format(cert_loc))
+
+ conn.cert_reqs = 'CERT_REQUIRED'
+
+- if not os.path.isdir(cert_loc):
++ if not isinstance(cert_loc, basestring) or not os.path.isdir(cert_loc):
+ conn.ca_certs = cert_loc
+ else:
+ conn.ca_cert_dir = cert_loc
+--- contrib/python/requests/requests/utils.py (index)
++++ contrib/python/requests/requests/utils.py (working tree)
+@@ -246,7 +246,7 @@ def extract_zipped_paths(path):
+ archive with the location of an extracted copy of the target, or else
+ just return the provided path unchanged.
+ """
+- if os.path.exists(path):
++ if callable(path) or os.path.exists(path):
+ # this is already a valid path, no need to do anything further
+ return path
+
diff --git a/contrib/python/requests/requests/__init__.py b/contrib/python/requests/requests/__init__.py
index db6ddf9192..53a5b42af6 100644
--- a/contrib/python/requests/requests/__init__.py
+++ b/contrib/python/requests/requests/__init__.py
@@ -9,32 +9,32 @@
Requests HTTP Library
~~~~~~~~~~~~~~~~~~~~~
-Requests is an HTTP library, written in Python, for human beings.
-Basic GET usage:
+Requests is an HTTP library, written in Python, for human beings.
+Basic GET usage:
>>> import requests
>>> r = requests.get('https://www.python.org')
>>> r.status_code
200
- >>> b'Python is a programming language' in r.content
+ >>> b'Python is a programming language' in r.content
True
... or POST:
>>> payload = dict(key1='value1', key2='value2')
- >>> r = requests.post('https://httpbin.org/post', data=payload)
+ >>> r = requests.post('https://httpbin.org/post', data=payload)
>>> print(r.text)
{
...
"form": {
- "key1": "value1",
- "key2": "value2"
+ "key1": "value1",
+ "key2": "value2"
},
...
}
The other HTTP methods are supported - see `requests.api`. Full documentation
-is at <https://requests.readthedocs.io>.
+is at <https://requests.readthedocs.io>.
:copyright: (c) 2017 by Kenneth Reitz.
:license: Apache 2.0, see LICENSE for more details.
@@ -44,17 +44,17 @@ import urllib3
import warnings
from .exceptions import RequestsDependencyWarning
-try:
- from charset_normalizer import __version__ as charset_normalizer_version
-except ImportError:
- charset_normalizer_version = None
-
-try:
- from chardet import __version__ as chardet_version
-except ImportError:
- chardet_version = None
-
-def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):
+try:
+ from charset_normalizer import __version__ as charset_normalizer_version
+except ImportError:
+ charset_normalizer_version = None
+
+try:
+ from chardet import __version__ as chardet_version
+except ImportError:
+ chardet_version = None
+
+def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):
urllib3_version = urllib3_version.split('.')
assert urllib3_version != ['dev'] # Verify urllib3 isn't installed from git.
@@ -65,60 +65,60 @@ def check_compatibility(urllib3_version, chardet_version, charset_normalizer_ver
# Check urllib3 for compatibility.
major, minor, patch = urllib3_version # noqa: F811
major, minor, patch = int(major), int(minor), int(patch)
- # urllib3 >= 1.21.1, <= 1.26
+ # urllib3 >= 1.21.1, <= 1.26
assert major == 1
assert minor >= 21
- assert minor <= 26
-
- # Check charset_normalizer for compatibility.
- if chardet_version:
- major, minor, patch = chardet_version.split('.')[:3]
- major, minor, patch = int(major), int(minor), int(patch)
- # chardet_version >= 3.0.2, < 5.0.0
- assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0)
- elif charset_normalizer_version:
- major, minor, patch = charset_normalizer_version.split('.')[:3]
- major, minor, patch = int(major), int(minor), int(patch)
- # charset_normalizer >= 2.0.0 < 3.0.0
- assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0)
- else:
- raise Exception("You need either charset_normalizer or chardet installed")
-
-def _check_cryptography(cryptography_version):
- # cryptography < 1.3.4
- try:
- cryptography_version = list(map(int, cryptography_version.split('.')))
- except ValueError:
- return
-
- if cryptography_version < [1, 3, 4]:
- warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version)
- warnings.warn(warning, RequestsDependencyWarning)
-
+ assert minor <= 26
+
+ # Check charset_normalizer for compatibility.
+ if chardet_version:
+ major, minor, patch = chardet_version.split('.')[:3]
+ major, minor, patch = int(major), int(minor), int(patch)
+ # chardet_version >= 3.0.2, < 5.0.0
+ assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0)
+ elif charset_normalizer_version:
+ major, minor, patch = charset_normalizer_version.split('.')[:3]
+ major, minor, patch = int(major), int(minor), int(patch)
+ # charset_normalizer >= 2.0.0 < 3.0.0
+ assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0)
+ else:
+ raise Exception("You need either charset_normalizer or chardet installed")
+
+def _check_cryptography(cryptography_version):
+ # cryptography < 1.3.4
+ try:
+ cryptography_version = list(map(int, cryptography_version.split('.')))
+ except ValueError:
+ return
+
+ if cryptography_version < [1, 3, 4]:
+ warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version)
+ warnings.warn(warning, RequestsDependencyWarning)
+
# Check imported dependencies for compatibility.
try:
- check_compatibility(urllib3.__version__, chardet_version, charset_normalizer_version)
+ check_compatibility(urllib3.__version__, chardet_version, charset_normalizer_version)
except (AssertionError, ValueError):
- warnings.warn("urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
- "version!".format(urllib3.__version__, chardet_version, charset_normalizer_version),
+ warnings.warn("urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
+ "version!".format(urllib3.__version__, chardet_version, charset_normalizer_version),
RequestsDependencyWarning)
-# Attempt to enable urllib3's fallback for SNI support
-# if the standard library doesn't support SNI or the
-# 'ssl' library isn't available.
+# Attempt to enable urllib3's fallback for SNI support
+# if the standard library doesn't support SNI or the
+# 'ssl' library isn't available.
try:
- try:
- import ssl
- except ImportError:
- ssl = None
-
- if not getattr(ssl, "HAS_SNI", False):
- from urllib3.contrib import pyopenssl
- pyopenssl.inject_into_urllib3()
-
- # Check cryptography version
- from cryptography import __version__ as cryptography_version
- _check_cryptography(cryptography_version)
+ try:
+ import ssl
+ except ImportError:
+ ssl = None
+
+ if not getattr(ssl, "HAS_SNI", False):
+ from urllib3.contrib import pyopenssl
+ pyopenssl.inject_into_urllib3()
+
+ # Check cryptography version
+ from cryptography import __version__ as cryptography_version
+ _check_cryptography(cryptography_version)
except ImportError:
pass
@@ -139,12 +139,12 @@ from .status_codes import codes
from .exceptions import (
RequestException, Timeout, URLRequired,
TooManyRedirects, HTTPError, ConnectionError,
- FileModeWarning, ConnectTimeout, ReadTimeout, JSONDecodeError
+ FileModeWarning, ConnectTimeout, ReadTimeout, JSONDecodeError
)
# Set default logging handler to avoid "No handler found" warnings.
import logging
-from logging import NullHandler
+from logging import NullHandler
logging.getLogger(__name__).addHandler(NullHandler())
diff --git a/contrib/python/requests/requests/__version__.py b/contrib/python/requests/requests/__version__.py
index d84dd0a0f9..e973b03b5f 100644
--- a/contrib/python/requests/requests/__version__.py
+++ b/contrib/python/requests/requests/__version__.py
@@ -4,11 +4,11 @@
__title__ = 'requests'
__description__ = 'Python HTTP for Humans.'
-__url__ = 'https://requests.readthedocs.io'
-__version__ = '2.27.1'
-__build__ = 0x022701
+__url__ = 'https://requests.readthedocs.io'
+__version__ = '2.27.1'
+__build__ = 0x022701
__author__ = 'Kenneth Reitz'
__author_email__ = 'me@kennethreitz.org'
__license__ = 'Apache 2.0'
-__copyright__ = 'Copyright 2022 Kenneth Reitz'
+__copyright__ = 'Copyright 2022 Kenneth Reitz'
__cake__ = u'\u2728 \U0001f370 \u2728'
diff --git a/contrib/python/requests/requests/adapters.py b/contrib/python/requests/requests/adapters.py
index a3f3002b7c..3a8463b7db 100644
--- a/contrib/python/requests/requests/adapters.py
+++ b/contrib/python/requests/requests/adapters.py
@@ -13,13 +13,13 @@ import socket
from urllib3.poolmanager import PoolManager, proxy_from_url
from urllib3.response import HTTPResponse
-from urllib3.util import parse_url
+from urllib3.util import parse_url
from urllib3.util import Timeout as TimeoutSauce
from urllib3.util.retry import Retry
from urllib3.exceptions import ClosedPoolError
from urllib3.exceptions import ConnectTimeoutError
from urllib3.exceptions import HTTPError as _HTTPError
-from urllib3.exceptions import InvalidHeader as _InvalidHeader
+from urllib3.exceptions import InvalidHeader as _InvalidHeader
from urllib3.exceptions import MaxRetryError
from urllib3.exceptions import NewConnectionError
from urllib3.exceptions import ProxyError as _ProxyError
@@ -27,18 +27,18 @@ from urllib3.exceptions import ProtocolError
from urllib3.exceptions import ReadTimeoutError
from urllib3.exceptions import SSLError as _SSLError
from urllib3.exceptions import ResponseError
-from urllib3.exceptions import LocationValueError
+from urllib3.exceptions import LocationValueError
from .models import Response
from .compat import urlparse, basestring
-from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths,
- get_encoding_from_headers, prepend_scheme_if_needed,
- get_auth_from_url, urldefragauth, select_proxy)
+from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths,
+ get_encoding_from_headers, prepend_scheme_if_needed,
+ get_auth_from_url, urldefragauth, select_proxy)
from .structures import CaseInsensitiveDict
from .cookies import extract_cookies_to_jar
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
- ProxyError, RetryError, InvalidSchema, InvalidProxyURL,
- InvalidURL, InvalidHeader)
+ ProxyError, RetryError, InvalidSchema, InvalidProxyURL,
+ InvalidURL, InvalidHeader)
from .auth import _basic_auth_str
try:
@@ -130,7 +130,7 @@ class HTTPAdapter(BaseAdapter):
self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
def __getstate__(self):
- return {attr: getattr(self, attr, None) for attr in self.__attrs__}
+ return {attr: getattr(self, attr, None) for attr in self.__attrs__}
def __setstate__(self, state):
# Can't handle by adding 'proxy_manager' to self.__attrs__ because
@@ -222,11 +222,11 @@ class HTTPAdapter(BaseAdapter):
cert_loc = verify
if not cert_loc:
- cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
+ cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
if not cert_loc or isinstance(cert_loc, basestring) and not os.path.exists(cert_loc):
raise IOError("Could not find a suitable TLS CA certificate bundle, "
- "invalid path: {}".format(cert_loc))
+ "invalid path: {}".format(cert_loc))
conn.cert_reqs = 'CERT_REQUIRED'
@@ -248,10 +248,10 @@ class HTTPAdapter(BaseAdapter):
conn.key_file = None
if conn.cert_file and not os.path.exists(conn.cert_file):
raise IOError("Could not find the TLS certificate file, "
- "invalid path: {}".format(conn.cert_file))
+ "invalid path: {}".format(conn.cert_file))
if conn.key_file and not os.path.exists(conn.key_file):
raise IOError("Could not find the TLS key file, "
- "invalid path: {}".format(conn.key_file))
+ "invalid path: {}".format(conn.key_file))
def build_response(self, req, resp):
"""Builds a :class:`Response <requests.Response>` object from a urllib3
@@ -303,10 +303,10 @@ class HTTPAdapter(BaseAdapter):
if proxy:
proxy = prepend_scheme_if_needed(proxy, 'http')
- proxy_url = parse_url(proxy)
- if not proxy_url.host:
- raise InvalidProxyURL("Please check proxy URL. It is malformed"
- " and could be missing the host.")
+ proxy_url = parse_url(proxy)
+ if not proxy_url.host:
+ raise InvalidProxyURL("Please check proxy URL. It is malformed"
+ " and could be missing the host.")
proxy_manager = self.proxy_manager_for(proxy)
conn = proxy_manager.connection_from_url(url)
else:
@@ -380,7 +380,7 @@ class HTTPAdapter(BaseAdapter):
when subclassing the
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
- :param proxy: The url of the proxy being used for this request.
+ :param proxy: The url of the proxy being used for this request.
:rtype: dict
"""
headers = {}
@@ -409,14 +409,14 @@ class HTTPAdapter(BaseAdapter):
:rtype: requests.Response
"""
- try:
- conn = self.get_connection(request.url, proxies)
- except LocationValueError as e:
- raise InvalidURL(e, request=request)
+ try:
+ conn = self.get_connection(request.url, proxies)
+ except LocationValueError as e:
+ raise InvalidURL(e, request=request)
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
- self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
+ self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
chunked = not (request.body is None or 'Content-Length' in request.headers)
@@ -426,7 +426,7 @@ class HTTPAdapter(BaseAdapter):
timeout = TimeoutSauce(connect=connect, read=read)
except ValueError as e:
# this may raise a string formatting error.
- err = ("Invalid timeout {}. Pass a (connect, read) "
+ err = ("Invalid timeout {}. Pass a (connect, read) "
"timeout tuple, or a single float to set "
"both timeouts to the same value".format(timeout))
raise ValueError(err)
@@ -458,11 +458,11 @@ class HTTPAdapter(BaseAdapter):
low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
try:
- skip_host = 'Host' in request.headers
+ skip_host = 'Host' in request.headers
low_conn.putrequest(request.method,
url,
- skip_accept_encoding=True,
- skip_host=skip_host)
+ skip_accept_encoding=True,
+ skip_host=skip_host)
for header, value in request.headers.items():
low_conn.putheader(header, value)
@@ -478,10 +478,10 @@ class HTTPAdapter(BaseAdapter):
# Receive the response from the server
try:
- # For Python 2.7, use buffering of HTTP responses
+ # For Python 2.7, use buffering of HTTP responses
r = low_conn.getresponse(buffering=True)
except TypeError:
- # For compatibility with Python 3.3+
+ # For compatibility with Python 3.3+
r = low_conn.getresponse()
resp = HTTPResponse.from_httplib(
@@ -530,8 +530,8 @@ class HTTPAdapter(BaseAdapter):
raise SSLError(e, request=request)
elif isinstance(e, ReadTimeoutError):
raise ReadTimeout(e, request=request)
- elif isinstance(e, _InvalidHeader):
- raise InvalidHeader(e, request=request)
+ elif isinstance(e, _InvalidHeader):
+ raise InvalidHeader(e, request=request)
else:
raise
diff --git a/contrib/python/requests/requests/api.py b/contrib/python/requests/requests/api.py
index 2b41d9e0bc..4cba90eefe 100644
--- a/contrib/python/requests/requests/api.py
+++ b/contrib/python/requests/requests/api.py
@@ -16,13 +16,13 @@ from . import sessions
def request(method, url, **kwargs):
"""Constructs and sends a :class:`Request <Request>`.
- :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``.
+ :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``.
:param url: URL for the new :class:`Request` object.
- :param params: (optional) Dictionary, list of tuples or bytes to send
- in the query string for the :class:`Request`.
- :param data: (optional) Dictionary, list of tuples, bytes, or file-like
- object to send in the body of the :class:`Request`.
- :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
+ :param params: (optional) Dictionary, list of tuples or bytes to send
+ in the query string for the :class:`Request`.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
+ :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
:param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
@@ -49,8 +49,8 @@ def request(method, url, **kwargs):
Usage::
>>> import requests
- >>> req = requests.request('GET', 'https://httpbin.org/get')
- >>> req
+ >>> req = requests.request('GET', 'https://httpbin.org/get')
+ >>> req
<Response [200]>
"""
@@ -65,8 +65,8 @@ def get(url, params=None, **kwargs):
r"""Sends a GET request.
:param url: URL for the new :class:`Request` object.
- :param params: (optional) Dictionary, list of tuples or bytes to send
- in the query string for the :class:`Request`.
+ :param params: (optional) Dictionary, list of tuples or bytes to send
+ in the query string for the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
@@ -91,9 +91,9 @@ def head(url, **kwargs):
r"""Sends a HEAD request.
:param url: URL for the new :class:`Request` object.
- :param \*\*kwargs: Optional arguments that ``request`` takes. If
- `allow_redirects` is not provided, it will be set to `False` (as
- opposed to the default :meth:`request` behavior).
+ :param \*\*kwargs: Optional arguments that ``request`` takes. If
+ `allow_redirects` is not provided, it will be set to `False` (as
+ opposed to the default :meth:`request` behavior).
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
@@ -106,8 +106,8 @@ def post(url, data=None, json=None, **kwargs):
r"""Sends a POST request.
:param url: URL for the new :class:`Request` object.
- :param data: (optional) Dictionary, list of tuples, bytes, or file-like
- object to send in the body of the :class:`Request`.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
@@ -121,8 +121,8 @@ def put(url, data=None, **kwargs):
r"""Sends a PUT request.
:param url: URL for the new :class:`Request` object.
- :param data: (optional) Dictionary, list of tuples, bytes, or file-like
- object to send in the body of the :class:`Request`.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
@@ -136,8 +136,8 @@ def patch(url, data=None, **kwargs):
r"""Sends a PATCH request.
:param url: URL for the new :class:`Request` object.
- :param data: (optional) Dictionary, list of tuples, bytes, or file-like
- object to send in the body of the :class:`Request`.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
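The docstrings above describe the thin module-level helpers in requests.api, each of which builds a one-off Session. A minimal sketch of the documented behaviour, assuming network access to the example host httpbin.org:

    import requests

    # params end up in the query string, as the docstring states.
    r = requests.request("GET", "https://httpbin.org/get", params={"q": "test"})
    print(r.status_code)   # 200
    print(r.url)           # https://httpbin.org/get?q=test

    # head() defaults allow_redirects to False, unlike the other helpers.
    h = requests.head("https://httpbin.org/redirect/1")
    print(h.status_code)   # 302 - the redirect is reported, not followed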
diff --git a/contrib/python/requests/requests/auth.py b/contrib/python/requests/requests/auth.py
index 86c467c68c..eeface39ae 100644
--- a/contrib/python/requests/requests/auth.py
+++ b/contrib/python/requests/requests/auth.py
@@ -38,7 +38,7 @@ def _basic_auth_str(username, password):
if not isinstance(username, basestring):
warnings.warn(
"Non-string usernames will no longer be supported in Requests "
- "3.0.0. Please convert the object you've passed in ({!r}) to "
+ "3.0.0. Please convert the object you've passed in ({!r}) to "
"a string or bytes object in the near future to avoid "
"problems.".format(username),
category=DeprecationWarning,
@@ -48,9 +48,9 @@ def _basic_auth_str(username, password):
if not isinstance(password, basestring):
warnings.warn(
"Non-string passwords will no longer be supported in Requests "
- "3.0.0. Please convert the object you've passed in ({!r}) to "
+ "3.0.0. Please convert the object you've passed in ({!r}) to "
"a string or bytes object in the near future to avoid "
- "problems.".format(type(password)),
+ "problems.".format(type(password)),
category=DeprecationWarning,
)
password = str(password)
@@ -153,18 +153,18 @@ class HTTPDigestAuth(AuthBase):
x = x.encode('utf-8')
return hashlib.sha1(x).hexdigest()
hash_utf8 = sha_utf8
- elif _algorithm == 'SHA-256':
- def sha256_utf8(x):
- if isinstance(x, str):
- x = x.encode('utf-8')
- return hashlib.sha256(x).hexdigest()
- hash_utf8 = sha256_utf8
- elif _algorithm == 'SHA-512':
- def sha512_utf8(x):
- if isinstance(x, str):
- x = x.encode('utf-8')
- return hashlib.sha512(x).hexdigest()
- hash_utf8 = sha512_utf8
+ elif _algorithm == 'SHA-256':
+ def sha256_utf8(x):
+ if isinstance(x, str):
+ x = x.encode('utf-8')
+ return hashlib.sha256(x).hexdigest()
+ hash_utf8 = sha256_utf8
+ elif _algorithm == 'SHA-512':
+ def sha512_utf8(x):
+ if isinstance(x, str):
+ x = x.encode('utf-8')
+ return hashlib.sha512(x).hexdigest()
+ hash_utf8 = sha512_utf8
KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
@@ -239,7 +239,7 @@ class HTTPDigestAuth(AuthBase):
"""
# If response is not 4xx, do not auth
- # See https://github.com/psf/requests/issues/3772
+ # See https://github.com/psf/requests/issues/3772
if not 400 <= r.status_code < 500:
self._thread_local.num_401_calls = 1
return r
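The digest branch above picks the hash function from the algorithm advertised by the server (MD5, SHA, SHA-256 or SHA-512). A short sketch of attaching the two bundled auth helpers; the httpbin.org endpoints are only illustrative:

    import requests
    from requests.auth import HTTPBasicAuth, HTTPDigestAuth

    # Basic auth: per the warning above, username and password should already
    # be str or bytes objects.
    requests.get("https://httpbin.org/basic-auth/user/pass",
                 auth=HTTPBasicAuth("user", "pass"))

    # Digest auth: the challenge/response handshake is handled transparently.
    requests.get("https://httpbin.org/digest-auth/auth/user/pass",
                 auth=HTTPDigestAuth("user", "pass"))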
diff --git a/contrib/python/requests/requests/compat.py b/contrib/python/requests/requests/compat.py
index 6de042e00f..029ae62ac3 100644
--- a/contrib/python/requests/requests/compat.py
+++ b/contrib/python/requests/requests/compat.py
@@ -8,10 +8,10 @@ This module handles import compatibility issues between Python 2 and
Python 3.
"""
-try:
- import chardet
-except ImportError:
- import charset_normalizer as chardet
+try:
+ import chardet
+except ImportError:
+ import charset_normalizer as chardet
import sys
@@ -28,10 +28,10 @@ is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
-has_simplejson = False
+has_simplejson = False
try:
import simplejson as json
- has_simplejson = True
+ has_simplejson = True
except ImportError:
import json
@@ -48,8 +48,8 @@ if is_py2:
import cookielib
from Cookie import Morsel
from StringIO import StringIO
- # Keep OrderedDict for backwards compatibility.
- from collections import Callable, Mapping, MutableMapping, OrderedDict
+ # Keep OrderedDict for backwards compatibility.
+ from collections import Callable, Mapping, MutableMapping, OrderedDict
builtin_str = str
bytes = str
@@ -57,7 +57,7 @@ if is_py2:
basestring = basestring
numeric_types = (int, long, float)
integer_types = (int, long)
- JSONDecodeError = ValueError
+ JSONDecodeError = ValueError
elif is_py3:
from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
@@ -65,13 +65,13 @@ elif is_py3:
from http import cookiejar as cookielib
from http.cookies import Morsel
from io import StringIO
- # Keep OrderedDict for backwards compatibility.
+ # Keep OrderedDict for backwards compatibility.
from collections import OrderedDict
- from collections.abc import Callable, Mapping, MutableMapping
- if has_simplejson:
- from simplejson import JSONDecodeError
- else:
- from json import JSONDecodeError
+ from collections.abc import Callable, Mapping, MutableMapping
+ if has_simplejson:
+ from simplejson import JSONDecodeError
+ else:
+ from json import JSONDecodeError
builtin_str = str
str = str
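compat.py prefers chardet and silently falls back to charset_normalizer, and it exposes a single JSONDecodeError name whether simplejson or the stdlib json is in use. The same optional-dependency pattern, sketched outside of requests and assuming at least one of the two detectors is installed:

    try:
        import chardet                        # preferred when available
    except ImportError:
        import charset_normalizer as chardet  # API-compatible fallback

    sample = "naïve café".encode("utf-8")
    print(chardet.detect(sample)["encoding"])  # e.g. 'utf-8'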
diff --git a/contrib/python/requests/requests/cookies.py b/contrib/python/requests/requests/cookies.py
index 9a801736a2..56fccd9c25 100644
--- a/contrib/python/requests/requests/cookies.py
+++ b/contrib/python/requests/requests/cookies.py
@@ -14,7 +14,7 @@ import time
import calendar
from ._internal_utils import to_native_string
-from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping
+from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping
try:
import threading
@@ -168,7 +168,7 @@ class CookieConflictError(RuntimeError):
"""
-class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
+class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
"""Compatibility class; is a cookielib.CookieJar, but exposes a dict
interface.
@@ -414,15 +414,15 @@ class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
def copy(self):
"""Return a copy of this RequestsCookieJar."""
new_cj = RequestsCookieJar()
- new_cj.set_policy(self.get_policy())
+ new_cj.set_policy(self.get_policy())
new_cj.update(self)
return new_cj
- def get_policy(self):
- """Return the CookiePolicy instance used."""
- return self._policy
+ def get_policy(self):
+ """Return the CookiePolicy instance used."""
+ return self._policy
+
-
def _copy_cookie_jar(jar):
if jar is None:
return None
@@ -444,21 +444,21 @@ def create_cookie(name, value, **kwargs):
By default, the pair of `name` and `value` will be set for the domain ''
and sent on every request (this is sometimes called a "supercookie").
"""
- result = {
- 'version': 0,
- 'name': name,
- 'value': value,
- 'port': None,
- 'domain': '',
- 'path': '/',
- 'secure': False,
- 'expires': None,
- 'discard': True,
- 'comment': None,
- 'comment_url': None,
- 'rest': {'HttpOnly': None},
- 'rfc2109': False,
- }
+ result = {
+ 'version': 0,
+ 'name': name,
+ 'value': value,
+ 'port': None,
+ 'domain': '',
+ 'path': '/',
+ 'secure': False,
+ 'expires': None,
+ 'discard': True,
+ 'comment': None,
+ 'comment_url': None,
+ 'rest': {'HttpOnly': None},
+ 'rfc2109': False,
+ }
badargs = set(kwargs) - set(result)
if badargs:
@@ -512,7 +512,7 @@ def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
:param cookiejar: (optional) A cookiejar to add the cookies to.
:param overwrite: (optional) If False, will not replace cookies
already in the jar with new ones.
- :rtype: CookieJar
+ :rtype: CookieJar
"""
if cookiejar is None:
cookiejar = RequestsCookieJar()
@@ -531,7 +531,7 @@ def merge_cookies(cookiejar, cookies):
:param cookiejar: CookieJar object to add the cookies to.
:param cookies: Dictionary or CookieJar object to be added.
- :rtype: CookieJar
+ :rtype: CookieJar
"""
if not isinstance(cookiejar, cookielib.CookieJar):
raise ValueError('You can only merge into CookieJar')
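cookies.py provides the dict-like RequestsCookieJar plus the create_cookie defaults listed above. A small sketch using only the public helpers:

    from requests.cookies import cookiejar_from_dict

    jar = cookiejar_from_dict({"session": "abc123"})       # rtype: CookieJar
    jar.set("theme", "dark", domain="example.com", path="/")

    print(jar.get("session"))            # 'abc123'
    copied = jar.copy()                  # reuses the same policy, per get_policy() above
    print(copied is jar, len(copied))    # False 2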
diff --git a/contrib/python/requests/requests/exceptions.py b/contrib/python/requests/requests/exceptions.py
index 10840684a9..79697635a5 100644
--- a/contrib/python/requests/requests/exceptions.py
+++ b/contrib/python/requests/requests/exceptions.py
@@ -8,9 +8,9 @@ This module contains the set of Requests' exceptions.
"""
from urllib3.exceptions import HTTPError as BaseHTTPError
-from .compat import JSONDecodeError as CompatJSONDecodeError
+from .compat import JSONDecodeError as CompatJSONDecodeError
+
-
class RequestException(IOError):
"""There was an ambiguous exception that occurred while handling your
request.
@@ -27,14 +27,14 @@ class RequestException(IOError):
super(RequestException, self).__init__(*args, **kwargs)
-class InvalidJSONError(RequestException):
- """A JSON error occurred."""
-
-
-class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
- """Couldn't decode the text into json"""
-
-
+class InvalidJSONError(RequestException):
+ """A JSON error occurred."""
+
+
+class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
+ """Couldn't decode the text into json"""
+
+
class HTTPError(RequestException):
"""An HTTP error occurred."""
@@ -80,11 +80,11 @@ class TooManyRedirects(RequestException):
class MissingSchema(RequestException, ValueError):
- """The URL scheme (e.g. http or https) is missing."""
+ """The URL scheme (e.g. http or https) is missing."""
class InvalidSchema(RequestException, ValueError):
- """The URL scheme provided is either invalid or unsupported."""
+ """The URL scheme provided is either invalid or unsupported."""
class InvalidURL(RequestException, ValueError):
@@ -95,20 +95,20 @@ class InvalidHeader(RequestException, ValueError):
"""The header value provided was somehow invalid."""
-class InvalidProxyURL(InvalidURL):
- """The proxy URL provided is invalid."""
-
-
+class InvalidProxyURL(InvalidURL):
+ """The proxy URL provided is invalid."""
+
+
class ChunkedEncodingError(RequestException):
"""The server declared chunked encoding but sent an invalid chunk."""
class ContentDecodingError(RequestException, BaseHTTPError):
- """Failed to decode response content."""
+ """Failed to decode response content."""
class StreamConsumedError(RequestException, TypeError):
- """The content for this response was already consumed."""
+ """The content for this response was already consumed."""
class RetryError(RequestException):
@@ -116,7 +116,7 @@ class RetryError(RequestException):
class UnrewindableBodyError(RequestException):
- """Requests encountered an error when trying to rewind a body."""
+ """Requests encountered an error when trying to rewind a body."""
# Warnings
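Because the JSONDecodeError above derives from both InvalidJSONError and the compat-level JSONDecodeError, older `except ValueError` handlers keep working. A sketch, with httpbin.org standing in for a server that returns a non-JSON body:

    import requests

    r = requests.get("https://httpbin.org/html")     # HTML, not JSON
    try:
        r.json()
    except requests.exceptions.JSONDecodeError as exc:
        # also catchable as ValueError via the shared base classes
        print("response body was not JSON:", exc)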
diff --git a/contrib/python/requests/requests/help.py b/contrib/python/requests/requests/help.py
index 9dc33ab20a..4cd6389f55 100644
--- a/contrib/python/requests/requests/help.py
+++ b/contrib/python/requests/requests/help.py
@@ -12,17 +12,17 @@ import urllib3
from . import __version__ as requests_version
try:
- import charset_normalizer
-except ImportError:
- charset_normalizer = None
-
-try:
- import chardet
-except ImportError:
- chardet = None
-
-try:
- from urllib3.contrib import pyopenssl
+ import charset_normalizer
+except ImportError:
+ charset_normalizer = None
+
+try:
+ import chardet
+except ImportError:
+ chardet = None
+
+try:
+ from urllib3.contrib import pyopenssl
except ImportError:
pyopenssl = None
OpenSSL = None
@@ -80,12 +80,12 @@ def info():
implementation_info = _implementation()
urllib3_info = {'version': urllib3.__version__}
- charset_normalizer_info = {'version': None}
- chardet_info = {'version': None}
- if charset_normalizer:
- charset_normalizer_info = {'version': charset_normalizer.__version__}
- if chardet:
- chardet_info = {'version': chardet.__version__}
+ charset_normalizer_info = {'version': None}
+ chardet_info = {'version': None}
+ if charset_normalizer:
+ charset_normalizer_info = {'version': charset_normalizer.__version__}
+ if chardet:
+ chardet_info = {'version': chardet.__version__}
pyopenssl_info = {
'version': None,
@@ -103,7 +103,7 @@ def info():
'version': getattr(idna, '__version__', ''),
}
- system_ssl = ssl.OPENSSL_VERSION_NUMBER
+ system_ssl = ssl.OPENSSL_VERSION_NUMBER
system_ssl_info = {
'version': '%x' % system_ssl if system_ssl is not None else ''
}
@@ -113,11 +113,11 @@ def info():
'implementation': implementation_info,
'system_ssl': system_ssl_info,
'using_pyopenssl': pyopenssl is not None,
- 'using_charset_normalizer': chardet is None,
+ 'using_charset_normalizer': chardet is None,
'pyOpenSSL': pyopenssl_info,
'urllib3': urllib3_info,
'chardet': chardet_info,
- 'charset_normalizer': charset_normalizer_info,
+ 'charset_normalizer': charset_normalizer_info,
'cryptography': cryptography_info,
'idna': idna_info,
'requests': {
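help.py gathers the interpreter, urllib3, chardet/charset_normalizer and OpenSSL versions into one dict for bug reports. A quick way to inspect it, assuming a 2.27-era install:

    import json
    from requests.help import info

    print(json.dumps(info(), indent=2, sort_keys=True))
    # the same report should be printable with: python -m requests.help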
diff --git a/contrib/python/requests/requests/hooks.py b/contrib/python/requests/requests/hooks.py
index 501c6f6361..7a51f212c8 100644
--- a/contrib/python/requests/requests/hooks.py
+++ b/contrib/python/requests/requests/hooks.py
@@ -15,14 +15,14 @@ HOOKS = ['response']
def default_hooks():
- return {event: [] for event in HOOKS}
+ return {event: [] for event in HOOKS}
# TODO: response is the only one
def dispatch_hook(key, hooks, hook_data, **kwargs):
"""Dispatches a hook dictionary on a given piece of data."""
- hooks = hooks or {}
+ hooks = hooks or {}
hooks = hooks.get(key)
if hooks:
if hasattr(hooks, '__call__'):
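hooks.py keeps one callback list per event, and 'response' is currently the only event. A sketch of registering hooks both per-request and per-session, with httpbin.org again as a stand-in URL:

    import requests

    def log_response(response, *args, **kwargs):
        print("got", response.status_code, "from", response.url)

    # a single callable or an iterable of callables is accepted
    requests.get("https://httpbin.org/get", hooks={"response": log_response})

    s = requests.Session()
    s.hooks["response"].append(log_response)   # applies to every session request
    s.get("https://httpbin.org/get")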
diff --git a/contrib/python/requests/requests/models.py b/contrib/python/requests/requests/models.py
index 21b74ddcce..dfbea854f9 100644
--- a/contrib/python/requests/requests/models.py
+++ b/contrib/python/requests/requests/models.py
@@ -12,7 +12,7 @@ import sys
# Import encoding now, to avoid implicit import later.
# Implicit import within threads may cause LookupError when standard library is in a ZIP,
-# such as in Embedded Python. See https://github.com/psf/requests/issues/3578.
+# such as in Embedded Python. See https://github.com/psf/requests/issues/3578.
import encodings.idna
from urllib3.fields import RequestField
@@ -29,18 +29,18 @@ from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .exceptions import (
HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
- ContentDecodingError, ConnectionError, StreamConsumedError,
- InvalidJSONError)
-from .exceptions import JSONDecodeError as RequestsJSONDecodeError
+ ContentDecodingError, ConnectionError, StreamConsumedError,
+ InvalidJSONError)
+from .exceptions import JSONDecodeError as RequestsJSONDecodeError
from ._internal_utils import to_native_string, unicode_is_ascii
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, check_header_validity)
from .compat import (
- Callable, Mapping,
+ Callable, Mapping,
cookielib, urlunparse, urlsplit, urlencode, str, bytes,
- is_py2, chardet, builtin_str, basestring, JSONDecodeError)
+ is_py2, chardet, builtin_str, basestring, JSONDecodeError)
from .compat import json as complexjson
from .status_codes import codes
@@ -157,12 +157,12 @@ class RequestEncodingMixin(object):
if isinstance(fp, (str, bytes, bytearray)):
fdata = fp
- elif hasattr(fp, 'read'):
- fdata = fp.read()
- elif fp is None:
- continue
+ elif hasattr(fp, 'read'):
+ fdata = fp.read()
+ elif fp is None:
+ continue
else:
- fdata = fp
+ fdata = fp
rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
rf.make_multipart(content_type=ft)
@@ -180,10 +180,10 @@ class RequestHooksMixin(object):
if event not in self.hooks:
raise ValueError('Unsupported event specified, with event name "%s"' % (event))
- if isinstance(hook, Callable):
+ if isinstance(hook, Callable):
self.hooks[event].append(hook)
elif hasattr(hook, '__iter__'):
- self.hooks[event].extend(h for h in hook if isinstance(h, Callable))
+ self.hooks[event].extend(h for h in hook if isinstance(h, Callable))
def deregister_hook(self, event, hook):
"""Deregister a previously registered hook.
@@ -206,13 +206,13 @@ class Request(RequestHooksMixin):
:param url: URL to send.
:param headers: dictionary of headers to send.
:param files: dictionary of {filename: fileobject} files to multipart upload.
- :param data: the body to attach to the request. If a dictionary or
- list of tuples ``[(key, value)]`` is provided, form-encoding will
- take place.
+ :param data: the body to attach to the request. If a dictionary or
+ list of tuples ``[(key, value)]`` is provided, form-encoding will
+ take place.
:param json: json for the body to attach to the request (if files or data is not specified).
- :param params: URL parameters to append to the URL. If a dictionary or
- list of tuples ``[(key, value)]`` is provided, form-encoding will
- take place.
+ :param params: URL parameters to append to the URL. If a dictionary or
+ list of tuples ``[(key, value)]`` is provided, form-encoding will
+ take place.
:param auth: Auth handler or (user, pass) tuple.
:param cookies: dictionary or CookieJar of cookies to attach to this request.
:param hooks: dictionary of callback hooks, for internal usage.
@@ -220,7 +220,7 @@ class Request(RequestHooksMixin):
Usage::
>>> import requests
- >>> req = requests.Request('GET', 'https://httpbin.org/get')
+ >>> req = requests.Request('GET', 'https://httpbin.org/get')
>>> req.prepare()
<PreparedRequest [GET]>
"""
@@ -275,16 +275,16 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
"""The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
containing the exact bytes that will be sent to the server.
- Instances are generated from a :class:`Request <Request>` object, and
- should not be instantiated manually; doing so may produce undesirable
- effects.
+ Instances are generated from a :class:`Request <Request>` object, and
+ should not be instantiated manually; doing so may produce undesirable
+ effects.
Usage::
>>> import requests
- >>> req = requests.Request('GET', 'https://httpbin.org/get')
+ >>> req = requests.Request('GET', 'https://httpbin.org/get')
>>> r = req.prepare()
- >>> r
+ >>> r
<PreparedRequest [GET]>
>>> s = requests.Session()
@@ -363,7 +363,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
#: We're unable to blindly call unicode/str functions
#: as this will include the bytestring indicator (b'')
#: on python 3.x.
- #: https://github.com/psf/requests/pull/2238
+ #: https://github.com/psf/requests/pull/2238
if isinstance(url, bytes):
url = url.decode('utf8')
else:
@@ -386,7 +386,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
raise InvalidURL(*e.args)
if not scheme:
- error = ("Invalid URL {0!r}: No scheme supplied. Perhaps you meant http://{0}?")
+ error = ("Invalid URL {0!r}: No scheme supplied. Perhaps you meant http://{0}?")
error = error.format(to_native_string(url, 'utf8'))
raise MissingSchema(error)
@@ -403,7 +403,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
host = self._get_idna_encoded_host(host)
except UnicodeError:
raise InvalidURL('URL has an invalid label.')
- elif host.startswith((u'*', u'.')):
+ elif host.startswith((u'*', u'.')):
raise InvalidURL('URL has an invalid label.')
# Carefully reconstruct the network location
@@ -468,25 +468,25 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
# urllib3 requires a bytes-like body. Python 2's json.dumps
# provides this natively, but Python 3 gives a Unicode string.
content_type = 'application/json'
-
- try:
- body = complexjson.dumps(json, allow_nan=False)
- except ValueError as ve:
- raise InvalidJSONError(ve, request=self)
-
+
+ try:
+ body = complexjson.dumps(json, allow_nan=False)
+ except ValueError as ve:
+ raise InvalidJSONError(ve, request=self)
+
if not isinstance(body, bytes):
body = body.encode('utf-8')
is_stream = all([
hasattr(data, '__iter__'),
- not isinstance(data, (basestring, list, tuple, Mapping))
+ not isinstance(data, (basestring, list, tuple, Mapping))
])
- if is_stream:
- try:
- length = super_len(data)
- except (TypeError, AttributeError, UnsupportedOperation):
- length = None
+ if is_stream:
+ try:
+ length = super_len(data)
+ except (TypeError, AttributeError, UnsupportedOperation):
+ length = None
body = data
@@ -618,7 +618,7 @@ class Response(object):
#: File-like object representation of response (for advanced usage).
#: Use of ``raw`` requires that ``stream=True`` be set on the request.
- #: This requirement does not apply for use internally to Requests.
+ #: This requirement does not apply for use internally to Requests.
self.raw = None
#: Final URL location of Response.
@@ -662,7 +662,7 @@ class Response(object):
if not self._content_consumed:
self.content
- return {attr: getattr(self, attr, None) for attr in self.__attrs__}
+ return {attr: getattr(self, attr, None) for attr in self.__attrs__}
def __setstate__(self, state):
for name, value in state.items():
@@ -701,11 +701,11 @@ class Response(object):
@property
def ok(self):
- """Returns True if :attr:`status_code` is less than 400, False if not.
+ """Returns True if :attr:`status_code` is less than 400, False if not.
This attribute checks if the status code of the response is between
400 and 600 to see if there was a client error or a server error. If
- the status code is between 200 and 400, this will return True. This
+ the status code is between 200 and 400, this will return True. This
is **not** a check to see if the response code is ``200 OK``.
"""
try:
@@ -733,7 +733,7 @@ class Response(object):
@property
def apparent_encoding(self):
- """The apparent encoding, provided by the charset_normalizer or chardet libraries."""
+ """The apparent encoding, provided by the charset_normalizer or chardet libraries."""
return chardet.detect(self.content)['encoding']
def iter_content(self, chunk_size=1, decode_unicode=False):
@@ -791,7 +791,7 @@ class Response(object):
return chunks
- def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None):
+ def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None):
"""Iterates over the response data, one line at a time. When
stream=True is set on the request, this avoids reading the
content at once into memory for large responses.
@@ -835,7 +835,7 @@ class Response(object):
if self.status_code == 0 or self.raw is None:
self._content = None
else:
- self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''
+ self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b''
self._content_consumed = True
# don't need to release the connection; that's been handled by urllib3
@@ -847,7 +847,7 @@ class Response(object):
"""Content of the response, in unicode.
If Response.encoding is None, encoding will be guessed using
- ``charset_normalizer`` or ``chardet``.
+ ``charset_normalizer`` or ``chardet``.
The encoding of the response content is determined based solely on HTTP
headers, following RFC 2616 to the letter. If you can take advantage of
@@ -884,14 +884,14 @@ class Response(object):
r"""Returns the json-encoded content of a response, if any.
:param \*\*kwargs: Optional arguments that ``json.loads`` takes.
- :raises requests.exceptions.JSONDecodeError: If the response body does not
- contain valid json.
+ :raises requests.exceptions.JSONDecodeError: If the response body does not
+ contain valid json.
"""
if not self.encoding and self.content and len(self.content) > 3:
# No encoding set. JSON RFC 4627 section 3 states we should expect
# UTF-8, -16 or -32. Detect which one to use; If the detection or
- # decoding fails, fall back to `self.text` (using charset_normalizer to make
+ # decoding fails, fall back to `self.text` (using charset_normalizer to make
# a best guess).
encoding = guess_json_utf(self.content)
if encoding is not None:
@@ -906,16 +906,16 @@ class Response(object):
# used.
pass
- try:
- return complexjson.loads(self.text, **kwargs)
- except JSONDecodeError as e:
- # Catch JSON-related errors and raise as requests.JSONDecodeError
- # This aliases json.JSONDecodeError and simplejson.JSONDecodeError
- if is_py2: # e is a ValueError
- raise RequestsJSONDecodeError(e.message)
- else:
- raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
-
+ try:
+ return complexjson.loads(self.text, **kwargs)
+ except JSONDecodeError as e:
+ # Catch JSON-related errors and raise as requests.JSONDecodeError
+ # This aliases json.JSONDecodeError and simplejson.JSONDecodeError
+ if is_py2: # e is a ValueError
+ raise RequestsJSONDecodeError(e.message)
+ else:
+ raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)
+
@property
def links(self):
"""Returns the parsed header links of the response, if any."""
@@ -935,7 +935,7 @@ class Response(object):
return l
def raise_for_status(self):
- """Raises :class:`HTTPError`, if one occurred."""
+ """Raises :class:`HTTPError`, if one occurred."""
http_error_msg = ''
if isinstance(self.reason, bytes):
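models.py defines the Request/PreparedRequest pair and the Response accessors (json(), raise_for_status(), iter_lines()) touched above. A condensed sketch of the prepare-and-send flow from the docstrings, with httpbin.org standing in for a real service:

    import requests

    req = requests.Request("GET", "https://httpbin.org/get",
                           params={"page": 1}, headers={"X-Demo": "1"})
    prepped = req.prepare()                  # <PreparedRequest [GET]>

    with requests.Session() as s:
        resp = s.send(prepped, timeout=10)

    resp.raise_for_status()                  # HTTPError on 4xx/5xx
    print(resp.json()["args"])               # {'page': '1'}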
diff --git a/contrib/python/requests/requests/packages.py b/contrib/python/requests/requests/packages.py
index c0e0143056..00196bff25 100644
--- a/contrib/python/requests/requests/packages.py
+++ b/contrib/python/requests/requests/packages.py
@@ -1,17 +1,17 @@
import sys
-try:
- import chardet
-except ImportError:
- import charset_normalizer as chardet
- import warnings
-
- warnings.filterwarnings('ignore', 'Trying to detect', module='charset_normalizer')
-
+try:
+ import chardet
+except ImportError:
+ import charset_normalizer as chardet
+ import warnings
+
+ warnings.filterwarnings('ignore', 'Trying to detect', module='charset_normalizer')
+
# This code exists for backwards compatibility reasons.
# I don't like it either. Just look the other way. :)
-for package in ('urllib3', 'idna'):
+for package in ('urllib3', 'idna'):
locals()[package] = __import__(package)
# This traversal is apparently necessary such that the identities are
# preserved (requests.packages.urllib3.* is urllib3.*)
@@ -19,8 +19,8 @@ for package in ('urllib3', 'idna'):
if mod == package or mod.startswith(package + '.'):
sys.modules['requests.packages.' + mod] = sys.modules[mod]
-target = chardet.__name__
-for mod in list(sys.modules):
- if mod == target or mod.startswith(target + '.'):
- sys.modules['requests.packages.' + target.replace(target, 'chardet')] = sys.modules[mod]
+target = chardet.__name__
+for mod in list(sys.modules):
+ if mod == target or mod.startswith(target + '.'):
+ sys.modules['requests.packages.' + target.replace(target, 'chardet')] = sys.modules[mod]
# Kinda cool, though, right?
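packages.py patches sys.modules so the legacy requests.packages.* names keep resolving to the real top-level modules. Roughly what that aliasing means in practice (a sketch, not an endorsement of the legacy import path):

    import urllib3
    import requests
    from requests.packages import urllib3 as vendored_urllib3

    # both names refer to the very same module object, so isinstance checks
    # against urllib3 exception classes behave identically
    print(vendored_urllib3 is urllib3)                                  # True
    print(requests.packages.urllib3.exceptions is urllib3.exceptions)   # True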
diff --git a/contrib/python/requests/requests/sessions.py b/contrib/python/requests/requests/sessions.py
index b077af19ff..3f59cab922 100644
--- a/contrib/python/requests/requests/sessions.py
+++ b/contrib/python/requests/requests/sessions.py
@@ -1,26 +1,26 @@
# -*- coding: utf-8 -*-
"""
-requests.sessions
-~~~~~~~~~~~~~~~~~
+requests.sessions
+~~~~~~~~~~~~~~~~~
This module provides a Session object to manage and persist settings across
requests (cookies, auth, proxies).
"""
import os
-import sys
+import sys
import time
from datetime import timedelta
-from collections import OrderedDict
+from collections import OrderedDict
from .auth import _basic_auth_str
-from .compat import cookielib, is_py3, urljoin, urlparse, Mapping
+from .compat import cookielib, is_py3, urljoin, urlparse, Mapping
from .cookies import (
cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies)
from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT
from .hooks import default_hooks, dispatch_hook
from ._internal_utils import to_native_string
-from .utils import to_key_val_list, default_headers, DEFAULT_PORTS
+from .utils import to_key_val_list, default_headers, DEFAULT_PORTS
from .exceptions import (
TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError)
@@ -29,7 +29,7 @@ from .adapters import HTTPAdapter
from .utils import (
requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
- get_auth_from_url, rewind_body, resolve_proxies
+ get_auth_from_url, rewind_body, resolve_proxies
)
from .status_codes import codes
@@ -38,8 +38,8 @@ from .status_codes import codes
from .models import REDIRECT_STATI
# Preferred clock, based on which one is more accurate on a given system.
-if sys.platform == 'win32':
- try: # Python 3.4+
+if sys.platform == 'win32':
+ try: # Python 3.4+
preferred_clock = time.perf_counter
except AttributeError: # Earlier than Python 3.
preferred_clock = time.clock
@@ -116,31 +116,31 @@ class SessionRedirectMixin(object):
return to_native_string(location, 'utf8')
return None
- def should_strip_auth(self, old_url, new_url):
- """Decide whether Authorization header should be removed when redirecting"""
- old_parsed = urlparse(old_url)
- new_parsed = urlparse(new_url)
- if old_parsed.hostname != new_parsed.hostname:
- return True
- # Special case: allow http -> https redirect when using the standard
- # ports. This isn't specified by RFC 7235, but is kept to avoid
- # breaking backwards compatibility with older versions of requests
- # that allowed any redirects on the same host.
- if (old_parsed.scheme == 'http' and old_parsed.port in (80, None)
- and new_parsed.scheme == 'https' and new_parsed.port in (443, None)):
- return False
-
- # Handle default port usage corresponding to scheme.
- changed_port = old_parsed.port != new_parsed.port
- changed_scheme = old_parsed.scheme != new_parsed.scheme
- default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None)
- if (not changed_scheme and old_parsed.port in default_port
- and new_parsed.port in default_port):
- return False
-
- # Standard case: root URI must match
- return changed_port or changed_scheme
-
+ def should_strip_auth(self, old_url, new_url):
+ """Decide whether Authorization header should be removed when redirecting"""
+ old_parsed = urlparse(old_url)
+ new_parsed = urlparse(new_url)
+ if old_parsed.hostname != new_parsed.hostname:
+ return True
+ # Special case: allow http -> https redirect when using the standard
+ # ports. This isn't specified by RFC 7235, but is kept to avoid
+ # breaking backwards compatibility with older versions of requests
+ # that allowed any redirects on the same host.
+ if (old_parsed.scheme == 'http' and old_parsed.port in (80, None)
+ and new_parsed.scheme == 'https' and new_parsed.port in (443, None)):
+ return False
+
+ # Handle default port usage corresponding to scheme.
+ changed_port = old_parsed.port != new_parsed.port
+ changed_scheme = old_parsed.scheme != new_parsed.scheme
+ default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None)
+ if (not changed_scheme and old_parsed.port in default_port
+ and new_parsed.port in default_port):
+ return False
+
+ # Standard case: root URI must match
+ return changed_port or changed_scheme
+
def resolve_redirects(self, resp, req, stream=False, timeout=None,
verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs):
"""Receives a Response. Returns a generator of Responses or Requests."""
@@ -148,7 +148,7 @@ class SessionRedirectMixin(object):
hist = [] # keep track of history
url = self.get_redirect_target(resp)
- previous_fragment = urlparse(req.url).fragment
+ previous_fragment = urlparse(req.url).fragment
while url:
prepared_request = req.copy()
@@ -163,7 +163,7 @@ class SessionRedirectMixin(object):
resp.raw.read(decode_content=False)
if len(resp.history) >= self.max_redirects:
- raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp)
+ raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp)
# Release the connection back into the pool.
resp.close()
@@ -171,14 +171,14 @@ class SessionRedirectMixin(object):
# Handle redirection without scheme (see: RFC 1808 Section 4)
if url.startswith('//'):
parsed_rurl = urlparse(resp.url)
- url = ':'.join([to_native_string(parsed_rurl.scheme), url])
+ url = ':'.join([to_native_string(parsed_rurl.scheme), url])
- # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)
+ # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2)
parsed = urlparse(url)
- if parsed.fragment == '' and previous_fragment:
- parsed = parsed._replace(fragment=previous_fragment)
- elif parsed.fragment:
- previous_fragment = parsed.fragment
+ if parsed.fragment == '' and previous_fragment:
+ parsed = parsed._replace(fragment=previous_fragment)
+ elif parsed.fragment:
+ previous_fragment = parsed.fragment
url = parsed.geturl()
# Facilitate relative 'location' headers, as allowed by RFC 7231.
@@ -193,16 +193,16 @@ class SessionRedirectMixin(object):
self.rebuild_method(prepared_request, resp)
- # https://github.com/psf/requests/issues/1084
+ # https://github.com/psf/requests/issues/1084
if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect):
- # https://github.com/psf/requests/issues/3490
+ # https://github.com/psf/requests/issues/3490
purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding')
for header in purged_headers:
prepared_request.headers.pop(header, None)
prepared_request.body = None
headers = prepared_request.headers
- headers.pop('Cookie', None)
+ headers.pop('Cookie', None)
# Extract any cookies sent on the response to the cookiejar
# in the new request. Because we've mutated our copied prepared
@@ -259,10 +259,10 @@ class SessionRedirectMixin(object):
headers = prepared_request.headers
url = prepared_request.url
- if 'Authorization' in headers and self.should_strip_auth(response.request.url, url):
+ if 'Authorization' in headers and self.should_strip_auth(response.request.url, url):
# If we get redirected to a new host, we should strip out any
# authentication headers.
- del headers['Authorization']
+ del headers['Authorization']
# .netrc might have more auth for us on our new host.
new_auth = get_netrc_auth(url) if self.trust_env else None
@@ -282,8 +282,8 @@ class SessionRedirectMixin(object):
:rtype: dict
"""
headers = prepared_request.headers
- scheme = urlparse(prepared_request.url).scheme
- new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)
+ scheme = urlparse(prepared_request.url).scheme
+ new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)
if 'Proxy-Authorization' in headers:
del headers['Proxy-Authorization']
@@ -304,7 +304,7 @@ class SessionRedirectMixin(object):
"""
method = prepared_request.method
- # https://tools.ietf.org/html/rfc7231#section-6.4.4
+ # https://tools.ietf.org/html/rfc7231#section-6.4.4
if response.status_code == codes.see_other and method != 'HEAD':
method = 'GET'
@@ -330,19 +330,19 @@ class Session(SessionRedirectMixin):
>>> import requests
>>> s = requests.Session()
- >>> s.get('https://httpbin.org/get')
+ >>> s.get('https://httpbin.org/get')
<Response [200]>
Or as a context manager::
>>> with requests.Session() as s:
- ... s.get('https://httpbin.org/get')
+ ... s.get('https://httpbin.org/get')
<Response [200]>
"""
__attrs__ = [
'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
- 'cert', 'adapters', 'stream', 'trust_env',
+ 'cert', 'adapters', 'stream', 'trust_env',
'max_redirects',
]
@@ -374,13 +374,13 @@ class Session(SessionRedirectMixin):
self.stream = False
#: SSL Verification default.
- #: Defaults to `True`, requiring requests to verify the TLS certificate at the
- #: remote end.
- #: If verify is set to `False`, requests will accept any TLS certificate
- #: presented by the server, and will ignore hostname mismatches and/or
- #: expired certificates, which will make your application vulnerable to
- #: man-in-the-middle (MitM) attacks.
- #: Only set this to `False` for testing.
+ #: Defaults to `True`, requiring requests to verify the TLS certificate at the
+ #: remote end.
+ #: If verify is set to `False`, requests will accept any TLS certificate
+ #: presented by the server, and will ignore hostname mismatches and/or
+ #: expired certificates, which will make your application vulnerable to
+ #: man-in-the-middle (MitM) attacks.
+ #: Only set this to `False` for testing.
self.verify = True
#: SSL client certificate default, if String, path to ssl client
@@ -465,8 +465,8 @@ class Session(SessionRedirectMixin):
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query
string for the :class:`Request`.
- :param data: (optional) Dictionary, list of tuples, bytes, or file-like
- object to send in the body of the :class:`Request`.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
:param json: (optional) json to send in the body of the
:class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the
@@ -489,12 +489,12 @@ class Session(SessionRedirectMixin):
content. Defaults to ``False``.
:param verify: (optional) Either a boolean, in which case it controls whether we verify
the server's TLS certificate, or a string, in which case it must be a path
- to a CA bundle to use. Defaults to ``True``. When set to
- ``False``, requests will accept any TLS certificate presented by
- the server, and will ignore hostname mismatches and/or expired
- certificates, which will make your application vulnerable to
- man-in-the-middle (MitM) attacks. Setting verify to ``False``
- may be useful during local development or testing.
+ to a CA bundle to use. Defaults to ``True``. When set to
+ ``False``, requests will accept any TLS certificate presented by
+ the server, and will ignore hostname mismatches and/or expired
+ certificates, which will make your application vulnerable to
+ man-in-the-middle (MitM) attacks. Setting verify to ``False``
+ may be useful during local development or testing.
:param cert: (optional) if String, path to ssl client cert file (.pem).
If Tuple, ('cert', 'key') pair.
:rtype: requests.Response
@@ -567,8 +567,8 @@ class Session(SessionRedirectMixin):
r"""Sends a POST request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
- :param data: (optional) Dictionary, list of tuples, bytes, or file-like
- object to send in the body of the :class:`Request`.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
:param json: (optional) json to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:rtype: requests.Response
@@ -580,8 +580,8 @@ class Session(SessionRedirectMixin):
r"""Sends a PUT request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
- :param data: (optional) Dictionary, list of tuples, bytes, or file-like
- object to send in the body of the :class:`Request`.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:rtype: requests.Response
"""
@@ -592,8 +592,8 @@ class Session(SessionRedirectMixin):
r"""Sends a PATCH request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
- :param data: (optional) Dictionary, list of tuples, bytes, or file-like
- object to send in the body of the :class:`Request`.
+ :param data: (optional) Dictionary, list of tuples, bytes, or file-like
+ object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:rtype: requests.Response
"""
@@ -620,10 +620,10 @@ class Session(SessionRedirectMixin):
kwargs.setdefault('stream', self.stream)
kwargs.setdefault('verify', self.verify)
kwargs.setdefault('cert', self.cert)
- if 'proxies' not in kwargs:
- kwargs['proxies'] = resolve_proxies(
- request, self.proxies, self.trust_env
- )
+ if 'proxies' not in kwargs:
+ kwargs['proxies'] = resolve_proxies(
+ request, self.proxies, self.trust_env
+ )
# It's possible that users might accidentally send a Request object.
# Guard against that specific failure case.
@@ -661,12 +661,12 @@ class Session(SessionRedirectMixin):
extract_cookies_to_jar(self.cookies, request, r.raw)
# Resolve redirects if allowed.
- if allow_redirects:
- # Redirect resolving generator.
- gen = self.resolve_redirects(r, request, **kwargs)
- history = [resp for resp in gen]
- else:
- history = []
+ if allow_redirects:
+ # Redirect resolving generator.
+ gen = self.resolve_redirects(r, request, **kwargs)
+ history = [resp for resp in gen]
+ else:
+ history = []
# Shuffle things around if there's history.
if history:
@@ -725,11 +725,11 @@ class Session(SessionRedirectMixin):
"""
for (prefix, adapter) in self.adapters.items():
- if url.lower().startswith(prefix.lower()):
+ if url.lower().startswith(prefix.lower()):
return adapter
# Nothing matches :-/
- raise InvalidSchema("No connection adapters were found for {!r}".format(url))
+ raise InvalidSchema("No connection adapters were found for {!r}".format(url))
def close(self):
"""Closes all adapters and as such the session"""
@@ -748,7 +748,7 @@ class Session(SessionRedirectMixin):
self.adapters[key] = self.adapters.pop(key)
def __getstate__(self):
- state = {attr: getattr(self, attr, None) for attr in self.__attrs__}
+ state = {attr: getattr(self, attr, None) for attr in self.__attrs__}
return state
def __setstate__(self, state):
@@ -760,12 +760,12 @@ def session():
"""
Returns a :class:`Session` for context-management.
- .. deprecated:: 1.0.0
-
- This method has been deprecated since version 1.0.0 and is only kept for
- backwards compatibility. New code should use :class:`~requests.sessions.Session`
- to create a session. This may be removed at a future date.
-
+ .. deprecated:: 1.0.0
+
+ This method has been deprecated since version 1.0.0 and is only kept for
+ backwards compatibility. New code should use :class:`~requests.sessions.Session`
+ to create a session. This may be removed at a future date.
+
:rtype: Session
"""
return Session()
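sessions.py carries the Session defaults and the redirect/auth-stripping logic (should_strip_auth) shown above. A short sketch of session reuse and redirect history, again with httpbin.org as a placeholder host:

    import requests

    with requests.Session() as s:
        s.headers.update({"User-Agent": "example-client/1.0"})

        r = s.get("https://httpbin.org/redirect/2")
        print(r.status_code)                        # 200 after following redirects
        print([h.status_code for h in r.history])   # [302, 302]

        # allow_redirects=False returns the redirect response itself
        raw = s.get("https://httpbin.org/redirect/1", allow_redirects=False)
        print(raw.status_code, raw.headers["Location"])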
diff --git a/contrib/python/requests/requests/status_codes.py b/contrib/python/requests/requests/status_codes.py
index 78bc96534f..d80a7cd4dd 100644
--- a/contrib/python/requests/requests/status_codes.py
+++ b/contrib/python/requests/requests/status_codes.py
@@ -1,25 +1,25 @@
# -*- coding: utf-8 -*-
-r"""
-The ``codes`` object defines a mapping from common names for HTTP statuses
-to their numerical codes, accessible either as attributes or as dictionary
-items.
-
-Example::
-
- >>> import requests
- >>> requests.codes['temporary_redirect']
- 307
- >>> requests.codes.teapot
- 418
- >>> requests.codes['\o/']
- 200
-
-Some codes have multiple names, and both upper- and lower-case versions of
-the names are allowed. For example, ``codes.ok``, ``codes.OK``, and
-``codes.okay`` all correspond to the HTTP status code 200.
-"""
-
+r"""
+The ``codes`` object defines a mapping from common names for HTTP statuses
+to their numerical codes, accessible either as attributes or as dictionary
+items.
+
+Example::
+
+ >>> import requests
+ >>> requests.codes['temporary_redirect']
+ 307
+ >>> requests.codes.teapot
+ 418
+ >>> requests.codes['\o/']
+ 200
+
+Some codes have multiple names, and both upper- and lower-case versions of
+the names are allowed. For example, ``codes.ok``, ``codes.OK``, and
+``codes.okay`` all correspond to the HTTP status code 200.
+"""
+
from .structures import LookupDict
_codes = {
@@ -104,20 +104,20 @@ _codes = {
codes = LookupDict(name='status_codes')
-def _init():
- for code, titles in _codes.items():
- for title in titles:
- setattr(codes, title, code)
- if not title.startswith(('\\', '/')):
- setattr(codes, title.upper(), code)
-
- def doc(code):
- names = ', '.join('``%s``' % n for n in _codes[code])
- return '* %d: %s' % (code, names)
-
- global __doc__
- __doc__ = (__doc__ + '\n' +
- '\n'.join(doc(code) for code in sorted(_codes))
- if __doc__ is not None else None)
-
-_init()
+def _init():
+ for code, titles in _codes.items():
+ for title in titles:
+ setattr(codes, title, code)
+ if not title.startswith(('\\', '/')):
+ setattr(codes, title.upper(), code)
+
+ def doc(code):
+ names = ', '.join('``%s``' % n for n in _codes[code])
+ return '* %d: %s' % (code, names)
+
+ global __doc__
+ __doc__ = (__doc__ + '\n' +
+ '\n'.join(doc(code) for code in sorted(_codes))
+ if __doc__ is not None else None)
+
+_init()
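status_codes.py materialises the codes LookupDict described in the docstring above: every name is available as an attribute and as a dictionary key, in both upper and lower case. A sketch:

    import requests
    from requests.status_codes import codes

    print(codes.ok, codes.OK, codes["okay"])   # 200 200 200
    print(codes.temporary_redirect)            # 307
    print(codes.teapot)                        # 418

    r = requests.get("https://httpbin.org/status/404")   # illustrative URL
    if r.status_code == codes.not_found:
        print("resource is missing")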
diff --git a/contrib/python/requests/requests/structures.py b/contrib/python/requests/requests/structures.py
index 09a3480b88..8ee0ba7a08 100644
--- a/contrib/python/requests/requests/structures.py
+++ b/contrib/python/requests/requests/structures.py
@@ -7,16 +7,16 @@ requests.structures
Data structures that power Requests.
"""
-from collections import OrderedDict
+from collections import OrderedDict
-from .compat import Mapping, MutableMapping
+from .compat import Mapping, MutableMapping
-
-class CaseInsensitiveDict(MutableMapping):
+
+class CaseInsensitiveDict(MutableMapping):
"""A case-insensitive ``dict``-like object.
Implements all methods and operations of
- ``MutableMapping`` as well as dict's ``copy``. Also
+ ``MutableMapping`` as well as dict's ``copy``. Also
provides ``lower_items``.
All keys are expected to be strings. The structure remembers the
@@ -71,7 +71,7 @@ class CaseInsensitiveDict(MutableMapping):
)
def __eq__(self, other):
- if isinstance(other, Mapping):
+ if isinstance(other, Mapping):
other = CaseInsensitiveDict(other)
else:
return NotImplemented
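structures.py supplies the CaseInsensitiveDict used for headers: lookups and comparisons ignore case, while iteration preserves the casing of the key as it was last set. A sketch:

    from requests.structures import CaseInsensitiveDict

    headers = CaseInsensitiveDict()
    headers["Content-Type"] = "application/json"

    print(headers["content-type"])                            # 'application/json'
    print("CONTENT-TYPE" in headers)                          # True
    print(list(headers))                                      # ['Content-Type']
    print(headers == {"content-TYPE": "application/json"})    # True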
diff --git a/contrib/python/requests/requests/utils.py b/contrib/python/requests/requests/utils.py
index a383b5aeee..611bdbe290 100644
--- a/contrib/python/requests/requests/utils.py
+++ b/contrib/python/requests/requests/utils.py
@@ -15,13 +15,13 @@ import os
import re
import socket
import struct
-import sys
-import tempfile
+import sys
+import tempfile
import warnings
-import zipfile
-from collections import OrderedDict
-from urllib3.util import make_headers
-from urllib3.util import parse_url
+import zipfile
+from collections import OrderedDict
+from urllib3.util import make_headers
+from urllib3.util import parse_url
from .__version__ import __version__
from . import certs
@@ -29,9 +29,9 @@ from . import certs
from ._internal_utils import to_native_string
from .compat import parse_http_list as _parse_list_header
from .compat import (
- quote, urlparse, bytes, str, unquote, getproxies,
+ quote, urlparse, bytes, str, unquote, getproxies,
proxy_bypass, urlunparse, basestring, integer_types, is_py3,
- proxy_bypass_environment, getproxies_environment, Mapping)
+ proxy_bypass_environment, getproxies_environment, Mapping)
from .cookies import cookiejar_from_dict
from .structures import CaseInsensitiveDict
from .exceptions import (
@@ -41,33 +41,33 @@ NETRC_FILES = ('.netrc', '_netrc')
DEFAULT_CA_BUNDLE_PATH = certs.where()
-DEFAULT_PORTS = {'http': 80, 'https': 443}
+DEFAULT_PORTS = {'http': 80, 'https': 443}
-# Ensure that ', ' is used to preserve previous delimiter behavior.
-DEFAULT_ACCEPT_ENCODING = ", ".join(
- re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"])
-)
-
-
-if sys.platform == 'win32':
+# Ensure that ', ' is used to preserve previous delimiter behavior.
+DEFAULT_ACCEPT_ENCODING = ", ".join(
+ re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"])
+)
+
+
+if sys.platform == 'win32':
# provide a proxy_bypass version on Windows without DNS lookups
def proxy_bypass_registry(host):
try:
- if is_py3:
- import winreg
- else:
- import _winreg as winreg
- except ImportError:
- return False
-
- try:
+ if is_py3:
+ import winreg
+ else:
+ import _winreg as winreg
+ except ImportError:
+ return False
+
+ try:
internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
- # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
- proxyEnable = int(winreg.QueryValueEx(internetSettings,
- 'ProxyEnable')[0])
- # ProxyOverride is almost always a string
+ # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
+ proxyEnable = int(winreg.QueryValueEx(internetSettings,
+ 'ProxyEnable')[0])
+ # ProxyOverride is almost always a string
proxyOverride = winreg.QueryValueEx(internetSettings,
'ProxyOverride')[0]
except OSError:
@@ -125,10 +125,10 @@ def super_len(o):
elif hasattr(o, 'fileno'):
try:
fileno = o.fileno()
- except (io.UnsupportedOperation, AttributeError):
- # AttributeError is a surprising exception, seeing as how we've just checked
- # that `hasattr(o, 'fileno')`. It happens for objects obtained via
- # `Tarfile.extractfile()`, per issue 5229.
+ except (io.UnsupportedOperation, AttributeError):
+ # AttributeError is a surprising exception, seeing as how we've just checked
+ # that `hasattr(o, 'fileno')`. It happens for objects obtained via
+ # `Tarfile.extractfile()`, per issue 5229.
pass
else:
total_length = os.fstat(fileno).st_size
@@ -158,7 +158,7 @@ def super_len(o):
current_position = total_length
else:
if hasattr(o, 'seek') and total_length is None:
- # StringIO and BytesIO have seek but no usable fileno
+ # StringIO and BytesIO have seek but no usable fileno
try:
# seek to end of file
o.seek(0, 2)
@@ -179,24 +179,24 @@ def super_len(o):
def get_netrc_auth(url, raise_errors=False):
"""Returns the Requests tuple auth for a given url from netrc."""
- netrc_file = os.environ.get('NETRC')
- if netrc_file is not None:
- netrc_locations = (netrc_file,)
- else:
- netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES)
-
+ netrc_file = os.environ.get('NETRC')
+ if netrc_file is not None:
+ netrc_locations = (netrc_file,)
+ else:
+ netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES)
+
try:
from netrc import netrc, NetrcParseError
netrc_path = None
- for f in netrc_locations:
+ for f in netrc_locations:
try:
- loc = os.path.expanduser(f)
+ loc = os.path.expanduser(f)
except KeyError:
# os.path.expanduser can fail when $HOME is undefined and
- # getpwuid fails. See https://bugs.python.org/issue20164 &
- # https://github.com/psf/requests/issues/1846
+ # getpwuid fails. See https://bugs.python.org/issue20164 &
+ # https://github.com/psf/requests/issues/1846
return
if os.path.exists(loc):
@@ -228,7 +228,7 @@ def get_netrc_auth(url, raise_errors=False):
if raise_errors:
raise
- # App Engine hackiness.
+ # App Engine hackiness.
except (ImportError, AttributeError):
pass
@@ -241,57 +241,57 @@ def guess_filename(obj):
return os.path.basename(name)
-def extract_zipped_paths(path):
- """Replace nonexistent paths that look like they refer to a member of a zip
- archive with the location of an extracted copy of the target, or else
- just return the provided path unchanged.
- """
- if callable(path) or os.path.exists(path):
- # this is already a valid path, no need to do anything further
- return path
-
- # find the first valid part of the provided path and treat that as a zip archive
- # assume the rest of the path is the name of a member in the archive
- archive, member = os.path.split(path)
- while archive and not os.path.exists(archive):
- archive, prefix = os.path.split(archive)
- if not prefix:
- # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split),
- # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users
- break
- member = '/'.join([prefix, member])
-
- if not zipfile.is_zipfile(archive):
- return path
-
- zip_file = zipfile.ZipFile(archive)
- if member not in zip_file.namelist():
- return path
-
- # we have a valid zip archive and a valid member of that archive
- tmp = tempfile.gettempdir()
- extracted_path = os.path.join(tmp, member.split('/')[-1])
- if not os.path.exists(extracted_path):
- # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition
- with atomic_open(extracted_path) as file_handler:
- file_handler.write(zip_file.read(member))
- return extracted_path
-
-
-@contextlib.contextmanager
-def atomic_open(filename):
- """Write a file to the disk in an atomic fashion"""
- replacer = os.rename if sys.version_info[0] == 2 else os.replace
- tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename))
- try:
- with os.fdopen(tmp_descriptor, 'wb') as tmp_handler:
- yield tmp_handler
- replacer(tmp_name, filename)
- except BaseException:
- os.remove(tmp_name)
- raise
-
-
+def extract_zipped_paths(path):
+ """Replace nonexistent paths that look like they refer to a member of a zip
+ archive with the location of an extracted copy of the target, or else
+ just return the provided path unchanged.
+ """
+ if callable(path) or os.path.exists(path):
+ # this is already a valid path, no need to do anything further
+ return path
+
+ # find the first valid part of the provided path and treat that as a zip archive
+ # assume the rest of the path is the name of a member in the archive
+ archive, member = os.path.split(path)
+ while archive and not os.path.exists(archive):
+ archive, prefix = os.path.split(archive)
+ if not prefix:
+ # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split),
+ # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users
+ break
+ member = '/'.join([prefix, member])
+
+ if not zipfile.is_zipfile(archive):
+ return path
+
+ zip_file = zipfile.ZipFile(archive)
+ if member not in zip_file.namelist():
+ return path
+
+ # we have a valid zip archive and a valid member of that archive
+ tmp = tempfile.gettempdir()
+ extracted_path = os.path.join(tmp, member.split('/')[-1])
+ if not os.path.exists(extracted_path):
+ # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition
+ with atomic_open(extracted_path) as file_handler:
+ file_handler.write(zip_file.read(member))
+ return extracted_path
+
+
+@contextlib.contextmanager
+def atomic_open(filename):
+ """Write a file to the disk in an atomic fashion"""
+ replacer = os.rename if sys.version_info[0] == 2 else os.replace
+ tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename))
+ try:
+ with os.fdopen(tmp_descriptor, 'wb') as tmp_handler:
+ yield tmp_handler
+ replacer(tmp_name, filename)
+ except BaseException:
+ os.remove(tmp_name)
+ raise
+
+
def from_key_val_list(value):
"""Take an object and test to see if it can be represented as a
dictionary. Unless it can not be represented as such, return an
@@ -302,9 +302,9 @@ def from_key_val_list(value):
>>> from_key_val_list([('key', 'val')])
OrderedDict([('key', 'val')])
>>> from_key_val_list('string')
- Traceback (most recent call last):
- ...
- ValueError: cannot encode objects that are not 2-tuples
+ Traceback (most recent call last):
+ ...
+ ValueError: cannot encode objects that are not 2-tuples
>>> from_key_val_list({'key': 'val'})
OrderedDict([('key', 'val')])
@@ -330,9 +330,9 @@ def to_key_val_list(value):
>>> to_key_val_list({'key': 'val'})
[('key', 'val')]
>>> to_key_val_list('string')
- Traceback (most recent call last):
- ...
- ValueError: cannot encode objects that are not 2-tuples
+ Traceback (most recent call last):
+ ...
+ ValueError: cannot encode objects that are not 2-tuples
:rtype: list
"""
@@ -342,7 +342,7 @@ def to_key_val_list(value):
if isinstance(value, (str, bytes, bool, int)):
raise ValueError('cannot encode objects that are not 2-tuples')
- if isinstance(value, Mapping):
+ if isinstance(value, Mapping):
value = value.items()
return list(value)
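The doctests above pin down the two key/value helpers: from_key_val_list normalises input into an OrderedDict, to_key_val_list into a list of pairs, and both refuse plain strings and numbers. A sketch of the same calls:

    from requests.utils import from_key_val_list, to_key_val_list

    print(from_key_val_list([("key", "val")]))   # OrderedDict([('key', 'val')])
    print(to_key_val_list({"key": "val"}))       # [('key', 'val')]

    try:
        to_key_val_list("string")
    except ValueError as exc:
        print(exc)   # cannot encode objects that are not 2-tuples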
@@ -487,31 +487,31 @@ def get_encodings_from_content(content):
xml_re.findall(content))
-def _parse_content_type_header(header):
- """Returns content type and parameters from given header
-
- :param header: string
- :return: tuple containing content type and dictionary of
- parameters
- """
-
- tokens = header.split(';')
- content_type, params = tokens[0].strip(), tokens[1:]
- params_dict = {}
- items_to_strip = "\"' "
-
- for param in params:
- param = param.strip()
- if param:
- key, value = param, True
- index_of_equals = param.find("=")
- if index_of_equals != -1:
- key = param[:index_of_equals].strip(items_to_strip)
- value = param[index_of_equals + 1:].strip(items_to_strip)
- params_dict[key.lower()] = value
- return content_type, params_dict
-
-
+def _parse_content_type_header(header):
+ """Returns content type and parameters from given header
+
+ :param header: string
+ :return: tuple containing content type and dictionary of
+ parameters
+ """
+
+ tokens = header.split(';')
+ content_type, params = tokens[0].strip(), tokens[1:]
+ params_dict = {}
+ items_to_strip = "\"' "
+
+ for param in params:
+ param = param.strip()
+ if param:
+ key, value = param, True
+ index_of_equals = param.find("=")
+ if index_of_equals != -1:
+ key = param[:index_of_equals].strip(items_to_strip)
+ value = param[index_of_equals + 1:].strip(items_to_strip)
+ params_dict[key.lower()] = value
+ return content_type, params_dict
+
+
def get_encoding_from_headers(headers):
"""Returns encodings from given HTTP Header Dict.
@@ -524,7 +524,7 @@ def get_encoding_from_headers(headers):
if not content_type:
return None
- content_type, params = _parse_content_type_header(content_type)
+ content_type, params = _parse_content_type_header(content_type)
if 'charset' in params:
return params['charset'].strip("'\"")
@@ -532,11 +532,11 @@ def get_encoding_from_headers(headers):
if 'text' in content_type:
return 'ISO-8859-1'
- if 'application/json' in content_type:
- # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset
- return 'utf-8'
+ if 'application/json' in content_type:
+ # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset
+ return 'utf-8'
+
-
def stream_decode_response_unicode(iterator, r):
"""Stream decodes a iterator."""
@@ -741,8 +741,8 @@ def should_bypass_proxies(url, no_proxy):
:rtype: bool
"""
- # Prioritize lowercase environment variables over uppercase
- # to keep a consistent behaviour with other http projects (curl, wget).
+ # Prioritize lowercase environment variables over uppercase
+ # to keep a consistent behaviour with other http projects (curl, wget).
get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())
# First check whether no_proxy is defined. If it is, check that the URL
@@ -750,43 +750,43 @@ def should_bypass_proxies(url, no_proxy):
no_proxy_arg = no_proxy
if no_proxy is None:
no_proxy = get_proxy('no_proxy')
- parsed = urlparse(url)
+ parsed = urlparse(url)
+
+ if parsed.hostname is None:
+ # URLs don't always have hostnames, e.g. file:/// urls.
+ return True
- if parsed.hostname is None:
- # URLs don't always have hostnames, e.g. file:/// urls.
- return True
-
if no_proxy:
# We need to check whether we match here. We need to see if we match
- # the end of the hostname, both with and without the port.
+ # the end of the hostname, both with and without the port.
no_proxy = (
host for host in no_proxy.replace(' ', '').split(',') if host
)
- if is_ipv4_address(parsed.hostname):
+ if is_ipv4_address(parsed.hostname):
for proxy_ip in no_proxy:
if is_valid_cidr(proxy_ip):
- if address_in_network(parsed.hostname, proxy_ip):
+ if address_in_network(parsed.hostname, proxy_ip):
return True
- elif parsed.hostname == proxy_ip:
+ elif parsed.hostname == proxy_ip:
# If no_proxy ip was defined in plain IP notation instead of cidr notation &
# matches the IP of the index
return True
else:
- host_with_port = parsed.hostname
- if parsed.port:
- host_with_port += ':{}'.format(parsed.port)
-
+ host_with_port = parsed.hostname
+ if parsed.port:
+ host_with_port += ':{}'.format(parsed.port)
+
for host in no_proxy:
- if parsed.hostname.endswith(host) or host_with_port.endswith(host):
+ if parsed.hostname.endswith(host) or host_with_port.endswith(host):
# The URL does match something in no_proxy, so we don't want
# to apply the proxies on this URL.
return True
with set_environ('no_proxy', no_proxy_arg):
- # parsed.hostname can be `None` in cases such as a file URI.
+ # parsed.hostname can be `None` in cases such as a file URI.
try:
- bypass = proxy_bypass(parsed.hostname)
+ bypass = proxy_bypass(parsed.hostname)
except (TypeError, socket.gaierror):
bypass = False
@@ -834,33 +834,33 @@ def select_proxy(url, proxies):
return proxy
-def resolve_proxies(request, proxies, trust_env=True):
- """This method takes proxy information from a request and configuration
- input to resolve a mapping of target proxies. This will consider settings
- such a NO_PROXY to strip proxy configurations.
-
- :param request: Request or PreparedRequest
- :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
- :param trust_env: Boolean declaring whether to trust environment configs
-
- :rtype: dict
- """
- proxies = proxies if proxies is not None else {}
- url = request.url
- scheme = urlparse(url).scheme
- no_proxy = proxies.get('no_proxy')
- new_proxies = proxies.copy()
-
- if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy):
- environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)
-
- proxy = environ_proxies.get(scheme, environ_proxies.get('all'))
-
- if proxy:
- new_proxies.setdefault(scheme, proxy)
- return new_proxies
-
-
+def resolve_proxies(request, proxies, trust_env=True):
+ """This method takes proxy information from a request and configuration
+ input to resolve a mapping of target proxies. This will consider settings
+ such a NO_PROXY to strip proxy configurations.
+
+ :param request: Request or PreparedRequest
+ :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
+ :param trust_env: Boolean declaring whether to trust environment configs
+
+ :rtype: dict
+ """
+ proxies = proxies if proxies is not None else {}
+ url = request.url
+ scheme = urlparse(url).scheme
+ no_proxy = proxies.get('no_proxy')
+ new_proxies = proxies.copy()
+
+ if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy):
+ environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)
+
+ proxy = environ_proxies.get(scheme, environ_proxies.get('all'))
+
+ if proxy:
+ new_proxies.setdefault(scheme, proxy)
+ return new_proxies
+
+
def default_user_agent(name="python-requests"):
"""
Return a string representing the default user agent.
@@ -876,14 +876,14 @@ def default_headers():
"""
return CaseInsensitiveDict({
'User-Agent': default_user_agent(),
- 'Accept-Encoding': DEFAULT_ACCEPT_ENCODING,
+ 'Accept-Encoding': DEFAULT_ACCEPT_ENCODING,
'Accept': '*/*',
'Connection': 'keep-alive',
})
def parse_header_links(value):
- """Return a list of parsed link headers proxies.
+ """Return a list of parsed link headers proxies.
i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"
@@ -894,10 +894,10 @@ def parse_header_links(value):
replace_chars = ' \'"'
- value = value.strip(replace_chars)
- if not value:
- return links
-
+ value = value.strip(replace_chars)
+ if not value:
+ return links
+
for val in re.split(', *<', value):
try:
url, params = val.split(';', 1)
@@ -963,29 +963,29 @@ def prepend_scheme_if_needed(url, new_scheme):
:rtype: str
"""
- parsed = parse_url(url)
- scheme, auth, host, port, path, query, fragment = parsed
-
- # A defect in urlparse determines that there isn't a netloc present in some
- # urls. We previously assumed parsing was overly cautious, and swapped the
- # netloc and path. Due to a lack of tests on the original defect, this is
- # maintained with parse_url for backwards compatibility.
- netloc = parsed.netloc
+ parsed = parse_url(url)
+ scheme, auth, host, port, path, query, fragment = parsed
+
+ # A defect in urlparse determines that there isn't a netloc present in some
+ # urls. We previously assumed parsing was overly cautious, and swapped the
+ # netloc and path. Due to a lack of tests on the original defect, this is
+ # maintained with parse_url for backwards compatibility.
+ netloc = parsed.netloc
if not netloc:
netloc, path = path, netloc
- if auth:
- # parse_url doesn't provide the netloc with auth
- # so we'll add it ourselves.
- netloc = '@'.join([auth, netloc])
- if scheme is None:
- scheme = new_scheme
- if path is None:
- path = ''
+ if auth:
+ # parse_url doesn't provide the netloc with auth
+ # so we'll add it ourselves.
+ netloc = '@'.join([auth, netloc])
+ if scheme is None:
+ scheme = new_scheme
+ if path is None:
+ path = ''
+
+ return urlunparse((scheme, netloc, path, '', query, fragment))
- return urlunparse((scheme, netloc, path, '', query, fragment))
-
def get_auth_from_url(url):
"""Given a url with authentication components, extract them into a tuple of
username,password.
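The restored no_proxy logic above matches hostnames by suffix (with and without the port) and applies CIDR entries only when the host is a literal IPv4 address. A small sketch of that behaviour, assuming requests 2.27 and the should_bypass_proxies signature shown in this diff; the hosts and networks are made-up examples:

    from requests.utils import should_bypass_proxies

    # Suffix match against the hostname, with or without the explicit port.
    print(should_bypass_proxies('http://api.internal.example:8443/v1',
                                no_proxy='localhost,.internal.example'))  # True

    # CIDR entries are honoured only for literal IPv4 hostnames.
    print(should_bypass_proxies('http://10.0.3.7/health',
                                no_proxy='10.0.0.0/16'))  # True

    # No suffix or CIDR match here, so the decision falls through to the
    # platform proxy_bypass() check and depends on the local environment.
    print(should_bypass_proxies('https://example.org/', no_proxy='internal.example'))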
diff --git a/contrib/python/requests/ya.make b/contrib/python/requests/ya.make
index 5ee38f1112..f971752d75 100644
--- a/contrib/python/requests/ya.make
+++ b/contrib/python/requests/ya.make
@@ -1,29 +1,29 @@
-# Generated by devtools/yamaker (pypi).
-
+# Generated by devtools/yamaker (pypi).
+
PY23_LIBRARY()
OWNER(g:python-contrib)
-VERSION(2.27.1)
+VERSION(2.27.1)
+
+LICENSE(Apache-2.0)
-LICENSE(Apache-2.0)
-
PEERDIR(
contrib/python/certifi
contrib/python/idna
contrib/python/urllib3
)
-IF (PYTHON2)
- PEERDIR(
- contrib/python/chardet
- )
-ELSE()
- PEERDIR(
- contrib/python/charset-normalizer
- )
-ENDIF()
-
+IF (PYTHON2)
+ PEERDIR(
+ contrib/python/chardet
+ )
+ELSE()
+ PEERDIR(
+ contrib/python/charset-normalizer
+ )
+ENDIF()
+
NO_LINT()
PY_SRCS(
@@ -48,10 +48,10 @@ PY_SRCS(
requests/utils.py
)
-RESOURCE_FILES(
- PREFIX contrib/python/requests/
- .dist-info/METADATA
- .dist-info/top_level.txt
-)
-
+RESOURCE_FILES(
+ PREFIX contrib/python/requests/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+)
+
END()
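The IF (PYTHON2) / ELSE() branch above pins chardet for Python 2 builds and charset-normalizer for Python 3 builds. At runtime the library resolves the same choice by importing whichever backend is present; a rough sketch of that pattern (not the exact requests code), assuming at least one of the two packages is installed:

    # Prefer the Python 3 dependency from ya.make, fall back to the Python 2 one.
    try:
        import charset_normalizer as charset_backend
    except ImportError:
        import chardet as charset_backend

    def guess_encoding(raw_bytes):
        # Both backends expose detect(), returning a dict with an 'encoding' key.
        return charset_backend.detect(raw_bytes).get('encoding')

    print(guess_encoding('naïve café résumé'.encode('utf-8')))  # e.g. 'utf-8'; exact value depends on backend and sample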