author     nkozlovskiy <nmk@ydb.tech>  2023-10-02 18:57:38 +0300
committer  nkozlovskiy <nmk@ydb.tech>  2023-10-02 19:39:06 +0300
commit     6295ef4d23465c11296e898b9dc4524ad9592b5d (patch)
tree       fc0c852877b2c52f365a1f6ed0710955844338c2 /contrib/deprecated/python
parent     de63c80b75948ecc13894854514d147840ff8430 (diff)
download   ydb-6295ef4d23465c11296e898b9dc4524ad9592b5d.tar.gz

oss ydb: fix dstool building and test run
Diffstat (limited to 'contrib/deprecated/python')
-rw-r--r--  contrib/deprecated/python/backports-abc/.dist-info/METADATA  106
-rw-r--r--  contrib/deprecated/python/backports-abc/.dist-info/top_level.txt  1
-rw-r--r--  contrib/deprecated/python/backports-abc/LICENSE  254
-rw-r--r--  contrib/deprecated/python/backports-abc/README.rst  46
-rw-r--r--  contrib/deprecated/python/backports-abc/backports_abc.py  216
-rw-r--r--  contrib/deprecated/python/backports-abc/ya.make  22
-rw-r--r--  contrib/deprecated/python/backports.functools-lru-cache/.dist-info/METADATA  81
-rw-r--r--  contrib/deprecated/python/backports.functools-lru-cache/.dist-info/top_level.txt  1
-rw-r--r--  contrib/deprecated/python/backports.functools-lru-cache/LICENSE  17
-rw-r--r--  contrib/deprecated/python/backports.functools-lru-cache/README.rst  48
-rw-r--r--  contrib/deprecated/python/backports.functools-lru-cache/backports/functools_lru_cache.py  195
-rw-r--r--  contrib/deprecated/python/backports.functools-lru-cache/ya.make  22
-rw-r--r--  contrib/deprecated/python/backports.shutil-get-terminal-size/.dist-info/METADATA  44
-rw-r--r--  contrib/deprecated/python/backports.shutil-get-terminal-size/.dist-info/top_level.txt  1
-rw-r--r--  contrib/deprecated/python/backports.shutil-get-terminal-size/LICENSE  22
-rw-r--r--  contrib/deprecated/python/backports.shutil-get-terminal-size/README.rst  18
-rw-r--r--  contrib/deprecated/python/backports.shutil-get-terminal-size/backports/shutil_get_terminal_size/__init__.py  11
-rw-r--r--  contrib/deprecated/python/backports.shutil-get-terminal-size/backports/shutil_get_terminal_size/get_terminal_size.py  101
-rw-r--r--  contrib/deprecated/python/backports.shutil-get-terminal-size/ya.make  23
-rw-r--r--  contrib/deprecated/python/configparser/.dist-info/METADATA  259
-rw-r--r--  contrib/deprecated/python/configparser/.dist-info/top_level.txt  2
-rw-r--r--  contrib/deprecated/python/configparser/LICENSE  7
-rw-r--r--  contrib/deprecated/python/configparser/README.rst  229
-rw-r--r--  contrib/deprecated/python/configparser/backports/configparser/__init__.py  1473
-rw-r--r--  contrib/deprecated/python/configparser/backports/configparser/helpers.py  274
-rw-r--r--  contrib/deprecated/python/configparser/configparser.py  61
-rw-r--r--  contrib/deprecated/python/configparser/ya.make  24
-rw-r--r--  contrib/deprecated/python/enum34/.dist-info/METADATA  62
-rw-r--r--  contrib/deprecated/python/enum34/.dist-info/top_level.txt  1
-rw-r--r--  contrib/deprecated/python/enum34/README  3
-rw-r--r--  contrib/deprecated/python/enum34/enum/LICENSE  32
-rw-r--r--  contrib/deprecated/python/enum34/enum/README  3
-rw-r--r--  contrib/deprecated/python/enum34/enum/__init__.py  852
-rw-r--r--  contrib/deprecated/python/enum34/enum/test.py  1867
-rw-r--r--  contrib/deprecated/python/enum34/tests/ya.make  15
-rw-r--r--  contrib/deprecated/python/enum34/ya.make  26
-rw-r--r--  contrib/deprecated/python/faulthandler/.dist-info/METADATA  78
-rw-r--r--  contrib/deprecated/python/faulthandler/.dist-info/top_level.txt  1
-rw-r--r--  contrib/deprecated/python/faulthandler/AUTHORS  14
-rw-r--r--  contrib/deprecated/python/faulthandler/COPYING  25
-rw-r--r--  contrib/deprecated/python/faulthandler/README.rst  57
-rw-r--r--  contrib/deprecated/python/faulthandler/faulthandler.c  1415
-rw-r--r--  contrib/deprecated/python/faulthandler/traceback.c  326
-rw-r--r--  contrib/deprecated/python/faulthandler/ya.make  30
-rw-r--r--  contrib/deprecated/python/futures/.dist-info/METADATA  57
-rw-r--r--  contrib/deprecated/python/futures/.dist-info/top_level.txt  1
-rw-r--r--  contrib/deprecated/python/futures/LICENSE  48
-rw-r--r--  contrib/deprecated/python/futures/README.rst  36
-rw-r--r--  contrib/deprecated/python/futures/concurrent/__init__.py  3
-rw-r--r--  contrib/deprecated/python/futures/concurrent/futures/__init__.py  23
-rw-r--r--  contrib/deprecated/python/futures/concurrent/futures/_base.py  673
-rw-r--r--  contrib/deprecated/python/futures/concurrent/futures/process.py  363
-rw-r--r--  contrib/deprecated/python/futures/concurrent/futures/thread.py  203
-rw-r--r--  contrib/deprecated/python/futures/ya.make  26
-rw-r--r--  contrib/deprecated/python/ipaddress/.dist-info/METADATA  25
-rw-r--r--  contrib/deprecated/python/ipaddress/.dist-info/top_level.txt  1
-rw-r--r--  contrib/deprecated/python/ipaddress/LICENSE  50
-rw-r--r--  contrib/deprecated/python/ipaddress/README.md  28
-rw-r--r--  contrib/deprecated/python/ipaddress/ipaddress.py  2420
-rw-r--r--  contrib/deprecated/python/ipaddress/ya.make  22
-rw-r--r--  contrib/deprecated/python/scandir/.dist-info/METADATA  238
-rw-r--r--  contrib/deprecated/python/scandir/.dist-info/top_level.txt  2
-rw-r--r--  contrib/deprecated/python/scandir/LICENSE.txt  27
-rw-r--r--  contrib/deprecated/python/scandir/README.rst  210
-rw-r--r--  contrib/deprecated/python/scandir/_scandir.c  1834
-rw-r--r--  contrib/deprecated/python/scandir/osdefs.h  48
-rw-r--r--  contrib/deprecated/python/scandir/scandir.py  693
-rw-r--r--  contrib/deprecated/python/scandir/tests/test_scandir.py  336
-rw-r--r--  contrib/deprecated/python/scandir/tests/test_walk.py  213
-rw-r--r--  contrib/deprecated/python/scandir/tests/ya.make  18
-rw-r--r--  contrib/deprecated/python/scandir/winreparse.h  53
-rw-r--r--  contrib/deprecated/python/scandir/ya.make  36
-rw-r--r--  contrib/deprecated/python/singledispatch/.dist-info/METADATA  91
-rw-r--r--  contrib/deprecated/python/singledispatch/.dist-info/top_level.txt  1
-rw-r--r--  contrib/deprecated/python/singledispatch/LICENSE  19
-rw-r--r--  contrib/deprecated/python/singledispatch/README.rst  46
-rw-r--r--  contrib/deprecated/python/singledispatch/singledispatch/__init__.py  300
-rw-r--r--  contrib/deprecated/python/singledispatch/singledispatch/helpers.py  217
-rw-r--r--  contrib/deprecated/python/singledispatch/ya.make  27
-rw-r--r--  contrib/deprecated/python/subprocess32/ChangeLog  185
-rw-r--r--  contrib/deprecated/python/subprocess32/LICENSE  283
-rw-r--r--  contrib/deprecated/python/subprocess32/README.md  50
-rw-r--r--  contrib/deprecated/python/subprocess32/_posixsubprocess.c  927
-rw-r--r--  contrib/deprecated/python/subprocess32/_posixsubprocess_config.h  115
-rw-r--r--  contrib/deprecated/python/subprocess32/_posixsubprocess_helpers.c  174
-rw-r--r--  contrib/deprecated/python/subprocess32/subprocess32.py  1752
-rw-r--r--  contrib/deprecated/python/subprocess32/test_subprocess32.py  2485
-rw-r--r--  contrib/deprecated/python/subprocess32/testdata/fd_status.py  34
-rw-r--r--  contrib/deprecated/python/subprocess32/testdata/input_reader.py  7
-rw-r--r--  contrib/deprecated/python/subprocess32/testdata/qcat.py  7
-rw-r--r--  contrib/deprecated/python/subprocess32/testdata/qgrep.py  10
-rw-r--r--  contrib/deprecated/python/subprocess32/testdata/sigchild_ignore.py  18
-rw-r--r--  contrib/deprecated/python/subprocess32/testdata/ya.make  19
-rw-r--r--  contrib/deprecated/python/subprocess32/ya.make  35
-rw-r--r--  contrib/deprecated/python/typing/.dist-info/METADATA  50
-rw-r--r--  contrib/deprecated/python/typing/.dist-info/top_level.txt  1
-rw-r--r--  contrib/deprecated/python/typing/LICENSE  254
-rw-r--r--  contrib/deprecated/python/typing/test/mod_generics_cache.py  14
-rw-r--r--  contrib/deprecated/python/typing/test/test_typing.py  2706
-rw-r--r--  contrib/deprecated/python/typing/test/ya.make  14
-rw-r--r--  contrib/deprecated/python/typing/typing.py  2550
-rw-r--r--  contrib/deprecated/python/typing/ya.make  30
-rw-r--r--  contrib/deprecated/python/win-unicode-console/.dist-info/METADATA  133
-rw-r--r--  contrib/deprecated/python/win-unicode-console/.dist-info/top_level.txt  2
-rw-r--r--  contrib/deprecated/python/win-unicode-console/README.rst  109
-rw-r--r--  contrib/deprecated/python/win-unicode-console/win_unicode_console/__init__.py  54
-rw-r--r--  contrib/deprecated/python/win-unicode-console/win_unicode_console/buffer.py  54
-rw-r--r--  contrib/deprecated/python/win-unicode-console/win_unicode_console/console.py  106
-rw-r--r--  contrib/deprecated/python/win-unicode-console/win_unicode_console/file_object.py  55
-rw-r--r--  contrib/deprecated/python/win-unicode-console/win_unicode_console/info.py  17
-rw-r--r--  contrib/deprecated/python/win-unicode-console/win_unicode_console/raw_input.py  125
-rw-r--r--  contrib/deprecated/python/win-unicode-console/win_unicode_console/readline_hook.py  149
-rw-r--r--  contrib/deprecated/python/win-unicode-console/win_unicode_console/runner.py  199
-rw-r--r--  contrib/deprecated/python/win-unicode-console/win_unicode_console/streams.py  337
-rw-r--r--  contrib/deprecated/python/win-unicode-console/win_unicode_console/tokenize_open.py  162
-rw-r--r--  contrib/deprecated/python/win-unicode-console/win_unicode_console/unicode_argv.py  79
-rw-r--r--  contrib/deprecated/python/win-unicode-console/ya.make  40
117 files changed, 30094 insertions, 0 deletions
diff --git a/contrib/deprecated/python/backports-abc/.dist-info/METADATA b/contrib/deprecated/python/backports-abc/.dist-info/METADATA
new file mode 100644
index 0000000000..40e0734c37
--- /dev/null
+++ b/contrib/deprecated/python/backports-abc/.dist-info/METADATA
@@ -0,0 +1,106 @@
+Metadata-Version: 2.0
+Name: backports-abc
+Version: 0.5
+Summary: A backport of recent additions to the 'collections.abc' module.
+Home-page: https://github.com/cython/backports_abc
+Author: Stefan Behnel et al.
+Author-email: cython-devel@python.org
+License: UNKNOWN
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 3
+
+=============
+ABC-Backports
+=============
+
+Usage:
+
+.. code-block:: python
+
+ try:
+ # ABCs live in "collections.abc" in Python >= 3.3
+ from collections.abc import Coroutine, Generator
+ except ImportError:
+ # fall back to import from "backports_abc"
+ from backports_abc import Coroutine, Generator
+
+You can also install the ABCs into the stdlib by calling the ``patch()``
+function:
+
+.. code-block:: python
+
+ import backports_abc
+ backports_abc.patch()
+
+ try:
+ # ABCs live in "collections.abc" in Python >= 3.3
+ from collections.abc import Coroutine, Generator
+ except ImportError:
+ # fall back to import from "collections" in Python <= 3.2
+ from backports_abc import Coroutine, Generator
+
+Currently, ``patch()`` provides the following names if missing:
+
+* ``collections.abc.Generator``
+* ``collections.abc.Awaitable``
+* ``collections.abc.Coroutine``
+* ``inspect.isawaitable(obj)``
+
+All of them are also available directly from the ``backports_abc``
+module namespace.
+
+In Python 2.x and Python 3.2, it patches the ``collections`` module
+instead of the ``collections.abc`` module. Any names that are already
+available when importing this module will not be overwritten.
+
+The names that were previously patched by ``patch()`` can be queried
+through the mapping in ``backports_abc.PATCHED``.
+
+Changelog
+=========
+
+0.5 (2016-11-12)
+----------------
+
+* support old-style (mro-missing) classes
+
+0.4 (2015-09-14)
+----------------
+
+* direct wheel building support
+
+* make all names available at the module level instead of requiring patching
+
+
+0.3 (2015-07-03)
+----------------
+
+* removed patching of ``inspect.iscoroutine()`` as it is not ABC based
+
+
+0.2 (2015-07-03)
+----------------
+
+* require explicit ``backports_abc.patch()`` call to do the patching
+ (avoids side-effects on import and allows future configuration)
+
+* provide access to patched names through global ``PATCHED`` dict
+
+* add ABC based implementations of inspect.iscoroutine() and
+ inspect.isawaitable()
+
+
+0.1 (2015-06-24)
+----------------
+
+* initial public release
+
+* provided ABCs: Generator, Coroutine, Awaitable
+
+
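A minimal sketch of the patching workflow described in the METADATA above; ``backports_abc``, ``patch()``, and ``PATCHED`` are as documented, while the ``gen()`` helper is purely illustrative::

    import inspect

    import backports_abc

    backports_abc.patch()

    # After patch(), the ABCs are importable from their stdlib location:
    # collections.abc on Python >= 3.3, collections on Python 2.x / 3.2.
    try:
        from collections.abc import Generator
    except ImportError:
        from collections import Generator

    def gen():
        yield 1

    print(isinstance(gen(), Generator))   # True
    print(inspect.isawaitable(gen()))     # False: a plain generator is not awaitable
    print(sorted(backports_abc.PATCHED))  # names installed by patch()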
diff --git a/contrib/deprecated/python/backports-abc/.dist-info/top_level.txt b/contrib/deprecated/python/backports-abc/.dist-info/top_level.txt
new file mode 100644
index 0000000000..2fff0c3c21
--- /dev/null
+++ b/contrib/deprecated/python/backports-abc/.dist-info/top_level.txt
@@ -0,0 +1 @@
+backports_abc
diff --git a/contrib/deprecated/python/backports-abc/LICENSE b/contrib/deprecated/python/backports-abc/LICENSE
new file mode 100644
index 0000000000..88251f5b6e
--- /dev/null
+++ b/contrib/deprecated/python/backports-abc/LICENSE
@@ -0,0 +1,254 @@
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC. Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team. In October of the same
+year, the PythonLabs team moved to Digital Creations (now Zope
+Corporation, see http://www.zope.com). In 2001, the Python Software
+Foundation (PSF, see http://www.python.org/psf/) was formed, a
+non-profit organization created specifically to own Python-related
+Intellectual Property. Zope Corporation is a sponsoring member of
+the PSF.
+
+All Python releases are Open Source (see http://www.opensource.org for
+the Open Source Definition). Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+ Release Derived Year Owner GPL-
+ from compatible? (1)
+
+ 0.9.0 thru 1.2 1991-1995 CWI yes
+ 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
+ 1.6 1.5.2 2000 CNRI no
+ 2.0 1.6 2000 BeOpen.com no
+ 1.6.1 1.6 2001 CNRI yes (2)
+ 2.1 2.0+1.6.1 2001 PSF no
+ 2.0.1 2.0+1.6.1 2001 PSF yes
+ 2.1.1 2.1+2.0.1 2001 PSF yes
+ 2.1.2 2.1.1 2002 PSF yes
+ 2.1.3 2.1.2 2002 PSF yes
+ 2.2 and above 2.1.1 2001-now PSF yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+ the GPL. All Python licenses, unlike the GPL, let you distribute
+ a modified version without making your changes open source. The
+ GPL-compatible licenses make it possible to combine Python with
+ other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+ because its license has a choice of law clause. According to
+ CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+ is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014, 2015 Python Software Foundation; All Rights Reserved"
+are retained in Python alone or in any derivative version prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions. Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee. This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party. As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee. Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement. This Agreement together with
+Python 1.6.1 may be located on the Internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013. This
+Agreement may also be obtained from a proxy server on the Internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee. This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+ ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands. All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/contrib/deprecated/python/backports-abc/README.rst b/contrib/deprecated/python/backports-abc/README.rst
new file mode 100644
index 0000000000..06cf94aefd
--- /dev/null
+++ b/contrib/deprecated/python/backports-abc/README.rst
@@ -0,0 +1,46 @@
+=============
+ABC-Backports
+=============
+
+Usage:
+
+.. code-block:: python
+
+ try:
+ # ABCs live in "collections.abc" in Python >= 3.3
+ from collections.abc import Coroutine, Generator
+ except ImportError:
+ # fall back to import from "backports_abc"
+ from backports_abc import Coroutine, Generator
+
+You can also install the ABCs into the stdlib by calling the ``patch()``
+function:
+
+.. code-block:: python
+
+ import backports_abc
+ backports_abc.patch()
+
+ try:
+ # ABCs live in "collections.abc" in Python >= 3.3
+ from collections.abc import Coroutine, Generator
+ except ImportError:
+ # fall back to import from "collections" in Python <= 3.2
+ from backports_abc import Coroutine, Generator
+
+Currently, ``patch()`` provides the following names if missing:
+
+* ``collections.abc.Generator``
+* ``collections.abc.Awaitable``
+* ``collections.abc.Coroutine``
+* ``inspect.isawaitable(obj)``
+
+All of them are also available directly from the ``backports_abc``
+module namespace.
+
+In Python 2.x and Python 3.2, it patches the ``collections`` module
+instead of the ``collections.abc`` module. Any names that are already
+available when importing this module will not be overwritten.
+
+The names that were previously patched by ``patch()`` can be queried
+through the mapping in ``backports_abc.PATCHED``.
diff --git a/contrib/deprecated/python/backports-abc/backports_abc.py b/contrib/deprecated/python/backports-abc/backports_abc.py
new file mode 100644
index 0000000000..da4cb32983
--- /dev/null
+++ b/contrib/deprecated/python/backports-abc/backports_abc.py
@@ -0,0 +1,216 @@
+"""
+Patch recently added ABCs into the standard lib module
+``collections.abc`` (Py3) or ``collections`` (Py2).
+
+Usage::
+
+ import backports_abc
+ backports_abc.patch()
+
+or::
+
+ try:
+ from collections.abc import Generator
+ except ImportError:
+ from backports_abc import Generator
+"""
+
+try:
+ import collections.abc as _collections_abc
+except ImportError:
+ import collections as _collections_abc
+
+
+def get_mro(cls):
+ try:
+ return cls.__mro__
+ except AttributeError:
+ return old_style_mro(cls)
+
+
+def old_style_mro(cls):
+ yield cls
+ for base in cls.__bases__:
+ for c in old_style_mro(base):
+ yield c
+
+
+def mk_gen():
+ from abc import abstractmethod
+
+ required_methods = (
+ '__iter__', '__next__' if hasattr(iter(()), '__next__') else 'next',
+ 'send', 'throw', 'close')
+
+ class Generator(_collections_abc.Iterator):
+ __slots__ = ()
+
+ if '__next__' in required_methods:
+ def __next__(self):
+ return self.send(None)
+ else:
+ def next(self):
+ return self.send(None)
+
+ @abstractmethod
+ def send(self, value):
+ raise StopIteration
+
+ @abstractmethod
+ def throw(self, typ, val=None, tb=None):
+ if val is None:
+ if tb is None:
+ raise typ
+ val = typ()
+ if tb is not None:
+ val = val.with_traceback(tb)
+ raise val
+
+ def close(self):
+ try:
+ self.throw(GeneratorExit)
+ except (GeneratorExit, StopIteration):
+ pass
+ else:
+ raise RuntimeError('generator ignored GeneratorExit')
+
+ @classmethod
+ def __subclasshook__(cls, C):
+ if cls is Generator:
+ mro = get_mro(C)
+ for method in required_methods:
+ for base in mro:
+ if method in base.__dict__:
+ break
+ else:
+ return NotImplemented
+ return True
+ return NotImplemented
+
+ generator = type((lambda: (yield))())
+ Generator.register(generator)
+ return Generator
+
+
+def mk_awaitable():
+ from abc import abstractmethod, ABCMeta
+
+ @abstractmethod
+ def __await__(self):
+ yield
+
+ @classmethod
+ def __subclasshook__(cls, C):
+ if cls is Awaitable:
+ for B in get_mro(C):
+ if '__await__' in B.__dict__:
+ if B.__dict__['__await__']:
+ return True
+ break
+ return NotImplemented
+
+ # calling metaclass directly as syntax differs in Py2/Py3
+ Awaitable = ABCMeta('Awaitable', (), {
+ '__slots__': (),
+ '__await__': __await__,
+ '__subclasshook__': __subclasshook__,
+ })
+
+ return Awaitable
+
+
+def mk_coroutine():
+ from abc import abstractmethod
+
+ class Coroutine(Awaitable):
+ __slots__ = ()
+
+ @abstractmethod
+ def send(self, value):
+ """Send a value into the coroutine.
+ Return next yielded value or raise StopIteration.
+ """
+ raise StopIteration
+
+ @abstractmethod
+ def throw(self, typ, val=None, tb=None):
+ """Raise an exception in the coroutine.
+ Return next yielded value or raise StopIteration.
+ """
+ if val is None:
+ if tb is None:
+ raise typ
+ val = typ()
+ if tb is not None:
+ val = val.with_traceback(tb)
+ raise val
+
+ def close(self):
+ """Raise GeneratorExit inside coroutine.
+ """
+ try:
+ self.throw(GeneratorExit)
+ except (GeneratorExit, StopIteration):
+ pass
+ else:
+ raise RuntimeError('coroutine ignored GeneratorExit')
+
+ @classmethod
+ def __subclasshook__(cls, C):
+ if cls is Coroutine:
+ mro = get_mro(C)
+ for method in ('__await__', 'send', 'throw', 'close'):
+ for base in mro:
+ if method in base.__dict__:
+ break
+ else:
+ return NotImplemented
+ return True
+ return NotImplemented
+
+ return Coroutine
+
+
+###
+# make all ABCs available in this module
+
+try:
+ Generator = _collections_abc.Generator
+except AttributeError:
+ Generator = mk_gen()
+
+try:
+ Awaitable = _collections_abc.Awaitable
+except AttributeError:
+ Awaitable = mk_awaitable()
+
+try:
+ Coroutine = _collections_abc.Coroutine
+except AttributeError:
+ Coroutine = mk_coroutine()
+
+try:
+ from inspect import isawaitable
+except ImportError:
+ def isawaitable(obj):
+ return isinstance(obj, Awaitable)
+
+
+###
+# allow patching the stdlib
+
+PATCHED = {}
+
+
+def patch(patch_inspect=True):
+ """
+ Main entry point for patching the ``collections.abc`` and ``inspect``
+ standard library modules.
+ """
+ PATCHED['collections.abc.Generator'] = _collections_abc.Generator = Generator
+ PATCHED['collections.abc.Coroutine'] = _collections_abc.Coroutine = Coroutine
+ PATCHED['collections.abc.Awaitable'] = _collections_abc.Awaitable = Awaitable
+
+ if patch_inspect:
+ import inspect
+ PATCHED['inspect.isawaitable'] = inspect.isawaitable = isawaitable
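The ``__subclasshook__`` methods in ``backports_abc.py`` above make these ABCs structural: any class whose MRO supplies the required methods passes ``issubclass``/``isinstance`` checks without an explicit ``register()`` call. A small sketch (the ``FakeGen`` class is illustrative)::

    from backports_abc import Generator

    class FakeGen(object):
        # Supplies the full protocol checked by Generator.__subclasshook__:
        # __iter__, __next__ (or 'next' on Python 2), send, throw, close.
        def __iter__(self):
            return self

        def __next__(self):
            return self.send(None)

        next = __next__  # Python 2 spelling

        def send(self, value):
            raise StopIteration

        def throw(self, typ, val=None, tb=None):
            raise typ

        def close(self):
            pass

    print(issubclass(FakeGen, Generator))    # True, via __subclasshook__
    print(isinstance(FakeGen(), Generator))  # True, no register() needed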
diff --git a/contrib/deprecated/python/backports-abc/ya.make b/contrib/deprecated/python/backports-abc/ya.make
new file mode 100644
index 0000000000..cc0330f80d
--- /dev/null
+++ b/contrib/deprecated/python/backports-abc/ya.make
@@ -0,0 +1,22 @@
+# Generated by devtools/yamaker (pypi).
+
+PY2_LIBRARY()
+
+VERSION(0.5)
+
+LICENSE(PSF-2.0)
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ backports_abc.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/deprecated/python/backports-abc/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+)
+
+END()
diff --git a/contrib/deprecated/python/backports.functools-lru-cache/.dist-info/METADATA b/contrib/deprecated/python/backports.functools-lru-cache/.dist-info/METADATA
new file mode 100644
index 0000000000..49450fe4d0
--- /dev/null
+++ b/contrib/deprecated/python/backports.functools-lru-cache/.dist-info/METADATA
@@ -0,0 +1,81 @@
+Metadata-Version: 2.1
+Name: backports.functools-lru-cache
+Version: 1.6.6
+Summary: Backport of functools.lru_cache
+Home-page: https://github.com/jaraco/backports.functools_lru_cache
+Author: Raymond Hettinger
+Author-email: raymond.hettinger@gmail.com
+Maintainer: Jason R. Coombs
+Maintainer-email: jaraco@jaraco.com
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Requires-Python: >=2.6
+License-File: LICENSE
+Provides-Extra: docs
+Requires-Dist: sphinx (>=3.5) ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=9.3) ; extra == 'docs'
+Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Requires-Dist: furo ; extra == 'docs'
+Requires-Dist: sphinx-lint ; extra == 'docs'
+Requires-Dist: jaraco.tidelift (>=1.4) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest (>=6) ; extra == 'testing'
+Requires-Dist: pytest-checkdocs (>=2.4) ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-enabler (>=2.2) ; extra == 'testing'
+Requires-Dist: pytest-ruff ; extra == 'testing'
+Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: pytest-mypy (>=0.9.1) ; (platform_python_implementation != "PyPy") and extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/backports.functools_lru_cache.svg
+ :target: https://pypi.org/project/backports.functools_lru_cache
+
+.. image:: https://img.shields.io/pypi/pyversions/backports.functools_lru_cache.svg
+
+.. image:: https://github.com/jaraco/backports.functools_lru_cache/workflows/tests/badge.svg
+ :target: https://github.com/jaraco/backports.functools_lru_cache/actions?query=workflow%3A%22tests%22
+ :alt: tests
+
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+ :target: https://github.com/astral-sh/ruff
+ :alt: Ruff
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. image:: https://readthedocs.org/projects/backportsfunctools_lru_cache/badge/?version=latest
+ :target: https://backportsfunctools_lru_cache.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2023-informational
+ :target: https://blog.jaraco.com/skeleton
+
+.. image:: https://tidelift.com/badges/package/pypi/backports.functools_lru_cache
+ :target: https://tidelift.com/subscription/pkg/pypi-backports.functools_lru_cache?utm_source=pypi-backports.functools_lru_cache&utm_medium=readme
+
+Backport of functools.lru_cache from Python 3.3 as published at `ActiveState
+<http://code.activestate.com/recipes/578078/>`_.
+
+Usage
+=====
+
+Consider using this technique for importing the 'lru_cache' function::
+
+ try:
+ from functools import lru_cache
+ except ImportError:
+ from backports.functools_lru_cache import lru_cache
+
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more <https://tidelift.com/subscription/pkg/pypi-backports.functools_lru_cache?utm_source=pypi-backports.functools_lru_cache&utm_medium=referral&utm_campaign=github>`_.
diff --git a/contrib/deprecated/python/backports.functools-lru-cache/.dist-info/top_level.txt b/contrib/deprecated/python/backports.functools-lru-cache/.dist-info/top_level.txt
new file mode 100644
index 0000000000..99d2be5b64
--- /dev/null
+++ b/contrib/deprecated/python/backports.functools-lru-cache/.dist-info/top_level.txt
@@ -0,0 +1 @@
+backports
diff --git a/contrib/deprecated/python/backports.functools-lru-cache/LICENSE b/contrib/deprecated/python/backports.functools-lru-cache/LICENSE
new file mode 100644
index 0000000000..1bb5a44356
--- /dev/null
+++ b/contrib/deprecated/python/backports.functools-lru-cache/LICENSE
@@ -0,0 +1,17 @@
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/contrib/deprecated/python/backports.functools-lru-cache/README.rst b/contrib/deprecated/python/backports.functools-lru-cache/README.rst
new file mode 100644
index 0000000000..713bef76f7
--- /dev/null
+++ b/contrib/deprecated/python/backports.functools-lru-cache/README.rst
@@ -0,0 +1,48 @@
+.. image:: https://img.shields.io/pypi/v/backports.functools_lru_cache.svg
+ :target: https://pypi.org/project/backports.functools_lru_cache
+
+.. image:: https://img.shields.io/pypi/pyversions/backports.functools_lru_cache.svg
+
+.. image:: https://github.com/jaraco/backports.functools_lru_cache/workflows/tests/badge.svg
+ :target: https://github.com/jaraco/backports.functools_lru_cache/actions?query=workflow%3A%22tests%22
+ :alt: tests
+
+.. image:: https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/charliermarsh/ruff/main/assets/badge/v2.json
+ :target: https://github.com/astral-sh/ruff
+ :alt: Ruff
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. image:: https://readthedocs.org/projects/backportsfunctools_lru_cache/badge/?version=latest
+ :target: https://backportsfunctools_lru_cache.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2023-informational
+ :target: https://blog.jaraco.com/skeleton
+
+.. image:: https://tidelift.com/badges/package/pypi/backports.functools_lru_cache
+ :target: https://tidelift.com/subscription/pkg/pypi-backports.functools_lru_cache?utm_source=pypi-backports.functools_lru_cache&utm_medium=readme
+
+Backport of functools.lru_cache from Python 3.3 as published at `ActiveState
+<http://code.activestate.com/recipes/578078/>`_.
+
+Usage
+=====
+
+Consider using this technique for importing the 'lru_cache' function::
+
+ try:
+ from functools import lru_cache
+ except ImportError:
+ from backports.functools_lru_cache import lru_cache
+
+
+For Enterprise
+==============
+
+Available as part of the Tidelift Subscription.
+
+This project and the maintainers of thousands of other packages are working with Tidelift to deliver one enterprise subscription that covers all of the open source you use.
+
+`Learn more <https://tidelift.com/subscription/pkg/pypi-backports.functools_lru_cache?utm_source=pypi-backports.functools_lru_cache&utm_medium=referral&utm_campaign=github>`_.
diff --git a/contrib/deprecated/python/backports.functools-lru-cache/backports/functools_lru_cache.py b/contrib/deprecated/python/backports.functools-lru-cache/backports/functools_lru_cache.py
new file mode 100644
index 0000000000..1b83fe995e
--- /dev/null
+++ b/contrib/deprecated/python/backports.functools-lru-cache/backports/functools_lru_cache.py
@@ -0,0 +1,195 @@
+from __future__ import absolute_import
+
+import functools
+from collections import namedtuple
+from threading import RLock
+
+_CacheInfo = namedtuple("_CacheInfo", ["hits", "misses", "maxsize", "currsize"])
+
+
+@functools.wraps(functools.update_wrapper)
+def update_wrapper(
+ wrapper,
+ wrapped,
+ assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES,
+):
+ """
+ Patch two bugs in functools.update_wrapper.
+ """
+ # workaround for http://bugs.python.org/issue3445
+ assigned = tuple(attr for attr in assigned if hasattr(wrapped, attr))
+ wrapper = functools.update_wrapper(wrapper, wrapped, assigned, updated)
+ # workaround for https://bugs.python.org/issue17482
+ wrapper.__wrapped__ = wrapped
+ return wrapper
+
+
+class _HashedSeq(list):
+ __slots__ = 'hashvalue'
+
+ def __init__(self, tup, hash=hash):
+ self[:] = tup
+ self.hashvalue = hash(tup)
+
+ def __hash__(self):
+ return self.hashvalue
+
+
+def _make_key(
+ args,
+ kwds,
+ typed,
+ kwd_mark=(object(),),
+ fasttypes=set([int, str, frozenset, type(None)]),
+ sorted=sorted,
+ tuple=tuple,
+ type=type,
+ len=len,
+):
+ 'Make a cache key from optionally typed positional and keyword arguments'
+ key = args
+ if kwds:
+ sorted_items = sorted(kwds.items())
+ key += kwd_mark
+ for item in sorted_items:
+ key += item
+ if typed:
+ key += tuple(type(v) for v in args)
+ if kwds:
+ key += tuple(type(v) for k, v in sorted_items)
+ elif len(key) == 1 and type(key[0]) in fasttypes:
+ return key[0]
+ return _HashedSeq(key)
+
+
+def lru_cache(maxsize=100, typed=False): # noqa: C901
+ """Least-recently-used cache decorator.
+
+ If *maxsize* is set to None, the LRU features are disabled and the cache
+ can grow without bound.
+
+ If *typed* is True, arguments of different types will be cached separately.
+ For example, f(3.0) and f(3) will be treated as distinct calls with
+ distinct results.
+
+ Arguments to the cached function must be hashable.
+
+ View the cache statistics named tuple (hits, misses, maxsize, currsize) with
+ f.cache_info(). Clear the cache and statistics with f.cache_clear().
+ Access the underlying function with f.__wrapped__.
+
+ See: http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used
+
+ """
+
+ # Users should only access the lru_cache through its public API:
+ # cache_info, cache_clear, and f.__wrapped__
+ # The internals of the lru_cache are encapsulated for thread safety and
+ # to allow the implementation to change (including a possible C version).
+
+ def decorating_function(user_function):
+ cache = dict()
+ stats = [0, 0] # make statistics updateable non-locally
+ HITS, MISSES = 0, 1 # names for the stats fields
+ make_key = _make_key
+ cache_get = cache.get # bound method to lookup key or return None
+ _len = len # localize the global len() function
+ lock = RLock() # because linkedlist updates aren't threadsafe
+ root = [] # root of the circular doubly linked list
+ root[:] = [root, root, None, None] # initialize by pointing to self
+ nonlocal_root = [root] # make updateable non-locally
+ PREV, NEXT, KEY, RESULT = 0, 1, 2, 3 # names for the link fields
+
+ if maxsize == 0:
+
+ def wrapper(*args, **kwds):
+ # no caching, just do a statistics update after a successful call
+ result = user_function(*args, **kwds)
+ stats[MISSES] += 1
+ return result
+
+ elif maxsize is None:
+
+ def wrapper(*args, **kwds):
+ # simple caching without ordering or size limit
+ key = make_key(args, kwds, typed)
+ result = cache_get(
+ key, root
+ ) # root used here as a unique not-found sentinel
+ if result is not root:
+ stats[HITS] += 1
+ return result
+ result = user_function(*args, **kwds)
+ cache[key] = result
+ stats[MISSES] += 1
+ return result
+
+ else:
+
+ def wrapper(*args, **kwds):
+ # size limited caching that tracks accesses by recency
+ key = make_key(args, kwds, typed) if kwds or typed else args
+ with lock:
+ link = cache_get(key)
+ if link is not None:
+ # record recent use of the key by moving it
+ # to the front of the list
+ (root,) = nonlocal_root
+ link_prev, link_next, key, result = link
+ link_prev[NEXT] = link_next
+ link_next[PREV] = link_prev
+ last = root[PREV]
+ last[NEXT] = root[PREV] = link
+ link[PREV] = last
+ link[NEXT] = root
+ stats[HITS] += 1
+ return result
+ result = user_function(*args, **kwds)
+ with lock:
+ (root,) = nonlocal_root
+ if key in cache:
+ # getting here means that this same key was added to the
+ # cache while the lock was released. since the link
+ # update is already done, we need only return the
+ # computed result and update the count of misses.
+ pass
+ elif _len(cache) >= maxsize:
+ # use the old root to store the new key and result
+ oldroot = root
+ oldroot[KEY] = key
+ oldroot[RESULT] = result
+ # empty the oldest link and make it the new root
+ root = nonlocal_root[0] = oldroot[NEXT]
+ oldkey = root[KEY]
+ root[KEY] = root[RESULT] = None
+ # now update the cache dictionary for the new links
+ del cache[oldkey]
+ cache[key] = oldroot
+ else:
+ # put result in a new link at the front of the list
+ last = root[PREV]
+ link = [last, root, key, result]
+ last[NEXT] = root[PREV] = cache[key] = link
+ stats[MISSES] += 1
+ return result
+
+ def cache_info():
+ """Report cache statistics"""
+ with lock:
+ return _CacheInfo(stats[HITS], stats[MISSES], maxsize, len(cache))
+
+ def cache_clear():
+ """Clear the cache and cache statistics"""
+ with lock:
+ cache.clear()
+ root = nonlocal_root[0]
+ root[:] = [root, root, None, None]
+ stats[:] = [0, 0]
+
+ wrapper.__wrapped__ = user_function
+ wrapper.cache_info = cache_info
+ wrapper.cache_clear = cache_clear
+ return update_wrapper(wrapper, user_function)
+
+ return decorating_function
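A short usage sketch of the decorator defined above, exercising the public API named in its docstring (``cache_info``, ``cache_clear``, ``__wrapped__``); the ``square`` function is illustrative::

    from backports.functools_lru_cache import lru_cache

    @lru_cache(maxsize=2)
    def square(x):
        return x * x

    square(2)  # miss
    square(2)  # hit
    square(3)  # miss
    print(square.cache_info())
    # _CacheInfo(hits=1, misses=2, maxsize=2, currsize=2)

    square(4)  # cache is full: evicts the least recently used key (2)

    square.cache_clear()
    print(square.cache_info())
    # _CacheInfo(hits=0, misses=0, maxsize=2, currsize=0)

    print(square.__wrapped__(5))  # 25, bypassing the cache entirely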
diff --git a/contrib/deprecated/python/backports.functools-lru-cache/ya.make b/contrib/deprecated/python/backports.functools-lru-cache/ya.make
new file mode 100644
index 0000000000..40049ffe97
--- /dev/null
+++ b/contrib/deprecated/python/backports.functools-lru-cache/ya.make
@@ -0,0 +1,22 @@
+# Generated by devtools/yamaker (pypi).
+
+PY2_LIBRARY()
+
+VERSION(1.6.6)
+
+LICENSE(MIT)
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ backports/functools_lru_cache.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/deprecated/python/backports.functools-lru-cache/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+)
+
+END()
diff --git a/contrib/deprecated/python/backports.shutil-get-terminal-size/.dist-info/METADATA b/contrib/deprecated/python/backports.shutil-get-terminal-size/.dist-info/METADATA
new file mode 100644
index 0000000000..6a1a0e9375
--- /dev/null
+++ b/contrib/deprecated/python/backports.shutil-get-terminal-size/.dist-info/METADATA
@@ -0,0 +1,44 @@
+Metadata-Version: 2.0
+Name: backports.shutil-get-terminal-size
+Version: 1.0.0
+Summary: A backport of the get_terminal_size function from Python 3.3's shutil.
+Home-page: https://github.com/chrippa/backports.shutil_get_terminal_size
+Author: Christopher Rosell
+Author-email: chrippa@tanuki.se
+License: MIT
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.2
+
+backports.shutil_get_terminal_size
+==================================
+
+A backport of the `get_terminal_size`_ function from Python 3.3's shutil.
+
+Unlike the original version it is written in pure Python rather than C,
+so it might be a tiny bit slower.
+
+.. _get_terminal_size: https://docs.python.org/3/library/shutil.html#shutil.get_terminal_size
+
+
+Example usage
+-------------
+
+ >>> from backports.shutil_get_terminal_size import get_terminal_size
+ >>> get_terminal_size()
+ terminal_size(columns=105, lines=33)
+
+
+
+History
+=======
+
+1.0.0 (2014-08-19)
+------------------
+
+First release.
+
+
diff --git a/contrib/deprecated/python/backports.shutil-get-terminal-size/.dist-info/top_level.txt b/contrib/deprecated/python/backports.shutil-get-terminal-size/.dist-info/top_level.txt
new file mode 100644
index 0000000000..99d2be5b64
--- /dev/null
+++ b/contrib/deprecated/python/backports.shutil-get-terminal-size/.dist-info/top_level.txt
@@ -0,0 +1 @@
+backports
diff --git a/contrib/deprecated/python/backports.shutil-get-terminal-size/LICENSE b/contrib/deprecated/python/backports.shutil-get-terminal-size/LICENSE
new file mode 100644
index 0000000000..d62803cf99
--- /dev/null
+++ b/contrib/deprecated/python/backports.shutil-get-terminal-size/LICENSE
@@ -0,0 +1,22 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Christopher Rosell
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+
diff --git a/contrib/deprecated/python/backports.shutil-get-terminal-size/README.rst b/contrib/deprecated/python/backports.shutil-get-terminal-size/README.rst
new file mode 100644
index 0000000000..385127b52e
--- /dev/null
+++ b/contrib/deprecated/python/backports.shutil-get-terminal-size/README.rst
@@ -0,0 +1,18 @@
+backports.shutil_get_terminal_size
+==================================
+
+A backport of the `get_terminal_size`_ function from Python 3.3's shutil.
+
+Unlike the original version it is written in pure Python rather than C,
+so it might be a tiny bit slower.
+
+.. _get_terminal_size: https://docs.python.org/3/library/shutil.html#shutil.get_terminal_size
+
+
+Example usage
+-------------
+
+ >>> from backports.shutil_get_terminal_size import get_terminal_size
+ >>> get_terminal_size()
+ terminal_size(columns=105, lines=33)
+
diff --git a/contrib/deprecated/python/backports.shutil-get-terminal-size/backports/shutil_get_terminal_size/__init__.py b/contrib/deprecated/python/backports.shutil-get-terminal-size/backports/shutil_get_terminal_size/__init__.py
new file mode 100644
index 0000000000..cfcbdf6671
--- /dev/null
+++ b/contrib/deprecated/python/backports.shutil-get-terminal-size/backports/shutil_get_terminal_size/__init__.py
@@ -0,0 +1,11 @@
+"""A backport of the get_terminal_size function from Python 3.3's shutil."""
+
+__title__ = "backports.shutil_get_terminal_size"
+__version__ = "1.0.0"
+__license__ = "MIT"
+__author__ = "Christopher Rosell"
+__copyright__ = "Copyright 2014 Christopher Rosell"
+
+__all__ = ["get_terminal_size"]
+
+from .get_terminal_size import get_terminal_size
diff --git a/contrib/deprecated/python/backports.shutil-get-terminal-size/backports/shutil_get_terminal_size/get_terminal_size.py b/contrib/deprecated/python/backports.shutil-get-terminal-size/backports/shutil_get_terminal_size/get_terminal_size.py
new file mode 100644
index 0000000000..bbda2ba122
--- /dev/null
+++ b/contrib/deprecated/python/backports.shutil-get-terminal-size/backports/shutil_get_terminal_size/get_terminal_size.py
@@ -0,0 +1,101 @@
+"""This is a backport of shutil.get_terminal_size from Python 3.3.
+
+The original implementation is in C, but here we use the ctypes and
+fcntl modules to create a pure Python version of os.get_terminal_size.
+"""
+
+import os
+import struct
+import sys
+
+from collections import namedtuple
+
+__all__ = ["get_terminal_size"]
+
+
+terminal_size = namedtuple("terminal_size", "columns lines")
+
+try:
+ from ctypes import windll, create_string_buffer
+
+ _handles = {
+ 0: windll.kernel32.GetStdHandle(-10),
+ 1: windll.kernel32.GetStdHandle(-11),
+ 2: windll.kernel32.GetStdHandle(-12),
+ }
+
+ def _get_terminal_size(fd):
+ columns = lines = 0
+
+ try:
+ handle = _handles[fd]
+ csbi = create_string_buffer(22)
+ res = windll.kernel32.GetConsoleScreenBufferInfo(handle, csbi)
+ if res:
+ res = struct.unpack("hhhhHhhhhhh", csbi.raw)
+ left, top, right, bottom = res[5:9]
+ columns = right - left + 1
+ lines = bottom - top + 1
+ except Exception:
+ pass
+
+ return terminal_size(columns, lines)
+
+except (ImportError, OSError):
+ import fcntl
+ import termios
+
+ def _get_terminal_size(fd):
+ try:
+ res = fcntl.ioctl(fd, termios.TIOCGWINSZ, b"\x00" * 4)
+ lines, columns = struct.unpack("hh", res)
+ except Exception:
+ columns = lines = 0
+
+ return terminal_size(columns, lines)
+
+
+def get_terminal_size(fallback=(80, 24)):
+ """Get the size of the terminal window.
+
+ For each of the two dimensions, the environment variable, COLUMNS
+ and LINES respectively, is checked. If the variable is defined and
+ the value is a positive integer, it is used.
+
+ When COLUMNS or LINES is not defined, which is the common case,
+ the terminal connected to sys.__stdout__ is queried
+ by invoking os.get_terminal_size.
+
+ If the terminal size cannot be successfully queried, either because
+ the system doesn't support querying, or because we are not
+ connected to a terminal, the value given in fallback parameter
+ is used. Fallback defaults to (80, 24) which is the default
+ size used by many terminal emulators.
+
+ The value returned is a named tuple of type os.terminal_size.
+ """
+ # Try the environment first
+ try:
+ columns = int(os.environ["COLUMNS"])
+ except (KeyError, ValueError):
+ columns = 0
+
+ try:
+ lines = int(os.environ["LINES"])
+ except (KeyError, ValueError):
+ lines = 0
+
+ # Only query if necessary
+ if columns <= 0 or lines <= 0:
+ try:
+ size = _get_terminal_size(sys.__stdout__.fileno())
+ except (NameError, OSError):
+ size = terminal_size(*fallback)
+
+ if columns <= 0:
+ columns = size.columns
+ if lines <= 0:
+ lines = size.lines
+
+ return terminal_size(columns, lines)
+
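The lookup order documented in the docstring above (environment variables first, then the terminal attached to ``sys.__stdout__``, then the fallback) can be observed directly; a sketch, assuming the backport is installed::

    import os

    from backports.shutil_get_terminal_size import get_terminal_size

    # Positive COLUMNS/LINES values win over the real terminal size.
    os.environ["COLUMNS"] = "120"
    os.environ["LINES"] = "40"
    print(get_terminal_size())  # terminal_size(columns=120, lines=40)

    # Without usable environment values and with no terminal attached
    # (e.g. output piped to a file), the fallback is returned instead.
    del os.environ["COLUMNS"], os.environ["LINES"]
    print(get_terminal_size(fallback=(100, 30)))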
diff --git a/contrib/deprecated/python/backports.shutil-get-terminal-size/ya.make b/contrib/deprecated/python/backports.shutil-get-terminal-size/ya.make
new file mode 100644
index 0000000000..e9b60bde51
--- /dev/null
+++ b/contrib/deprecated/python/backports.shutil-get-terminal-size/ya.make
@@ -0,0 +1,23 @@
+# Generated by devtools/yamaker (pypi).
+
+PY2_LIBRARY()
+
+VERSION(1.0.0)
+
+LICENSE(MIT)
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ backports/shutil_get_terminal_size/__init__.py
+ backports/shutil_get_terminal_size/get_terminal_size.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/deprecated/python/backports.shutil-get-terminal-size/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+)
+
+END()
diff --git a/contrib/deprecated/python/configparser/.dist-info/METADATA b/contrib/deprecated/python/configparser/.dist-info/METADATA
new file mode 100644
index 0000000000..e805cc962c
--- /dev/null
+++ b/contrib/deprecated/python/configparser/.dist-info/METADATA
@@ -0,0 +1,259 @@
+Metadata-Version: 2.1
+Name: configparser
+Version: 4.0.2
+Summary: Updated configparser from Python 3.7 for Python 2.6+.
+Home-page: https://github.com/jaraco/configparser/
+Author: Łukasz Langa
+Author-email: lukasz@langa.pl
+Maintainer: Jason R. Coombs
+Maintainer-email: jaraco@jaraco.com
+License: UNKNOWN
+Keywords: configparser ini parsing conf cfg configuration file
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Requires-Python: >=2.6
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=3.2) ; extra == 'docs'
+Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest (!=3.7.3,>=3.5) ; extra == 'testing'
+Requires-Dist: pytest-checkdocs (>=1.2) ; extra == 'testing'
+Requires-Dist: pytest-flake8 ; extra == 'testing'
+Requires-Dist: pytest-black-multipy ; extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/configparser.svg
+ :target: https://pypi.org/project/configparser
+
+.. image:: https://img.shields.io/pypi/pyversions/configparser.svg
+
+.. image:: https://img.shields.io/travis/jaraco/configparser/master.svg
+ :target: https://travis-ci.org/jaraco/configparser
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+ :alt: Code style: Black
+
+.. .. image:: https://img.shields.io/appveyor/ci/jaraco/configparser/master.svg
+.. :target: https://ci.appveyor.com/project/jaraco/configparser/branch/master
+
+.. image:: https://readthedocs.org/projects/configparser/badge/?version=latest
+ :target: https://configparser.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://tidelift.com/badges/package/pypi/configparser
+ :target: https://tidelift.com/subscription/pkg/pypi-configparser?utm_source=pypi-configparser&utm_medium=readme
+
+
+The ancient ``ConfigParser`` module available in the Python 2.x standard
+library has seen a major update in Python 3.2. This is a backport of those
+changes so that they can be used directly in Python 2.6 - 3.5.
+
+To use the ``configparser`` backport instead of the built-in version on both
+Python 2 and Python 3, simply import it explicitly as a backport::
+
+ from backports import configparser
+
+If you'd like to use the backport on Python 2 and the built-in version on
+Python 3, use that invocation instead::
+
+ import configparser
+
+For detailed documentation consult the vanilla version at
+http://docs.python.org/3/library/configparser.html.
+
+Why you'll love ``configparser``
+--------------------------------
+
+While almost completely compatible with its older brother, ``configparser``
+sports a bunch of interesting new features:
+
+* full mapping protocol access (`more info
+ <http://docs.python.org/3/library/configparser.html#mapping-protocol-access>`_)::
+
+ >>> parser = ConfigParser()
+ >>> parser.read_string("""
+ [DEFAULT]
+ location = upper left
+ visible = yes
+ editable = no
+ color = blue
+
+ [main]
+ title = Main Menu
+ color = green
+
+ [options]
+ title = Options
+ """)
+ >>> parser['main']['color']
+ 'green'
+ >>> parser['main']['editable']
+ 'no'
+ >>> section = parser['options']
+ >>> section['title']
+ 'Options'
+ >>> section['title'] = 'Options (editable: %(editable)s)'
+ >>> section['title']
+ 'Options (editable: no)'
+
+* there's now one default ``ConfigParser`` class, which basically is the old
+ ``SafeConfigParser`` with a bunch of tweaks which make it more predictable for
+ users. Don't need interpolation? Simply use
+  ``ConfigParser(interpolation=None)``; there's no need to use a distinct
+  ``RawConfigParser`` anymore.
+
+* the parser is highly `customizable upon instantiation
+ <http://docs.python.org/3/library/configparser.html#customizing-parser-behaviour>`__
+ supporting things like changing option delimiters, comment characters, the
+ name of the DEFAULT section, the interpolation syntax, etc.
+
+* you can easily create your own interpolation syntax but there are two powerful
+ implementations built-in (`more info
+ <http://docs.python.org/3/library/configparser.html#interpolation-of-values>`__):
+
+ * the classic ``%(string-like)s`` syntax (called ``BasicInterpolation``)
+
+ * a new ``${buildout:like}`` syntax (called ``ExtendedInterpolation``)
+
+* fallback values may be specified in getters (`more info
+ <http://docs.python.org/3/library/configparser.html#fallback-values>`__)::
+
+ >>> config.get('closet', 'monster',
+ ... fallback='No such things as monsters')
+ 'No such things as monsters'
+
+* ``ConfigParser`` objects can now read data directly `from strings
+ <http://docs.python.org/3/library/configparser.html#configparser.ConfigParser.read_string>`__
+ and `from dictionaries
+ <http://docs.python.org/3/library/configparser.html#configparser.ConfigParser.read_dict>`__.
+ That means importing configuration from JSON or specifying default values for
+ the whole configuration (multiple sections) is now a single line of code. Same
+ goes for copying data from another ``ConfigParser`` instance, thanks to its
+ mapping protocol support.
+
+* many smaller tweaks, updates and fixes
+
+A few words about Unicode
+-------------------------
+
+``configparser`` comes from Python 3 and as such it works well with Unicode.
+The library is generally cleaned up in terms of internal data storage and
+reading/writing files. There are a couple of incompatibilities with the old
+``ConfigParser`` due to that. However, the work required to migrate is well
+worth it as it shows the issues that would likely come up during migration of
+your project to Python 3.
+
+The design assumes that Unicode strings are used whenever possible [1]_. That
+gives you the certainty that what's stored in a configuration object is text.
+Once your configuration is read, the rest of your application doesn't have to
+deal with encoding issues. All you have is text [2]_. The only two phases
+when you should explicitly state the encoding are when you read from an
+external source (e.g. a file) or write back.
+
+Versioning
+----------
+
+This project uses `semver <https://semver.org/spec/v2.0.0.html>`_ to
+communicate the impact of various releases while periodically syncing
+with the upstream implementation in CPython.
+`The changelog <https://github.com/jaraco/configparser/blob/master/CHANGES.rst>`_
+serves as a reference indicating which versions incorporate
+which upstream functionality.
+
+Prior to the ``4.0.0`` release, `another scheme
+<https://github.com/jaraco/configparser/blob/3.8.1/README.rst#versioning>`_
+was used to associate the CPython and backports releases.
+
+Maintenance
+-----------
+
+This backport was originally authored by Łukasz Langa, the current vanilla
+``configparser`` maintainer for CPython, and is currently maintained by
+Jason R. Coombs:
+
+* `configparser repository <https://github.com/jaraco/configparser>`_
+
+* `configparser issue tracker <https://github.com/jaraco/configparser/issues>`_
+
+Security Contact
+----------------
+
+To report a security vulnerability, please use the
+`Tidelift security contact <https://tidelift.com/security>`_.
+Tidelift will coordinate the fix and disclosure.
+
+Conversion Process
+------------------
+
+This section is technical and should bother you only if you are wondering how
+this backport is produced. If the implementation details of this backport are
+not important to you, feel free to ignore the following content.
+
+``configparser`` is converted using `python-future
+<http://python-future.org>`_. The project takes the following
+branching approach:
+
+* the ``3.x`` branch holds unchanged files synchronized from the upstream
+ CPython repository. The synchronization is currently done by manually copying
+  the required files and noting which CPython changeset they come from.
+
+* the ``master`` branch holds a version of the ``3.x`` code with some tweaks
+ that make it independent from libraries and constructions unavailable on 2.x.
+ Code on this branch still *must* work on the corresponding Python 3.x but
+  will also work on Python 2.6 and 2.7 (including PyPy). You can check this
+  by running the supplied unit tests with ``tox``.
+
+The process works like this:
+
+1. In the ``3.x`` branch, run ``pip-run -- sync-upstream.py``, which
+ downloads the latest stable release of Python and copies the relevant
+ files from there into their new locations here and then commits those
+ changes with a nice reference to the relevant upstream commit hash.
+
+2. I check for new names in ``__all__`` and update imports in
+ ``configparser.py`` accordingly. I run the tests on Python 3. Commit.
+
+3. I merge the new commit to ``master``. I run ``tox``. Commit.
+
+4. If there are necessary changes, I do them now (on ``master``). Note that
+ the changes should be written in the syntax subset supported by Python
+ 2.6.
+
+5. I run ``tox``. If it works, I update the docs and release the new version.
+   Otherwise, I go back to point 3. I might use ``pasteurize`` to suggest
+   the required changes, but I usually make them manually to keep the
+   resulting code in a nicer form.
+
+
+Footnotes
+---------
+
+.. [1] To somewhat ease migration, passing bytestrings is still supported but
+ they are converted to Unicode for internal storage anyway. This means
+ that for the vast majority of strings used in configuration files, it
+ won't matter if you pass them as bytestrings or Unicode. However, if you
+ pass a bytestring that cannot be converted to Unicode using the naive
+ ASCII codec, a ``UnicodeDecodeError`` will be raised. This is purposeful
+ and helps you manage proper encoding for all content you store in
+ memory, read from various sources and write back.
+
+.. [2] Life gets much easier when you understand that you basically manage
+ **text** in your application. You don't care about bytes but about
+ letters. In that regard the concept of content encoding is meaningless.
+ The only time when you deal with raw bytes is when you write the data to
+ a file. Then you have to specify how your text should be encoded. On
+ the other end, to get meaningful text from a file, the application
+ reading it has to know which encoding was used during its creation. But
+ once the bytes are read and properly decoded, all you have is text. This
+ is especially powerful when you start interacting with multiple data
+ sources. Even if each of them uses a different encoding, inside your
+ application data is held in abstract text form. You can program your
+ business logic without worrying about which data came from which source.
+ You can freely exchange the data you store between sources. Only
+ reading/writing files requires encoding your text to bytes.
+
+
diff --git a/contrib/deprecated/python/configparser/.dist-info/top_level.txt b/contrib/deprecated/python/configparser/.dist-info/top_level.txt
new file mode 100644
index 0000000000..a6cb03ad92
--- /dev/null
+++ b/contrib/deprecated/python/configparser/.dist-info/top_level.txt
@@ -0,0 +1,2 @@
+backports
+configparser
diff --git a/contrib/deprecated/python/configparser/LICENSE b/contrib/deprecated/python/configparser/LICENSE
new file mode 100644
index 0000000000..5e795a61f3
--- /dev/null
+++ b/contrib/deprecated/python/configparser/LICENSE
@@ -0,0 +1,7 @@
+Copyright Jason R. Coombs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/contrib/deprecated/python/configparser/README.rst b/contrib/deprecated/python/configparser/README.rst
new file mode 100644
index 0000000000..cfd4859091
--- /dev/null
+++ b/contrib/deprecated/python/configparser/README.rst
@@ -0,0 +1,229 @@
+.. image:: https://img.shields.io/pypi/v/configparser.svg
+ :target: https://pypi.org/project/configparser
+
+.. image:: https://img.shields.io/pypi/pyversions/configparser.svg
+
+.. image:: https://img.shields.io/travis/jaraco/configparser/master.svg
+ :target: https://travis-ci.org/jaraco/configparser
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/ambv/black
+ :alt: Code style: Black
+
+.. .. image:: https://img.shields.io/appveyor/ci/jaraco/configparser/master.svg
+.. :target: https://ci.appveyor.com/project/jaraco/configparser/branch/master
+
+.. image:: https://readthedocs.org/projects/configparser/badge/?version=latest
+ :target: https://configparser.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://tidelift.com/badges/package/pypi/configparser
+ :target: https://tidelift.com/subscription/pkg/pypi-configparser?utm_source=pypi-configparser&utm_medium=readme
+
+
+The ancient ``ConfigParser`` module available in the Python 2.x standard
+library has seen a major update in Python 3.2. This is a backport of those
+changes so that they can be used directly in Python 2.6 - 3.5.
+
+To use the ``configparser`` backport instead of the built-in version on both
+Python 2 and Python 3, simply import it explicitly as a backport::
+
+ from backports import configparser
+
+If you'd like to use the backport on Python 2 and the built-in version on
+Python 3, use that invocation instead::
+
+ import configparser
+
+For detailed documentation consult the vanilla version at
+http://docs.python.org/3/library/configparser.html.
+
+Why you'll love ``configparser``
+--------------------------------
+
+While almost completely compatible with its older brother, ``configparser``
+sports a bunch of interesting new features:
+
+* full mapping protocol access (`more info
+ <http://docs.python.org/3/library/configparser.html#mapping-protocol-access>`_)::
+
+ >>> parser = ConfigParser()
+ >>> parser.read_string("""
+ [DEFAULT]
+ location = upper left
+ visible = yes
+ editable = no
+ color = blue
+
+ [main]
+ title = Main Menu
+ color = green
+
+ [options]
+ title = Options
+ """)
+ >>> parser['main']['color']
+ 'green'
+ >>> parser['main']['editable']
+ 'no'
+ >>> section = parser['options']
+ >>> section['title']
+ 'Options'
+ >>> section['title'] = 'Options (editable: %(editable)s)'
+ >>> section['title']
+ 'Options (editable: no)'
+
+* there's now one default ``ConfigParser`` class, which basically is the old
+ ``SafeConfigParser`` with a bunch of tweaks which make it more predictable for
+ users. Don't need interpolation? Simply use
+  ``ConfigParser(interpolation=None)``; there's no need to use a distinct
+  ``RawConfigParser`` anymore.
+
+* the parser is highly `customizable upon instantiation
+ <http://docs.python.org/3/library/configparser.html#customizing-parser-behaviour>`__
+ supporting things like changing option delimiters, comment characters, the
+ name of the DEFAULT section, the interpolation syntax, etc.
+
+* you can easily create your own interpolation syntax but there are two powerful
+ implementations built-in (`more info
+ <http://docs.python.org/3/library/configparser.html#interpolation-of-values>`__):
+
+ * the classic ``%(string-like)s`` syntax (called ``BasicInterpolation``)
+
+ * a new ``${buildout:like}`` syntax (called ``ExtendedInterpolation``)
+
+* fallback values may be specified in getters (`more info
+ <http://docs.python.org/3/library/configparser.html#fallback-values>`__)::
+
+ >>> config.get('closet', 'monster',
+ ... fallback='No such things as monsters')
+ 'No such things as monsters'
+
+* ``ConfigParser`` objects can now read data directly `from strings
+ <http://docs.python.org/3/library/configparser.html#configparser.ConfigParser.read_string>`__
+ and `from dictionaries
+ <http://docs.python.org/3/library/configparser.html#configparser.ConfigParser.read_dict>`__.
+ That means importing configuration from JSON or specifying default values for
+ the whole configuration (multiple sections) is now a single line of code. Same
+ goes for copying data from another ``ConfigParser`` instance, thanks to its
+ mapping protocol support.
+
+* many smaller tweaks, updates and fixes
+
+A few words about Unicode
+-------------------------
+
+``configparser`` comes from Python 3 and as such it works well with Unicode.
+The library is generally cleaned up in terms of internal data storage and
+reading/writing files. There are a couple of incompatibilities with the old
+``ConfigParser`` due to that. However, the work required to migrate is well
+worth it as it shows the issues that would likely come up during migration of
+your project to Python 3.
+
+The design assumes that Unicode strings are used whenever possible [1]_. That
+gives you the certainty that what's stored in a configuration object is text.
+Once your configuration is read, the rest of your application doesn't have to
+deal with encoding issues. All you have is text [2]_. The only two phases
+when you should explicitly state the encoding are when you read from an
+external source (e.g. a file) or write back.
+
+Versioning
+----------
+
+This project uses `semver <https://semver.org/spec/v2.0.0.html>`_ to
+communicate the impact of various releases while periodically syncing
+with the upstream implementation in CPython.
+`The changelog <https://github.com/jaraco/configparser/blob/master/CHANGES.rst>`_
+serves as a reference indicating which versions incorporate
+which upstream functionality.
+
+Prior to the ``4.0.0`` release, `another scheme
+<https://github.com/jaraco/configparser/blob/3.8.1/README.rst#versioning>`_
+was used to associate the CPython and backports releases.
+
+Maintenance
+-----------
+
+This backport was originally authored by Łukasz Langa, the current vanilla
+``configparser`` maintainer for CPython, and is currently maintained by
+Jason R. Coombs:
+
+* `configparser repository <https://github.com/jaraco/configparser>`_
+
+* `configparser issue tracker <https://github.com/jaraco/configparser/issues>`_
+
+Security Contact
+----------------
+
+To report a security vulnerability, please use the
+`Tidelift security contact <https://tidelift.com/security>`_.
+Tidelift will coordinate the fix and disclosure.
+
+Conversion Process
+------------------
+
+This section is technical and should bother you only if you are wondering how
+this backport is produced. If the implementation details of this backport are
+not important to you, feel free to ignore the following content.
+
+``configparser`` is converted using `python-future
+<http://python-future.org>`_. The project takes the following
+branching approach:
+
+* the ``3.x`` branch holds unchanged files synchronized from the upstream
+ CPython repository. The synchronization is currently done by manually copying
+  the required files and noting which CPython changeset they come from.
+
+* the ``master`` branch holds a version of the ``3.x`` code with some tweaks
+ that make it independent from libraries and constructions unavailable on 2.x.
+ Code on this branch still *must* work on the corresponding Python 3.x but
+  will also work on Python 2.6 and 2.7 (including PyPy). You can check this
+  by running the supplied unit tests with ``tox``.
+
+The process works like this:
+
+1. In the ``3.x`` branch, run ``pip-run -- sync-upstream.py``, which
+ downloads the latest stable release of Python and copies the relevant
+ files from there into their new locations here and then commits those
+ changes with a nice reference to the relevant upstream commit hash.
+
+2. I check for new names in ``__all__`` and update imports in
+ ``configparser.py`` accordingly. I run the tests on Python 3. Commit.
+
+3. I merge the new commit to ``master``. I run ``tox``. Commit.
+
+4. If there are necessary changes, I do them now (on ``master``). Note that
+ the changes should be written in the syntax subset supported by Python
+ 2.6.
+
+5. I run ``tox``. If it works, I update the docs and release the new version.
+   Otherwise, I go back to point 3. I might use ``pasteurize`` to suggest
+   the required changes, but I usually make them manually to keep the
+   resulting code in a nicer form.
+
+
+Footnotes
+---------
+
+.. [1] To somewhat ease migration, passing bytestrings is still supported but
+ they are converted to Unicode for internal storage anyway. This means
+ that for the vast majority of strings used in configuration files, it
+ won't matter if you pass them as bytestrings or Unicode. However, if you
+ pass a bytestring that cannot be converted to Unicode using the naive
+ ASCII codec, a ``UnicodeDecodeError`` will be raised. This is purposeful
+ and helps you manage proper encoding for all content you store in
+ memory, read from various sources and write back.
+
+.. [2] Life gets much easier when you understand that you basically manage
+ **text** in your application. You don't care about bytes but about
+ letters. In that regard the concept of content encoding is meaningless.
+ The only time when you deal with raw bytes is when you write the data to
+ a file. Then you have to specify how your text should be encoded. On
+ the other end, to get meaningful text from a file, the application
+ reading it has to know which encoding was used during its creation. But
+ once the bytes are read and properly decoded, all you have is text. This
+ is especially powerful when you start interacting with multiple data
+ sources. Even if each of them uses a different encoding, inside your
+ application data is held in abstract text form. You can program your
+ business logic without worrying about which data came from which source.
+ You can freely exchange the data you store between sources. Only
+ reading/writing files requires encoding your text to bytes.
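A short sketch combining the features described above (section and option
names are illustrative)::

    from backports import configparser

    parser = configparser.ConfigParser(
        interpolation=configparser.ExtendedInterpolation())
    parser.read_dict({
        'paths': {'home': '/home/app'},
        'logging': {'file': '${paths:home}/app.log'},
    })
    parser.get('logging', 'file')                    # '/home/app/app.log'
    parser.get('logging', 'level', fallback='INFO')  # 'INFO'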
diff --git a/contrib/deprecated/python/configparser/backports/configparser/__init__.py b/contrib/deprecated/python/configparser/backports/configparser/__init__.py
new file mode 100644
index 0000000000..603d604764
--- /dev/null
+++ b/contrib/deprecated/python/configparser/backports/configparser/__init__.py
@@ -0,0 +1,1473 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# flake8: noqa
+
+"""Configuration file parser.
+
+A configuration file consists of sections, led by a "[section]" header,
+and followed by "name: value" entries, with continuations and such in
+the style of RFC 822.
+
+Intrinsic defaults can be specified by passing them into the
+ConfigParser constructor as a dictionary.
+
+class:
+
+ConfigParser -- responsible for parsing a list of
+ configuration files, and managing the parsed database.
+
+ methods:
+
+ __init__(defaults=None, dict_type=_default_dict, allow_no_value=False,
+ delimiters=('=', ':'), comment_prefixes=('#', ';'),
+ inline_comment_prefixes=None, strict=True,
+ empty_lines_in_values=True, default_section='DEFAULT',
+ interpolation=<unset>, converters=<unset>):
+ Create the parser. When `defaults' is given, it is initialized into the
+        dictionary of intrinsic defaults. The keys must be strings, the values
+ must be appropriate for %()s string interpolation.
+
+ When `dict_type' is given, it will be used to create the dictionary
+ objects for the list of sections, for the options within a section, and
+ for the default values.
+
+ When `delimiters' is given, it will be used as the set of substrings
+ that divide keys from values.
+
+ When `comment_prefixes' is given, it will be used as the set of
+ substrings that prefix comments in empty lines. Comments can be
+ indented.
+
+ When `inline_comment_prefixes' is given, it will be used as the set of
+ substrings that prefix comments in non-empty lines.
+
+ When `strict` is True, the parser won't allow for any section or option
+ duplicates while reading from a single source (file, string or
+ dictionary). Default is True.
+
+ When `empty_lines_in_values' is False (default: True), each empty line
+ marks the end of an option. Otherwise, internal empty lines of
+ a multiline option are kept as part of the value.
+
+ When `allow_no_value' is True (default: False), options without
+ values are accepted; the value presented for these is None.
+
+        When `default_section' is given, the special section is named
+        accordingly. By default it is called ``"DEFAULT"`` but this can
+ be customized to point to any other valid section name. Its current
+ value can be retrieved using the ``parser_instance.default_section``
+ attribute and may be modified at runtime.
+
+ When `interpolation` is given, it should be an Interpolation subclass
+ instance. It will be used as the handler for option value
+ pre-processing when using getters. RawConfigParser objects don't do
+ any sort of interpolation, whereas ConfigParser uses an instance of
+        BasicInterpolation. The library also provides a ``zc.buildout``
+ inspired ExtendedInterpolation implementation.
+
+ When `converters` is given, it should be a dictionary where each key
+ represents the name of a type converter and each value is a callable
+ implementing the conversion from string to the desired datatype. Every
+ converter gets its corresponding get*() method on the parser object and
+ section proxies.
+
+ sections()
+ Return all the configuration section names, sans DEFAULT.
+
+ has_section(section)
+ Return whether the given section exists.
+
+ has_option(section, option)
+ Return whether the given option exists in the given section.
+
+ options(section)
+ Return list of configuration options for the named section.
+
+ read(filenames, encoding=None)
+        Read and parse an iterable of configuration files, given by
+        name. A single filename is also allowed. Non-existing files
+ are ignored. Return list of successfully read files.
+
+ read_file(f, filename=None)
+ Read and parse one configuration file, given as a file object.
+ The filename defaults to f.name; it is only used in error
+ messages (if f has no `name' attribute, the string `<???>' is used).
+
+ read_string(string)
+ Read configuration from a given string.
+
+ read_dict(dictionary)
+ Read configuration from a dictionary. Keys are section names,
+ values are dictionaries with keys and values that should be present
+ in the section. If the used dictionary type preserves order, sections
+ and their keys will be added in order. Values are automatically
+ converted to strings.
+
+ get(section, option, raw=False, vars=None, fallback=_UNSET)
+ Return a string value for the named option. All % interpolations are
+ expanded in the return values, based on the defaults passed into the
+ constructor and the DEFAULT section. Additional substitutions may be
+ provided using the `vars' argument, which must be a dictionary whose
+ contents override any pre-existing defaults. If `option' is a key in
+ `vars', the value from `vars' is used.
+
+    getint(section, option, raw=False, vars=None, fallback=_UNSET)
+ Like get(), but convert value to an integer.
+
+    getfloat(section, option, raw=False, vars=None, fallback=_UNSET)
+ Like get(), but convert value to a float.
+
+    getboolean(section, option, raw=False, vars=None, fallback=_UNSET)
+ Like get(), but convert value to a boolean (currently case
+ insensitively defined as 0, false, no, off for False, and 1, true,
+ yes, on for True). Returns False or True.
+
+ items(section=_UNSET, raw=False, vars=None)
+ If section is given, return a list of tuples with (name, value) for
+ each option in the section. Otherwise, return a list of tuples with
+ (section_name, section_proxy) for each section, including DEFAULTSECT.
+
+ remove_section(section)
+ Remove the given file section and all its options.
+
+ remove_option(section, option)
+ Remove the given option from the given section.
+
+ set(section, option, value)
+ Set the given option.
+
+ write(fp, space_around_delimiters=True)
+ Write the configuration state in .ini format. If
+ `space_around_delimiters' is True (the default), delimiters
+ between keys and values are surrounded by spaces.
+"""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+try:
+ from collections.abc import MutableMapping
+except ImportError:
+ from collections import MutableMapping
+import functools
+import io
+import itertools
+import os
+import re
+import sys
+import warnings
+
+from backports.configparser.helpers import OrderedDict as _default_dict
+from backports.configparser.helpers import ChainMap as _ChainMap
+from backports.configparser.helpers import from_none, open, str, PY2
+from backports.configparser.helpers import PathLike, fspath
+from backports.configparser.helpers import MutableMapping
+
+__all__ = [
+ "NoSectionError",
+ "DuplicateOptionError",
+ "DuplicateSectionError",
+ "NoOptionError",
+ "InterpolationError",
+ "InterpolationDepthError",
+ "InterpolationMissingOptionError",
+ "InterpolationSyntaxError",
+ "ParsingError",
+ "MissingSectionHeaderError",
+ "ConfigParser",
+ "SafeConfigParser",
+ "RawConfigParser",
+ "Interpolation",
+ "BasicInterpolation",
+ "ExtendedInterpolation",
+ "LegacyInterpolation",
+ "SectionProxy",
+ "ConverterMapping",
+ "DEFAULTSECT",
+ "MAX_INTERPOLATION_DEPTH",
+]
+
+DEFAULTSECT = "DEFAULT"
+
+MAX_INTERPOLATION_DEPTH = 10
+
+
+# exception classes
+class Error(Exception):
+ """Base class for ConfigParser exceptions."""
+
+ def __init__(self, msg=''):
+ self.message = msg
+ Exception.__init__(self, msg)
+
+ def __repr__(self):
+ return self.message
+
+ __str__ = __repr__
+
+
+class NoSectionError(Error):
+ """Raised when no section matches a requested option."""
+
+ def __init__(self, section):
+ Error.__init__(self, 'No section: %r' % (section,))
+ self.section = section
+ self.args = (section,)
+
+
+class DuplicateSectionError(Error):
+ """Raised when a section is repeated in an input source.
+
+ Possible repetitions that raise this exception are: multiple creation
+ using the API or in strict parsers when a section is found more than once
+ in a single input file, string or dictionary.
+ """
+
+ def __init__(self, section, source=None, lineno=None):
+ msg = [repr(section), " already exists"]
+ if source is not None:
+ message = ["While reading from ", repr(source)]
+ if lineno is not None:
+ message.append(" [line {0:2d}]".format(lineno))
+ message.append(": section ")
+ message.extend(msg)
+ msg = message
+ else:
+ msg.insert(0, "Section ")
+ Error.__init__(self, "".join(msg))
+ self.section = section
+ self.source = source
+ self.lineno = lineno
+ self.args = (section, source, lineno)
+
+
+class DuplicateOptionError(Error):
+ """Raised by strict parsers when an option is repeated in an input source.
+
+ Current implementation raises this exception only when an option is found
+ more than once in a single file, string or dictionary.
+ """
+
+ def __init__(self, section, option, source=None, lineno=None):
+ msg = [repr(option), " in section ", repr(section), " already exists"]
+ if source is not None:
+ message = ["While reading from ", repr(source)]
+ if lineno is not None:
+ message.append(" [line {0:2d}]".format(lineno))
+ message.append(": option ")
+ message.extend(msg)
+ msg = message
+ else:
+ msg.insert(0, "Option ")
+ Error.__init__(self, "".join(msg))
+ self.section = section
+ self.option = option
+ self.source = source
+ self.lineno = lineno
+ self.args = (section, option, source, lineno)
+
+
+class NoOptionError(Error):
+ """A requested option was not found."""
+
+ def __init__(self, option, section):
+ Error.__init__(self, "No option %r in section: %r" % (option, section))
+ self.option = option
+ self.section = section
+ self.args = (option, section)
+
+
+class InterpolationError(Error):
+ """Base class for interpolation-related exceptions."""
+
+ def __init__(self, option, section, msg):
+ Error.__init__(self, msg)
+ self.option = option
+ self.section = section
+ self.args = (option, section, msg)
+
+
+class InterpolationMissingOptionError(InterpolationError):
+ """A string substitution required a setting which was not available."""
+
+ def __init__(self, option, section, rawval, reference):
+ msg = (
+ "Bad value substitution: option {0!r} in section {1!r} contains "
+ "an interpolation key {2!r} which is not a valid option name. "
+ "Raw value: {3!r}".format(option, section, reference, rawval)
+ )
+ InterpolationError.__init__(self, option, section, msg)
+ self.reference = reference
+ self.args = (option, section, rawval, reference)
+
+
+class InterpolationSyntaxError(InterpolationError):
+ """Raised when the source text contains invalid syntax.
+
+ Current implementation raises this exception when the source text into
+ which substitutions are made does not conform to the required syntax.
+ """
+
+
+class InterpolationDepthError(InterpolationError):
+ """Raised when substitutions are nested too deeply."""
+
+ def __init__(self, option, section, rawval):
+ msg = (
+ "Recursion limit exceeded in value substitution: option {0!r} "
+ "in section {1!r} contains an interpolation key which "
+ "cannot be substituted in {2} steps. Raw value: {3!r}"
+ "".format(option, section, MAX_INTERPOLATION_DEPTH, rawval)
+ )
+ InterpolationError.__init__(self, option, section, msg)
+ self.args = (option, section, rawval)
+
+
+class ParsingError(Error):
+ """Raised when a configuration file does not follow legal syntax."""
+
+ def __init__(self, source=None, filename=None):
+ # Exactly one of `source'/`filename' arguments has to be given.
+ # `filename' kept for compatibility.
+ if filename and source:
+ raise ValueError(
+ "Cannot specify both `filename' and `source'. " "Use `source'."
+ )
+ elif not filename and not source:
+ raise ValueError("Required argument `source' not given.")
+ elif filename:
+ source = filename
+ Error.__init__(self, 'Source contains parsing errors: %r' % source)
+ self.source = source
+ self.errors = []
+ self.args = (source,)
+
+ @property
+ def filename(self):
+ """Deprecated, use `source'."""
+ warnings.warn(
+ "The 'filename' attribute will be removed in future versions. "
+ "Use 'source' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ return self.source
+
+    @filename.setter
+    def filename(self, value):
+        """Deprecated, use `source'."""
+ warnings.warn(
+ "The 'filename' attribute will be removed in future versions. "
+ "Use 'source' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ self.source = value
+
+ def append(self, lineno, line):
+ self.errors.append((lineno, line))
+ self.message += '\n\t[line %2d]: %s' % (lineno, line)
+
+
+class MissingSectionHeaderError(ParsingError):
+ """Raised when a key-value pair is found before any section header."""
+
+ def __init__(self, filename, lineno, line):
+ Error.__init__(
+ self,
+ 'File contains no section headers.\nfile: %r, line: %d\n%r'
+ % (filename, lineno, line),
+ )
+ self.source = filename
+ self.lineno = lineno
+ self.line = line
+ self.args = (filename, lineno, line)
+
+
+# Used in parser getters to indicate the default behaviour when a specific
+# option is not found is to raise an exception. Created to enable `None' as
+# a valid fallback value.
+_UNSET = object()
+
+
+class Interpolation(object):
+ """Dummy interpolation that passes the value through with no changes."""
+
+ def before_get(self, parser, section, option, value, defaults):
+ return value
+
+ def before_set(self, parser, section, option, value):
+ return value
+
+ def before_read(self, parser, section, option, value):
+ return value
+
+ def before_write(self, parser, section, option, value):
+ return value
+
+
+class BasicInterpolation(Interpolation):
+ """Interpolation as implemented in the classic ConfigParser.
+
+ The option values can contain format strings which refer to other values in
+ the same section, or values in the special default section.
+
+ For example:
+
+ something: %(dir)s/whatever
+
+ would resolve the "%(dir)s" to the value of dir. All reference
+ expansions are done late, on demand. If a user needs to use a bare % in
+ a configuration file, she can escape it by writing %%. Other % usage
+ is considered a user error and raises `InterpolationSyntaxError'."""
+
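+    # Illustrative example (not from upstream): with ``dir = /var/data`` and
+    # ``log = %(dir)s/app.log`` in one section, get(section, 'log') returns
+    # '/var/data/app.log'; a literal percent sign is written as '%%'.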
+ _KEYCRE = re.compile(r"%\(([^)]+)\)s")
+
+ def before_get(self, parser, section, option, value, defaults):
+ L = []
+ self._interpolate_some(parser, option, L, value, section, defaults, 1)
+ return ''.join(L)
+
+ def before_set(self, parser, section, option, value):
+ tmp_value = value.replace('%%', '') # escaped percent signs
+ tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax
+ if '%' in tmp_value:
+ raise ValueError(
+ "invalid interpolation syntax in %r at "
+ "position %d" % (value, tmp_value.find('%'))
+ )
+ return value
+
+ def _interpolate_some(self, parser, option, accum, rest, section, map, depth):
+ rawval = parser.get(section, option, raw=True, fallback=rest)
+ if depth > MAX_INTERPOLATION_DEPTH:
+ raise InterpolationDepthError(option, section, rawval)
+ while rest:
+ p = rest.find("%")
+ if p < 0:
+ accum.append(rest)
+ return
+ if p > 0:
+ accum.append(rest[:p])
+ rest = rest[p:]
+ # p is no longer used
+ c = rest[1:2]
+ if c == "%":
+ accum.append("%")
+ rest = rest[2:]
+ elif c == "(":
+ m = self._KEYCRE.match(rest)
+ if m is None:
+ raise InterpolationSyntaxError(
+ option,
+ section,
+ "bad interpolation variable reference %r" % rest,
+ )
+ var = parser.optionxform(m.group(1))
+ rest = rest[m.end() :]
+ try:
+ v = map[var]
+ except KeyError:
+ raise from_none(
+ InterpolationMissingOptionError(option, section, rawval, var)
+ )
+ if "%" in v:
+ self._interpolate_some(
+ parser, option, accum, v, section, map, depth + 1
+ )
+ else:
+ accum.append(v)
+ else:
+ raise InterpolationSyntaxError(
+ option,
+ section,
+ "'%%' must be followed by '%%' or '(', " "found: %r" % (rest,),
+ )
+
+
+class ExtendedInterpolation(Interpolation):
+ """Advanced variant of interpolation, supports the syntax used by
+ `zc.buildout'. Enables interpolation between sections."""
+
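+    # Illustrative example (not from upstream): ``${paths:home}/app.log``
+    # pulls option ``home`` from section ``paths``; a bare ``${home}`` looks
+    # in the current section, and ``$$`` yields a literal ``$``.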
+ _KEYCRE = re.compile(r"\$\{([^}]+)\}")
+
+ def before_get(self, parser, section, option, value, defaults):
+ L = []
+ self._interpolate_some(parser, option, L, value, section, defaults, 1)
+ return ''.join(L)
+
+ def before_set(self, parser, section, option, value):
+ tmp_value = value.replace('$$', '') # escaped dollar signs
+ tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax
+ if '$' in tmp_value:
+ raise ValueError(
+ "invalid interpolation syntax in %r at "
+ "position %d" % (value, tmp_value.find('$'))
+ )
+ return value
+
+ def _interpolate_some(self, parser, option, accum, rest, section, map, depth):
+ rawval = parser.get(section, option, raw=True, fallback=rest)
+ if depth > MAX_INTERPOLATION_DEPTH:
+ raise InterpolationDepthError(option, section, rawval)
+ while rest:
+ p = rest.find("$")
+ if p < 0:
+ accum.append(rest)
+ return
+ if p > 0:
+ accum.append(rest[:p])
+ rest = rest[p:]
+ # p is no longer used
+ c = rest[1:2]
+ if c == "$":
+ accum.append("$")
+ rest = rest[2:]
+ elif c == "{":
+ m = self._KEYCRE.match(rest)
+ if m is None:
+ raise InterpolationSyntaxError(
+ option,
+ section,
+ "bad interpolation variable reference %r" % rest,
+ )
+ path = m.group(1).split(':')
+ rest = rest[m.end() :]
+ sect = section
+ opt = option
+ try:
+ if len(path) == 1:
+ opt = parser.optionxform(path[0])
+ v = map[opt]
+ elif len(path) == 2:
+ sect = path[0]
+ opt = parser.optionxform(path[1])
+ v = parser.get(sect, opt, raw=True)
+ else:
+ raise InterpolationSyntaxError(
+ option, section, "More than one ':' found: %r" % (rest,)
+ )
+ except (KeyError, NoSectionError, NoOptionError):
+ raise from_none(
+ InterpolationMissingOptionError(
+ option, section, rawval, ":".join(path)
+ )
+ )
+ if "$" in v:
+ self._interpolate_some(
+ parser,
+ opt,
+ accum,
+ v,
+ sect,
+ dict(parser.items(sect, raw=True)),
+ depth + 1,
+ )
+ else:
+ accum.append(v)
+ else:
+ raise InterpolationSyntaxError(
+ option,
+ section,
+ "'$' must be followed by '$' or '{', " "found: %r" % (rest,),
+ )
+
+
+class LegacyInterpolation(Interpolation):
+ """Deprecated interpolation used in old versions of ConfigParser.
+ Use BasicInterpolation or ExtendedInterpolation instead."""
+
+ _KEYCRE = re.compile(r"%\(([^)]*)\)s|.")
+
+ def before_get(self, parser, section, option, value, vars):
+ rawval = value
+ depth = MAX_INTERPOLATION_DEPTH
+ while depth: # Loop through this until it's done
+ depth -= 1
+ if value and "%(" in value:
+ replace = functools.partial(self._interpolation_replace, parser=parser)
+ value = self._KEYCRE.sub(replace, value)
+ try:
+ value = value % vars
+ except KeyError as e:
+ raise from_none(
+ InterpolationMissingOptionError(
+ option, section, rawval, e.args[0]
+ )
+ )
+ else:
+ break
+ if value and "%(" in value:
+ raise InterpolationDepthError(option, section, rawval)
+ return value
+
+ def before_set(self, parser, section, option, value):
+ return value
+
+ @staticmethod
+ def _interpolation_replace(match, parser):
+ s = match.group(1)
+ if s is None:
+ return match.group()
+ else:
+ return "%%(%s)s" % parser.optionxform(s)
+
+
+class RawConfigParser(MutableMapping):
+ """ConfigParser that does not do interpolation."""
+
+ # Regular expressions for parsing section headers and options
+ _SECT_TMPL = r"""
+ \[ # [
+ (?P<header>[^]]+) # very permissive!
+ \] # ]
+ """
+ _OPT_TMPL = r"""
+ (?P<option>.*?) # very permissive!
+ \s*(?P<vi>{delim})\s* # any number of space/tab,
+ # followed by any of the
+ # allowed delimiters,
+ # followed by any space/tab
+ (?P<value>.*)$ # everything up to eol
+ """
+ _OPT_NV_TMPL = r"""
+ (?P<option>.*?) # very permissive!
+ \s*(?: # any number of space/tab,
+ (?P<vi>{delim})\s* # optionally followed by
+ # any of the allowed
+ # delimiters, followed by any
+ # space/tab
+ (?P<value>.*))?$ # everything up to eol
+ """
+ # Interpolation algorithm to be used if the user does not specify another
+ _DEFAULT_INTERPOLATION = Interpolation()
+ # Compiled regular expression for matching sections
+ SECTCRE = re.compile(_SECT_TMPL, re.VERBOSE)
+ # Compiled regular expression for matching options with typical separators
+ OPTCRE = re.compile(_OPT_TMPL.format(delim="=|:"), re.VERBOSE)
+ # Compiled regular expression for matching options with optional values
+ # delimited using typical separators
+ OPTCRE_NV = re.compile(_OPT_NV_TMPL.format(delim="=|:"), re.VERBOSE)
+ # Compiled regular expression for matching leading whitespace in a line
+ NONSPACECRE = re.compile(r"\S")
+ # Possible boolean values in the configuration.
+ BOOLEAN_STATES = {
+ '1': True,
+ 'yes': True,
+ 'true': True,
+ 'on': True,
+ '0': False,
+ 'no': False,
+ 'false': False,
+ 'off': False,
+ }
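+    # Illustrative note: getboolean() lower-cases the raw value and looks it
+    # up in this mapping, so 'YES', 'On' and '1' are True; values absent from
+    # the mapping raise ValueError.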
+
+ def __init__(
+ self, defaults=None, dict_type=_default_dict, allow_no_value=False, **kwargs
+ ):
+
+ # keyword-only arguments
+ delimiters = kwargs.get('delimiters', ('=', ':'))
+ comment_prefixes = kwargs.get('comment_prefixes', ('#', ';'))
+ inline_comment_prefixes = kwargs.get('inline_comment_prefixes', None)
+ strict = kwargs.get('strict', True)
+ empty_lines_in_values = kwargs.get('empty_lines_in_values', True)
+ default_section = kwargs.get('default_section', DEFAULTSECT)
+ interpolation = kwargs.get('interpolation', _UNSET)
+ converters = kwargs.get('converters', _UNSET)
+
+ self._dict = dict_type
+ self._sections = self._dict()
+ self._defaults = self._dict()
+ self._converters = ConverterMapping(self)
+ self._proxies = self._dict()
+ self._proxies[default_section] = SectionProxy(self, default_section)
+ self._delimiters = tuple(delimiters)
+ if delimiters == ('=', ':'):
+ self._optcre = self.OPTCRE_NV if allow_no_value else self.OPTCRE
+ else:
+ d = "|".join(re.escape(d) for d in delimiters)
+ if allow_no_value:
+ self._optcre = re.compile(self._OPT_NV_TMPL.format(delim=d), re.VERBOSE)
+ else:
+ self._optcre = re.compile(self._OPT_TMPL.format(delim=d), re.VERBOSE)
+ self._comment_prefixes = tuple(comment_prefixes or ())
+ self._inline_comment_prefixes = tuple(inline_comment_prefixes or ())
+ self._strict = strict
+ self._allow_no_value = allow_no_value
+ self._empty_lines_in_values = empty_lines_in_values
+ self.default_section = default_section
+ self._interpolation = interpolation
+ if self._interpolation is _UNSET:
+ self._interpolation = self._DEFAULT_INTERPOLATION
+ if self._interpolation is None:
+ self._interpolation = Interpolation()
+ if converters is not _UNSET:
+ self._converters.update(converters)
+ if defaults:
+ self._read_defaults(defaults)
+
+ def defaults(self):
+ return self._defaults
+
+ def sections(self):
+ """Return a list of section names, excluding [DEFAULT]"""
+ # self._sections will never have [DEFAULT] in it
+ return list(self._sections.keys())
+
+ def add_section(self, section):
+ """Create a new section in the configuration.
+
+ Raise DuplicateSectionError if a section by the specified name
+ already exists. Raise ValueError if name is DEFAULT.
+ """
+ if section == self.default_section:
+ raise ValueError('Invalid section name: %r' % section)
+
+ if section in self._sections:
+ raise DuplicateSectionError(section)
+ self._sections[section] = self._dict()
+ self._proxies[section] = SectionProxy(self, section)
+
+ def has_section(self, section):
+ """Indicate whether the named section is present in the configuration.
+
+ The DEFAULT section is not acknowledged.
+ """
+ return section in self._sections
+
+ def options(self, section):
+ """Return a list of option names for the given section name."""
+ try:
+ opts = self._sections[section].copy()
+ except KeyError:
+ raise from_none(NoSectionError(section))
+ opts.update(self._defaults)
+ return list(opts.keys())
+
+ def read(self, filenames, encoding=None):
+ """Read and parse a filename or an iterable of filenames.
+
+ Files that cannot be opened are silently ignored; this is
+ designed so that you can specify an iterable of potential
+ configuration file locations (e.g. current directory, user's
+ home directory, systemwide directory), and all existing
+ configuration files in the iterable will be read. A single
+ filename may also be given.
+
+ Return list of successfully read files.
+ """
+ if isinstance(filenames, (str, bytes, PathLike)):
+ filenames = [filenames]
+ read_ok = []
+ for filename in filenames:
+ if isinstance(filename, PathLike):
+ filename = fspath(filename)
+ try:
+ with open(filename, encoding=encoding) as fp:
+ self._read(fp, filename)
+ except IOError:
+ continue
+ read_ok.append(filename)
+ return read_ok
+
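+    # Illustrative note: read(['/etc/app.ini', 'app.local.ini']) parses only
+    # the files that can be opened and returns just those names, in order.
+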
+ def read_file(self, f, source=None):
+ """Like read() but the argument must be a file-like object.
+
+ The `f' argument must be iterable, returning one line at a time.
+ Optional second argument is the `source' specifying the name of the
+ file being read. If not given, it is taken from f.name. If `f' has no
+ `name' attribute, `<???>' is used.
+ """
+ if source is None:
+ try:
+ source = f.name
+ except AttributeError:
+ source = '<???>'
+ self._read(f, source)
+
+ def read_string(self, string, source='<string>'):
+ """Read configuration from a given string."""
+ sfile = io.StringIO(string)
+ self.read_file(sfile, source)
+
+ def read_dict(self, dictionary, source='<dict>'):
+ """Read configuration from a dictionary.
+
+ Keys are section names, values are dictionaries with keys and values
+ that should be present in the section. If the used dictionary type
+ preserves order, sections and their keys will be added in order.
+
+ All types held in the dictionary are converted to strings during
+ reading, including section names, option names and keys.
+
+ Optional second argument is the `source' specifying the name of the
+ dictionary being read.
+ """
+ elements_added = set()
+ for section, keys in dictionary.items():
+ section = str(section)
+ try:
+ self.add_section(section)
+ except (DuplicateSectionError, ValueError):
+ if self._strict and section in elements_added:
+ raise
+ elements_added.add(section)
+ for key, value in keys.items():
+ key = self.optionxform(str(key))
+ if value is not None:
+ value = str(value)
+ if self._strict and (section, key) in elements_added:
+ raise DuplicateOptionError(section, key, source)
+ elements_added.add((section, key))
+ self.set(section, key, value)
+
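+    # Illustrative note: read_dict({'sect': {'opt': 1}}) creates section
+    # 'sect' with opt = '1' (section names, keys and values are coerced to
+    # strings), so defaults for many sections can be loaded in one call.
+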
+ def readfp(self, fp, filename=None):
+ """Deprecated, use read_file instead."""
+ warnings.warn(
+ "This method will be removed in future versions. "
+ "Use 'parser.read_file()' instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ self.read_file(fp, source=filename)
+
+ def get(self, section, option, **kwargs):
+ """Get an option value for a given section.
+
+ If `vars' is provided, it must be a dictionary. The option is looked up
+ in `vars' (if provided), `section', and in `DEFAULTSECT' in that order.
+ If the key is not found and `fallback' is provided, it is used as
+ a fallback value. `None' can be provided as a `fallback' value.
+
+ If interpolation is enabled and the optional argument `raw' is False,
+ all interpolations are expanded in the return values.
+
+ Arguments `raw', `vars', and `fallback' are keyword only.
+
+ The section DEFAULT is special.
+ """
+ # keyword-only arguments
+ raw = kwargs.get('raw', False)
+ vars = kwargs.get('vars', None)
+ fallback = kwargs.get('fallback', _UNSET)
+
+ try:
+ d = self._unify_values(section, vars)
+ except NoSectionError:
+ if fallback is _UNSET:
+ raise
+ else:
+ return fallback
+ option = self.optionxform(option)
+ try:
+ value = d[option]
+ except KeyError:
+ if fallback is _UNSET:
+ raise NoOptionError(option, section)
+ else:
+ return fallback
+
+ if raw or value is None:
+ return value
+ else:
+ return self._interpolation.before_get(self, section, option, value, d)
+
+ def _get(self, section, conv, option, **kwargs):
+ return conv(self.get(section, option, **kwargs))
+
+ def _get_conv(self, section, option, conv, **kwargs):
+ # keyword-only arguments
+ kwargs.setdefault('raw', False)
+ kwargs.setdefault('vars', None)
+ fallback = kwargs.pop('fallback', _UNSET)
+ try:
+ return self._get(section, conv, option, **kwargs)
+ except (NoSectionError, NoOptionError):
+ if fallback is _UNSET:
+ raise
+ return fallback
+
+ # getint, getfloat and getboolean provided directly for backwards compat
+ def getint(self, section, option, **kwargs):
+ # keyword-only arguments
+ kwargs.setdefault('raw', False)
+ kwargs.setdefault('vars', None)
+ kwargs.setdefault('fallback', _UNSET)
+ return self._get_conv(section, option, int, **kwargs)
+
+ def getfloat(self, section, option, **kwargs):
+ # keyword-only arguments
+ kwargs.setdefault('raw', False)
+ kwargs.setdefault('vars', None)
+ kwargs.setdefault('fallback', _UNSET)
+ return self._get_conv(section, option, float, **kwargs)
+
+ def getboolean(self, section, option, **kwargs):
+ # keyword-only arguments
+ kwargs.setdefault('raw', False)
+ kwargs.setdefault('vars', None)
+ kwargs.setdefault('fallback', _UNSET)
+ return self._get_conv(section, option, self._convert_to_boolean, **kwargs)
+
+ def items(self, section=_UNSET, raw=False, vars=None):
+ """Return a list of (name, value) tuples for each option in a section.
+
+ All % interpolations are expanded in the return values, based on the
+ defaults passed into the constructor, unless the optional argument
+ `raw' is true. Additional substitutions may be provided using the
+        `vars' argument, which must be a dictionary whose contents override
+ any pre-existing defaults.
+
+ The section DEFAULT is special.
+ """
+ if section is _UNSET:
+ return super(RawConfigParser, self).items()
+ d = self._defaults.copy()
+ try:
+ d.update(self._sections[section])
+ except KeyError:
+ if section != self.default_section:
+ raise NoSectionError(section)
+ orig_keys = list(d.keys())
+ # Update with the entry specific variables
+ if vars:
+ for key, value in vars.items():
+ d[self.optionxform(key)] = value
+ value_getter = lambda option: self._interpolation.before_get(
+ self, section, option, d[option], d
+ )
+ if raw:
+ value_getter = lambda option: d[option]
+ return [(option, value_getter(option)) for option in orig_keys]
+
+ def popitem(self):
+ """Remove a section from the parser and return it as
+ a (section_name, section_proxy) tuple. If no section is present, raise
+ KeyError.
+
+ The section DEFAULT is never returned because it cannot be removed.
+ """
+ for key in self.sections():
+ value = self[key]
+ del self[key]
+ return key, value
+ raise KeyError
+
+ def optionxform(self, optionstr):
+ return optionstr.lower()
+
+ def has_option(self, section, option):
+ """Check for the existence of a given option in a given section.
+ If the specified `section' is None or an empty string, DEFAULT is
+ assumed. If the specified `section' does not exist, returns False."""
+ if not section or section == self.default_section:
+ option = self.optionxform(option)
+ return option in self._defaults
+ elif section not in self._sections:
+ return False
+ else:
+ option = self.optionxform(option)
+ return option in self._sections[section] or option in self._defaults
+
+ def set(self, section, option, value=None):
+ """Set an option."""
+ if value:
+ value = self._interpolation.before_set(self, section, option, value)
+ if not section or section == self.default_section:
+ sectdict = self._defaults
+ else:
+ try:
+ sectdict = self._sections[section]
+ except KeyError:
+ raise from_none(NoSectionError(section))
+ sectdict[self.optionxform(option)] = value
+
+ def write(self, fp, space_around_delimiters=True):
+ """Write an .ini-format representation of the configuration state.
+
+ If `space_around_delimiters' is True (the default), delimiters
+ between keys and values are surrounded by spaces.
+ """
+ if space_around_delimiters:
+ d = " {0} ".format(self._delimiters[0])
+ else:
+ d = self._delimiters[0]
+ if self._defaults:
+ self._write_section(fp, self.default_section, self._defaults.items(), d)
+ for section in self._sections:
+ self._write_section(fp, section, self._sections[section].items(), d)
+
+ def _write_section(self, fp, section_name, section_items, delimiter):
+ """Write a single section to the specified `fp'."""
+ fp.write("[{0}]\n".format(section_name))
+ for key, value in section_items:
+ value = self._interpolation.before_write(self, section_name, key, value)
+ if value is not None or not self._allow_no_value:
+ value = delimiter + str(value).replace('\n', '\n\t')
+ else:
+ value = ""
+ fp.write("{0}{1}\n".format(key, value))
+ fp.write("\n")
+
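+    # Illustrative note: with space_around_delimiters=True the pair
+    # ('key', 'value') is written as 'key = value'; continuation lines of
+    # multi-line values are indented with a tab on output.
+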
+ def remove_option(self, section, option):
+ """Remove an option."""
+ if not section or section == self.default_section:
+ sectdict = self._defaults
+ else:
+ try:
+ sectdict = self._sections[section]
+ except KeyError:
+ raise from_none(NoSectionError(section))
+ option = self.optionxform(option)
+ existed = option in sectdict
+ if existed:
+ del sectdict[option]
+ return existed
+
+ def remove_section(self, section):
+ """Remove a file section."""
+ existed = section in self._sections
+ if existed:
+ del self._sections[section]
+ del self._proxies[section]
+ return existed
+
+ def __getitem__(self, key):
+ if key != self.default_section and not self.has_section(key):
+ raise KeyError(key)
+ return self._proxies[key]
+
+ def __setitem__(self, key, value):
+ # To conform with the mapping protocol, overwrites existing values in
+ # the section.
+ if key in self and self[key] is value:
+ return
+ # XXX this is not atomic if read_dict fails at any point. Then again,
+ # no update method in configparser is atomic in this implementation.
+ if key == self.default_section:
+ self._defaults.clear()
+ elif key in self._sections:
+ self._sections[key].clear()
+ self.read_dict({key: value})
+
+ def __delitem__(self, key):
+ if key == self.default_section:
+ raise ValueError("Cannot remove the default section.")
+ if not self.has_section(key):
+ raise KeyError(key)
+ self.remove_section(key)
+
+ def __contains__(self, key):
+ return key == self.default_section or self.has_section(key)
+
+ def __len__(self):
+ return len(self._sections) + 1 # the default section
+
+ def __iter__(self):
+        # XXX does it break when the underlying container state changes?
+ return itertools.chain((self.default_section,), self._sections.keys())
+
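+    # Illustrative note: thanks to the mapping protocol above,
+    # parser['sect']['opt'] reads a value, 'sect' in parser tests for a
+    # section, and len(parser) counts sections plus the default section.
+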
+ def _read(self, fp, fpname):
+ """Parse a sectioned configuration file.
+
+ Each section in a configuration file contains a header, indicated by
+ a name in square brackets (`[]'), plus key/value options, indicated by
+ `name' and `value' delimited with a specific substring (`=' or `:' by
+ default).
+
+ Values can span multiple lines, as long as they are indented deeper
+ than the first line of the value. Depending on the parser's mode, blank
+ lines may be treated as parts of multiline values or ignored.
+
+ Configuration files may include comments, prefixed by specific
+ characters (`#' and `;' by default). Comments may appear on their own
+ in an otherwise empty line or may be entered in lines holding values or
+ section names.
+ """
+ elements_added = set()
+ cursect = None # None, or a dictionary
+ sectname = None
+ optname = None
+ lineno = 0
+ indent_level = 0
+ e = None # None, or an exception
+ for lineno, line in enumerate(fp, start=1):
+ comment_start = sys.maxsize
+ # strip inline comments
+ inline_prefixes = dict((p, -1) for p in self._inline_comment_prefixes)
+ while comment_start == sys.maxsize and inline_prefixes:
+ next_prefixes = {}
+ for prefix, index in inline_prefixes.items():
+ index = line.find(prefix, index + 1)
+ if index == -1:
+ continue
+ next_prefixes[prefix] = index
+ if index == 0 or (index > 0 and line[index - 1].isspace()):
+ comment_start = min(comment_start, index)
+ inline_prefixes = next_prefixes
+ # strip full line comments
+ for prefix in self._comment_prefixes:
+ if line.strip().startswith(prefix):
+ comment_start = 0
+ break
+ if comment_start == sys.maxsize:
+ comment_start = None
+ value = line[:comment_start].strip()
+ if not value:
+ if self._empty_lines_in_values:
+ # add empty line to the value, but only if there was no
+ # comment on the line
+ if (
+ comment_start is None
+ and cursect is not None
+ and optname
+ and cursect[optname] is not None
+ ):
+ cursect[optname].append('') # newlines added at join
+ else:
+ # empty line marks end of value
+ indent_level = sys.maxsize
+ continue
+ # continuation line?
+ first_nonspace = self.NONSPACECRE.search(line)
+ cur_indent_level = first_nonspace.start() if first_nonspace else 0
+ if cursect is not None and optname and cur_indent_level > indent_level:
+ cursect[optname].append(value)
+ # a section header or option header?
+ else:
+ indent_level = cur_indent_level
+ # is it a section header?
+ mo = self.SECTCRE.match(value)
+ if mo:
+ sectname = mo.group('header')
+ if sectname in self._sections:
+ if self._strict and sectname in elements_added:
+ raise DuplicateSectionError(sectname, fpname, lineno)
+ cursect = self._sections[sectname]
+ elements_added.add(sectname)
+ elif sectname == self.default_section:
+ cursect = self._defaults
+ else:
+ cursect = self._dict()
+ self._sections[sectname] = cursect
+ self._proxies[sectname] = SectionProxy(self, sectname)
+ elements_added.add(sectname)
+ # So sections can't start with a continuation line
+ optname = None
+ # no section header in the file?
+ elif cursect is None:
+ raise MissingSectionHeaderError(fpname, lineno, line)
+ # an option line?
+ else:
+ mo = self._optcre.match(value)
+ if mo:
+ optname, vi, optval = mo.group('option', 'vi', 'value')
+ if not optname:
+ e = self._handle_error(e, fpname, lineno, line)
+ optname = self.optionxform(optname.rstrip())
+ if self._strict and (sectname, optname) in elements_added:
+ raise DuplicateOptionError(
+ sectname, optname, fpname, lineno
+ )
+ elements_added.add((sectname, optname))
+ # This check is fine because the OPTCRE cannot
+ # match if it would set optval to None
+ if optval is not None:
+ optval = optval.strip()
+ cursect[optname] = [optval]
+ else:
+ # valueless option handling
+ cursect[optname] = None
+ else:
+ # a non-fatal parsing error occurred. set up the
+ # exception but keep going. the exception will be
+ # raised at the end of the file and will contain a
+ # list of all bogus lines
+ e = self._handle_error(e, fpname, lineno, line)
+ self._join_multiline_values()
+ # if any parsing errors occurred, raise an exception
+ if e:
+ raise e
+
+ def _join_multiline_values(self):
+ defaults = self.default_section, self._defaults
+ all_sections = itertools.chain((defaults,), self._sections.items())
+ for section, options in all_sections:
+ for name, val in options.items():
+ if isinstance(val, list):
+ val = '\n'.join(val).rstrip()
+ options[name] = self._interpolation.before_read(
+ self, section, name, val
+ )
+
+ def _read_defaults(self, defaults):
+ """Read the defaults passed in the initializer.
+ Note: values can be non-string."""
+ for key, value in defaults.items():
+ self._defaults[self.optionxform(key)] = value
+
+ def _handle_error(self, exc, fpname, lineno, line):
+ if not exc:
+ exc = ParsingError(fpname)
+ exc.append(lineno, repr(line))
+ return exc
+
+ def _unify_values(self, section, vars):
+ """Create a sequence of lookups with 'vars' taking priority over
+ the 'section' which takes priority over the DEFAULTSECT.
+
+ """
+ sectiondict = {}
+ try:
+ sectiondict = self._sections[section]
+ except KeyError:
+ if section != self.default_section:
+ raise NoSectionError(section)
+ # Update with the entry specific variables
+ vardict = {}
+ if vars:
+ for key, value in vars.items():
+ if value is not None:
+ value = str(value)
+ vardict[self.optionxform(key)] = value
+ return _ChainMap(vardict, sectiondict, self._defaults)
+
+ def _convert_to_boolean(self, value):
+ """Return a boolean value translating from other types if necessary.
+ """
+ if value.lower() not in self.BOOLEAN_STATES:
+ raise ValueError('Not a boolean: %s' % value)
+ return self.BOOLEAN_STATES[value.lower()]
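+ # Illustrative note (not in the original source): the default
+ # BOOLEAN_STATES map '1'/'yes'/'true'/'on' to True and
+ # '0'/'no'/'false'/'off' to False.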
+
+ def _validate_value_types(self, **kwargs):
+ """Raises a TypeError for non-string values.
+
+ The only legal non-string value if we allow valueless
+ options is None, so we need to check if the value is a
+ string if:
+ - we do not allow valueless options, or
+ - we allow valueless options but the value is not None
+
+ For compatibility reasons this method is not used in classic set()
+ for RawConfigParsers. It is invoked in every case for mapping protocol
+ access and in ConfigParser.set().
+ """
+ # keyword-only arguments
+ section = kwargs.get('section', "")
+ option = kwargs.get('option', "")
+ value = kwargs.get('value', "")
+
+ if PY2 and bytes in (type(section), type(option), type(value)):
+ # we allow for a little unholy magic for Python 2 so that
+ # people not using unicode_literals can still use the library
+ # conveniently
+ warnings.warn(
+ "You passed a bytestring. Implicitly decoding as UTF-8 string."
+ " This will not work on Python 3. Please switch to using"
+ " Unicode strings across the board.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+ if isinstance(section, bytes):
+ section = section.decode('utf8')
+ if isinstance(option, bytes):
+ option = option.decode('utf8')
+ if isinstance(value, bytes):
+ value = value.decode('utf8')
+
+ if not isinstance(section, str):
+ raise TypeError("section names must be strings")
+ if not isinstance(option, str):
+ raise TypeError("option keys must be strings")
+ if not self._allow_no_value or value:
+ if not isinstance(value, str):
+ raise TypeError("option values must be strings")
+
+ return section, option, value
+
+ @property
+ def converters(self):
+ return self._converters
+
+
+class ConfigParser(RawConfigParser):
+ """ConfigParser implementing interpolation."""
+
+ _DEFAULT_INTERPOLATION = BasicInterpolation()
+
+ def set(self, section, option, value=None):
+ """Set an option. Extends RawConfigParser.set by validating type and
+ interpolation syntax on the value."""
+ _, option, value = self._validate_value_types(option=option, value=value)
+ super(ConfigParser, self).set(section, option, value)
+
+ def add_section(self, section):
+ """Create a new section in the configuration. Extends
+ RawConfigParser.add_section by validating if the section name is
+ a string."""
+ section, _, _ = self._validate_value_types(section=section)
+ super(ConfigParser, self).add_section(section)
+
+ def _read_defaults(self, defaults):
+ """Reads the defaults passed in the initializer, implicitly converting
+ values to strings like the rest of the API.
+
+ Does not perform interpolation for backwards compatibility.
+ """
+ try:
+ hold_interpolation = self._interpolation
+ self._interpolation = Interpolation()
+ self.read_dict({self.default_section: defaults})
+ finally:
+ self._interpolation = hold_interpolation
+
+
+class SafeConfigParser(ConfigParser):
+ """ConfigParser alias for backwards compatibility purposes."""
+
+ def __init__(self, *args, **kwargs):
+ super(SafeConfigParser, self).__init__(*args, **kwargs)
+ warnings.warn(
+ "The SafeConfigParser class has been renamed to ConfigParser "
+ "in Python 3.2. This alias will be removed in future versions."
+ " Use ConfigParser directly instead.",
+ DeprecationWarning,
+ stacklevel=2,
+ )
+
+
+class SectionProxy(MutableMapping):
+ """A proxy for a single section from a parser."""
+
+ def __init__(self, parser, name):
+ """Creates a view on a section of the specified `name` in `parser`."""
+ self._parser = parser
+ self._name = name
+ for conv in parser.converters:
+ key = 'get' + conv
+ getter = functools.partial(self.get, _impl=getattr(parser, key))
+ setattr(self, key, getter)
+
+ def __repr__(self):
+ return '<Section: {0}>'.format(self._name)
+
+ def __getitem__(self, key):
+ if not self._parser.has_option(self._name, key):
+ raise KeyError(key)
+ return self._parser.get(self._name, key)
+
+ def __setitem__(self, key, value):
+ _, key, value = self._parser._validate_value_types(option=key, value=value)
+ return self._parser.set(self._name, key, value)
+
+ def __delitem__(self, key):
+ if not (
+ self._parser.has_option(self._name, key)
+ and self._parser.remove_option(self._name, key)
+ ):
+ raise KeyError(key)
+
+ def __contains__(self, key):
+ return self._parser.has_option(self._name, key)
+
+ def __len__(self):
+ return len(self._options())
+
+ def __iter__(self):
+ return self._options().__iter__()
+
+ def _options(self):
+ if self._name != self._parser.default_section:
+ return self._parser.options(self._name)
+ else:
+ return self._parser.defaults()
+
+ @property
+ def parser(self):
+ # The parser object of the proxy is read-only.
+ return self._parser
+
+ @property
+ def name(self):
+ # The name of the section on a proxy is read-only.
+ return self._name
+
+ def get(self, option, fallback=None, **kwargs):
+ """Get an option value.
+
+ Unless `fallback` is provided, `None` will be returned if the option
+ is not found.
+
+ """
+ # keyword-only arguments
+ kwargs.setdefault('raw', False)
+ kwargs.setdefault('vars', None)
+ _impl = kwargs.pop('_impl', None)
+ # If `_impl` is provided, it should be a getter method on the parser
+ # object that provides the desired type conversion.
+ if not _impl:
+ _impl = self._parser.get
+ return _impl(self._name, option, fallback=fallback, **kwargs)
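+ # Illustrative example (not in the original source): for a proxy
+ # section = parser['s'], section.getint('answer', fallback=0) routes
+ # through parser.getint via the _impl mechanism above.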
+
+
+class ConverterMapping(MutableMapping):
+ """Enables reuse of get*() methods between the parser and section proxies.
+
+ If a parser class implements a getter directly, the value for the given
+ key will be ``None``. The presence of the converter name here enables
+ section proxies to find and use the implementation on the parser class.
+ """
+
+ GETTERCRE = re.compile(r"^get(?P<name>.+)$")
+
+ def __init__(self, parser):
+ self._parser = parser
+ self._data = {}
+ for getter in dir(self._parser):
+ m = self.GETTERCRE.match(getter)
+ if not m or not callable(getattr(self._parser, getter)):
+ continue
+ self._data[m.group('name')] = None # See class docstring.
+
+ def __getitem__(self, key):
+ return self._data[key]
+
+ def __setitem__(self, key, value):
+ try:
+ k = 'get' + key
+ except TypeError:
+ raise ValueError(
+ 'Incompatible key: {} (type: {})'.format(key, type(key))
+ )
+ if k == 'get':
+ raise ValueError('Incompatible key: cannot use "" as a name')
+ self._data[key] = value
+ func = functools.partial(self._parser._get_conv, conv=value)
+ func.converter = value
+ setattr(self._parser, k, func)
+ for proxy in self._parser.values():
+ getter = functools.partial(proxy.get, _impl=func)
+ setattr(proxy, k, getter)
+
+ def __delitem__(self, key):
+ try:
+ k = 'get' + (key or None)
+ except TypeError:
+ raise KeyError(key)
+ del self._data[key]
+ for inst in itertools.chain((self._parser,), self._parser.values()):
+ try:
+ delattr(inst, k)
+ except AttributeError:
+ # don't raise since the entry was present in _data, silently
+ # clean up
+ continue
+
+ def __iter__(self):
+ return iter(self._data)
+
+ def __len__(self):
+ return len(self._data)
diff --git a/contrib/deprecated/python/configparser/backports/configparser/helpers.py b/contrib/deprecated/python/configparser/backports/configparser/helpers.py
new file mode 100644
index 0000000000..e7eb72243f
--- /dev/null
+++ b/contrib/deprecated/python/configparser/backports/configparser/helpers.py
@@ -0,0 +1,274 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import abc
+import os
+
+try:
+ from collections.abc import MutableMapping
+except ImportError:
+ from collections import MutableMapping
+
+try:
+ from collections import UserDict
+except ImportError:
+ from UserDict import UserDict
+
+try:
+ from collections import OrderedDict
+except ImportError:
+ from ordereddict import OrderedDict
+
+try:
+ import pathlib
+except ImportError:
+ pathlib = None
+
+from io import open
+import sys
+
+try:
+ from thread import get_ident
+except ImportError:
+ try:
+ from _thread import get_ident
+ except ImportError:
+ from _dummy_thread import get_ident
+
+
+__all__ = ['UserDict', 'OrderedDict', 'open']
+
+
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+
+native_str = str
+str = type('str')
+
+
+def from_none(exc):
+ """raise from_none(ValueError('a')) == raise ValueError('a') from None"""
+ exc.__cause__ = None
+ exc.__suppress_context__ = True
+ return exc
+
+
+# from reprlib 3.2.1
+def recursive_repr(fillvalue='...'):
+ 'Decorator to make a repr function return fillvalue for a recursive call'
+
+ def decorating_function(user_function):
+ repr_running = set()
+
+ def wrapper(self):
+ key = id(self), get_ident()
+ if key in repr_running:
+ return fillvalue
+ repr_running.add(key)
+ try:
+ result = user_function(self)
+ finally:
+ repr_running.discard(key)
+ return result
+
+ # Can't use functools.wraps() here because of bootstrap issues
+ wrapper.__module__ = getattr(user_function, '__module__')
+ wrapper.__doc__ = getattr(user_function, '__doc__')
+ wrapper.__name__ = getattr(user_function, '__name__')
+ wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
+ return wrapper
+
+ return decorating_function
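+# Illustrative usage (not in the original source):
+#
+#   class Node(object):
+#       @recursive_repr()
+#       def __repr__(self):
+#           return 'Node({!r})'.format(self.children)
+#
+# A self-referential Node then renders as 'Node([...])' instead of
+# recursing forever.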
+
+
+# from collections 3.2.1
+class _ChainMap(MutableMapping):
+ ''' A ChainMap groups multiple dicts (or other mappings) together
+ to create a single, updateable view.
+
+ The underlying mappings are stored in a list. That list is public and can
+ be accessed or updated using the *maps* attribute. There is no other state.
+
+ Lookups search the underlying mappings successively until a key is found.
+ In contrast, writes, updates, and deletions only operate on the first
+ mapping.
+
+ '''
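+ # Illustrative example (not in the original source):
+ #
+ #   cm = _ChainMap({'a': 1}, {'a': 2, 'b': 3})
+ #   cm['a']        # -> 1 (first mapping wins)
+ #   cm['b']        # -> 3 (found in a later mapping)
+ #   cm['b'] = 0    # writes only to the first mapping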
+
+ def __init__(self, *maps):
+ '''Initialize a ChainMap by setting *maps* to the given mappings.
+ If no mappings are provided, a single empty dictionary is used.
+
+ '''
+ self.maps = list(maps) or [{}] # always at least one map
+
+ def __missing__(self, key):
+ raise KeyError(key)
+
+ def __getitem__(self, key):
+ for mapping in self.maps:
+ try:
+ # can't use 'key in mapping' with defaultdict
+ return mapping[key]
+ except KeyError:
+ pass
+ # support subclasses that define __missing__
+ return self.__missing__(key)
+
+ def get(self, key, default=None):
+ return self[key] if key in self else default
+
+ def __len__(self):
+ # reuses stored hash values if possible
+ return len(set().union(*self.maps))
+
+ def __iter__(self):
+ return iter(set().union(*self.maps))
+
+ def __contains__(self, key):
+ return any(key in m for m in self.maps)
+
+ @recursive_repr()
+ def __repr__(self):
+ return '{0.__class__.__name__}({1})'.format(
+ self, ', '.join(map(repr, self.maps))
+ )
+
+ @classmethod
+ def fromkeys(cls, iterable, *args):
+ 'Create a ChainMap with a single dict created from the iterable.'
+ return cls(dict.fromkeys(iterable, *args))
+
+ def copy(self):
+ """
+ New ChainMap or subclass with a new copy of
+ maps[0] and refs to maps[1:]
+ """
+ return self.__class__(self.maps[0].copy(), *self.maps[1:])
+
+ __copy__ = copy
+
+ def new_child(self): # like Django's Context.push()
+ 'New ChainMap with a new dict followed by all previous maps.'
+ return self.__class__({}, *self.maps)
+
+ @property
+ def parents(self): # like Django's Context.pop()
+ 'New ChainMap from maps[1:].'
+ return self.__class__(*self.maps[1:])
+
+ def __setitem__(self, key, value):
+ self.maps[0][key] = value
+
+ def __delitem__(self, key):
+ try:
+ del self.maps[0][key]
+ except KeyError:
+ raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+ def popitem(self):
+ """
+ Remove and return an item pair from maps[0].
+ Raise KeyError if maps[0] is empty.
+ """
+ try:
+ return self.maps[0].popitem()
+ except KeyError:
+ raise KeyError('No keys found in the first mapping.')
+
+ def pop(self, key, *args):
+ """
+ Remove *key* from maps[0] and return its value.
+ Raise KeyError if *key* not in maps[0].
+ """
+
+ try:
+ return self.maps[0].pop(key, *args)
+ except KeyError:
+ raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+ def clear(self):
+ 'Clear maps[0], leaving maps[1:] intact.'
+ self.maps[0].clear()
+
+
+try:
+ from collections import ChainMap
+except ImportError:
+ ChainMap = _ChainMap
+
+
+_ABC = getattr(
+ abc,
+ 'ABC',
+ # Python 3.3 compatibility
+ abc.ABCMeta(native_str('__ABC'), (object,), dict(__metaclass__=abc.ABCMeta)),
+)
+
+
+class _PathLike(_ABC):
+
+ """Abstract base class for implementing the file system path protocol."""
+
+ @abc.abstractmethod
+ def __fspath__(self):
+ """Return the file system path representation of the object."""
+ raise NotImplementedError
+
+ @classmethod
+ def __subclasshook__(cls, subclass):
+ return bool(
+ hasattr(subclass, '__fspath__')
+ # workaround for Python 3.5
+ or pathlib
+ and issubclass(subclass, pathlib.Path)
+ )
+
+
+PathLike = getattr(os, 'PathLike', _PathLike)
+
+
+def _fspath(path):
+ """Return the path representation of a path-like object.
+
+ If str or bytes is passed in, it is returned unchanged. Otherwise the
+ os.PathLike interface is used to get the path representation. If the
+ path representation is not str or bytes, TypeError is raised. If the
+ provided path is not str, bytes, or os.PathLike, TypeError is raised.
+ """
+ if isinstance(path, (str, bytes)):
+ return path
+
+ if not hasattr(path, '__fspath__') and isinstance(path, pathlib.Path):
+ # workaround for Python 3.5
+ return str(path)
+
+ # Work from the object's type to match method resolution of other magic
+ # methods.
+ path_type = type(path)
+ try:
+ path_repr = path_type.__fspath__(path)
+ except AttributeError:
+
+ if hasattr(path_type, '__fspath__'):
+ raise
+ else:
+ raise TypeError(
+ "expected str, bytes or os.PathLike object, "
+ "not " + path_type.__name__
+ )
+ if isinstance(path_repr, (str, bytes)):
+ return path_repr
+ else:
+ raise TypeError(
+ "expected {}.__fspath__() to return str or bytes, "
+ "not {}".format(path_type.__name__, type(path_repr).__name__)
+ )
+
+
+fspath = getattr(os, 'fspath', _fspath)
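+# Illustrative usage (not in the original source):
+#
+#   fspath('a/b')                   # -> 'a/b' (str passes through)
+#   fspath(pathlib.Path('a', 'b'))  # -> the path as str, e.g. 'a/b'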
diff --git a/contrib/deprecated/python/configparser/configparser.py b/contrib/deprecated/python/configparser/configparser.py
new file mode 100644
index 0000000000..0a18360239
--- /dev/null
+++ b/contrib/deprecated/python/configparser/configparser.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""Convenience module importing everything from backports.configparser."""
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+from backports.configparser import (
+ RawConfigParser,
+ ConfigParser,
+ SafeConfigParser,
+ SectionProxy,
+ Interpolation,
+ BasicInterpolation,
+ ExtendedInterpolation,
+ LegacyInterpolation,
+ NoSectionError,
+ DuplicateSectionError,
+ DuplicateOptionError,
+ NoOptionError,
+ InterpolationError,
+ InterpolationMissingOptionError,
+ InterpolationSyntaxError,
+ InterpolationDepthError,
+ ParsingError,
+ MissingSectionHeaderError,
+ ConverterMapping,
+ DEFAULTSECT,
+ MAX_INTERPOLATION_DEPTH,
+)
+
+from backports.configparser import Error, _UNSET, _default_dict, _ChainMap # noqa: F401
+
+__all__ = [
+ "NoSectionError",
+ "DuplicateOptionError",
+ "DuplicateSectionError",
+ "NoOptionError",
+ "InterpolationError",
+ "InterpolationDepthError",
+ "InterpolationMissingOptionError",
+ "InterpolationSyntaxError",
+ "ParsingError",
+ "MissingSectionHeaderError",
+ "ConfigParser",
+ "SafeConfigParser",
+ "RawConfigParser",
+ "Interpolation",
+ "BasicInterpolation",
+ "ExtendedInterpolation",
+ "LegacyInterpolation",
+ "SectionProxy",
+ "ConverterMapping",
+ "DEFAULTSECT",
+ "MAX_INTERPOLATION_DEPTH",
+]
+
+# NOTE: names missing from __all__ are imported anyway for backwards compatibility.
diff --git a/contrib/deprecated/python/configparser/ya.make b/contrib/deprecated/python/configparser/ya.make
new file mode 100644
index 0000000000..6859a6033a
--- /dev/null
+++ b/contrib/deprecated/python/configparser/ya.make
@@ -0,0 +1,24 @@
+# Generated by devtools/yamaker (pypi).
+
+PY2_LIBRARY()
+
+VERSION(4.0.2)
+
+LICENSE(MIT)
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ backports/configparser/__init__.py
+ backports/configparser/helpers.py
+ configparser.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/deprecated/python/configparser/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+)
+
+END()
diff --git a/contrib/deprecated/python/enum34/.dist-info/METADATA b/contrib/deprecated/python/enum34/.dist-info/METADATA
new file mode 100644
index 0000000000..fd095a2a8c
--- /dev/null
+++ b/contrib/deprecated/python/enum34/.dist-info/METADATA
@@ -0,0 +1,62 @@
+Metadata-Version: 2.1
+Name: enum34
+Version: 1.1.10
+Summary: Python 3.4 Enum backported to 3.3, 3.2, 3.1, 2.7, 2.6, 2.5, and 2.4
+Home-page: https://bitbucket.org/stoneleaf/enum34
+Author: Ethan Furman
+Author-email: ethan@stoneleaf.us
+License: BSD License
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Topic :: Software Development
+Classifier: Programming Language :: Python :: 2.4
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.3
+Provides: enum
+
+enum --- support for enumerations
+========================================
+
+An enumeration is a set of symbolic names (members) bound to unique, constant
+values. Within an enumeration, the members can be compared by identity, and
+the enumeration itself can be iterated over.
+
+ from enum import Enum
+
+ class Fruit(Enum):
+ apple = 1
+ banana = 2
+ orange = 3
+
+ list(Fruit)
+ # [<Fruit.apple: 1>, <Fruit.banana: 2>, <Fruit.orange: 3>]
+
+ len(Fruit)
+ # 3
+
+ Fruit.banana
+ # <Fruit.banana: 2>
+
+ Fruit['banana']
+ # <Fruit.banana: 2>
+
+ Fruit(2)
+ # <Fruit.banana: 2>
+
+ Fruit.banana is Fruit['banana'] is Fruit(2)
+ # True
+
+ Fruit.banana.name
+ # 'banana'
+
+ Fruit.banana.value
+ # 2
+
+Repository and Issue Tracker at https://bitbucket.org/stoneleaf/enum34.
+
+
diff --git a/contrib/deprecated/python/enum34/.dist-info/top_level.txt b/contrib/deprecated/python/enum34/.dist-info/top_level.txt
new file mode 100644
index 0000000000..e3caefb45c
--- /dev/null
+++ b/contrib/deprecated/python/enum34/.dist-info/top_level.txt
@@ -0,0 +1 @@
+enum
diff --git a/contrib/deprecated/python/enum34/README b/contrib/deprecated/python/enum34/README
new file mode 100644
index 0000000000..aa2333d8df
--- /dev/null
+++ b/contrib/deprecated/python/enum34/README
@@ -0,0 +1,3 @@
+enum34 is the new Python stdlib enum module available in Python 3.4,
+backported for previous versions of Python from 2.4 to 3.3.
+Tested on 2.6, 2.7, and 3.3+.
diff --git a/contrib/deprecated/python/enum34/enum/LICENSE b/contrib/deprecated/python/enum34/enum/LICENSE
new file mode 100644
index 0000000000..9003b8850e
--- /dev/null
+++ b/contrib/deprecated/python/enum34/enum/LICENSE
@@ -0,0 +1,32 @@
+Copyright (c) 2013, Ethan Furman.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions
+are met:
+
+ Redistributions of source code must retain the above
+ copyright notice, this list of conditions and the
+ following disclaimer.
+
+ Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials
+ provided with the distribution.
+
+ Neither the name Ethan Furman nor the names of any
+ contributors may be used to endorse or promote products
+ derived from this software without specific prior written
+ permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
diff --git a/contrib/deprecated/python/enum34/enum/README b/contrib/deprecated/python/enum34/enum/README
new file mode 100644
index 0000000000..aa2333d8df
--- /dev/null
+++ b/contrib/deprecated/python/enum34/enum/README
@@ -0,0 +1,3 @@
+enum34 is the new Python stdlib enum module available in Python 3.4,
+backported for previous versions of Python from 2.4 to 3.3.
+Tested on 2.6, 2.7, and 3.3+.
diff --git a/contrib/deprecated/python/enum34/enum/__init__.py b/contrib/deprecated/python/enum34/enum/__init__.py
new file mode 100644
index 0000000000..a0672b9fad
--- /dev/null
+++ b/contrib/deprecated/python/enum34/enum/__init__.py
@@ -0,0 +1,852 @@
+"""Python Enumerations"""
+
+import sys as _sys
+
+__all__ = ['Enum', 'IntEnum', 'unique']
+
+version = 1, 1, 10
+
+pyver = float('%s.%s' % _sys.version_info[:2])
+
+ALLOW_SYNONYMS = '__allow_synonyms__'
+
+try:
+ any
+except NameError:
+ def any(iterable):
+ for element in iterable:
+ if element:
+ return True
+ return False
+
+try:
+ from collections import OrderedDict
+except ImportError:
+ OrderedDict = None
+
+try:
+ basestring
+except NameError:
+ # In Python 2, basestring is the ancestor of both str and unicode;
+ # in Python 3 it's just str, but was missing in 3.1
+ basestring = str
+
+try:
+ unicode
+except NameError:
+ # In Python 3 unicode no longer exists (it's just str)
+ unicode = str
+
+class _RouteClassAttributeToGetattr(object):
+ """Route attribute access on a class to __getattr__.
+
+ This is a descriptor, used to define attributes that act differently when
+ accessed through an instance and through a class. Instance access remains
+ normal, but access to an attribute through a class will be routed to the
+ class's __getattr__ method; this is done by raising AttributeError.
+
+ """
+ def __init__(self, fget=None):
+ self.fget = fget
+
+ def __get__(self, instance, ownerclass=None):
+ if instance is None:
+ raise AttributeError()
+ return self.fget(instance)
+
+ def __set__(self, instance, value):
+ raise AttributeError("can't set attribute")
+
+ def __delete__(self, instance):
+ raise AttributeError("can't delete attribute")
+
+
+def _is_descriptor(obj):
+ """Returns True if obj is a descriptor, False otherwise."""
+ return (
+ hasattr(obj, '__get__') or
+ hasattr(obj, '__set__') or
+ hasattr(obj, '__delete__'))
+
+
+def _is_dunder(name):
+ """Returns True if a __dunder__ name, False otherwise."""
+ return (name[:2] == name[-2:] == '__' and
+ name[2:3] != '_' and
+ name[-3:-2] != '_' and
+ len(name) > 4)
+
+
+def _is_sunder(name):
+ """Returns True if a _sunder_ name, False otherwise."""
+ return (name[0] == name[-1] == '_' and
+ name[1:2] != '_' and
+ name[-2:-1] != '_' and
+ len(name) > 2)
+
+
+def _make_class_unpicklable(cls):
+ """Make the given class un-picklable."""
+ def _break_on_call_reduce(self, protocol=None):
+ raise TypeError('%r cannot be pickled' % self)
+ cls.__reduce_ex__ = _break_on_call_reduce
+ cls.__module__ = '<unknown>'
+
+
+class _EnumDict(dict):
+ """Track enum member order and ensure member names are not reused.
+
+ EnumMeta will use the names found in self._member_names as the
+ enumeration member names.
+
+ """
+ def __init__(self):
+ super(_EnumDict, self).__init__()
+ self._member_names = []
+
+ def __setitem__(self, key, value):
+ """Changes anything not dundered or not a descriptor.
+
+ If a descriptor is added with the same name as an enum member, the name
+ is removed from _member_names (this may leave a hole in the numerical
+ sequence of values).
+
+ If an enum member name is used twice, an error is raised; duplicate
+ values are not checked for.
+
+ Single underscore (sunder) names are reserved.
+
+ Note: in 3.x __order__ is simply discarded as an unnecessary
+ leftover from 2.x
+
+ """
+ if pyver >= 3.0 and key in ('_order_', '__order__'):
+ return
+ elif key == '__order__':
+ key = '_order_'
+ if _is_sunder(key):
+ if key != '_order_':
+ raise ValueError('_names_ are reserved for future Enum use')
+ elif _is_dunder(key):
+ pass
+ elif key in self._member_names:
+ # descriptor overwriting an enum?
+ raise TypeError('Attempted to reuse key: %r' % key)
+ elif not _is_descriptor(value):
+ if key in self:
+ # enum overwriting a descriptor?
+ raise TypeError('Key already defined as: %r' % self[key])
+ self._member_names.append(key)
+ super(_EnumDict, self).__setitem__(key, value)
+
+
+# Dummy value for Enum as EnumMeta explicitly checks for it, but of course
+# until EnumMeta finishes running the first time the Enum class doesn't exist.
+# This is also why there are checks in EnumMeta like `if Enum is not None`
+Enum = None
+
+
+class EnumMeta(type):
+ """Metaclass for Enum"""
+ @classmethod
+ def __prepare__(metacls, cls, bases):
+ return _EnumDict()
+
+ def __new__(metacls, cls, bases, classdict):
+ # an Enum class is final once enumeration items have been defined; it
+ # cannot be mixed with other types (int, float, etc.) if it has an
+ # inherited __new__ unless a new __new__ is defined (or the resulting
+ # class will fail).
+ if type(classdict) is dict:
+ original_dict = classdict
+ classdict = _EnumDict()
+ for k, v in original_dict.items():
+ classdict[k] = v
+
+ allow_synonyms = classdict.get(ALLOW_SYNONYMS, True)
+ member_type, first_enum = metacls._get_mixins_(bases)
+ __new__, save_new, use_args = metacls._find_new_(classdict, member_type,
+ first_enum)
+ # save enum items into separate mapping so they don't get baked into
+ # the new class
+ members = dict((k, classdict[k]) for k in classdict._member_names)
+ for name in classdict._member_names:
+ del classdict[name]
+
+ # py2 support for definition order
+ _order_ = classdict.get('_order_')
+ if _order_ is None:
+ if pyver < 3.0:
+ try:
+ _order_ = [name for (name, value) in sorted(members.items(), key=lambda item: item[1])]
+ except TypeError:
+ _order_ = [name for name in sorted(members.keys())]
+ else:
+ _order_ = classdict._member_names
+ else:
+ del classdict['_order_']
+ if pyver < 3.0:
+ if isinstance(_order_, basestring):
+ _order_ = _order_.replace(',', ' ').split()
+ aliases = [name for name in members if name not in _order_]
+ _order_ += aliases
+
+ # check for illegal enum names (any others?)
+ invalid_names = set(members) & set(['mro'])
+ if invalid_names:
+ raise ValueError('Invalid enum member name(s): %s' % (
+ ', '.join(invalid_names), ))
+
+ # save attributes from super classes so we know if we can take
+ # the shortcut of storing members in the class dict
+ base_attributes = set([a for b in bases for a in b.__dict__])
+ # create our new Enum type
+ enum_class = super(EnumMeta, metacls).__new__(metacls, cls, bases, classdict)
+ enum_class._member_names_ = [] # names in random order
+ if OrderedDict is not None:
+ enum_class._member_map_ = OrderedDict()
+ else:
+ enum_class._member_map_ = {} # name->value map
+ enum_class._member_type_ = member_type
+
+ # Reverse value->name map for hashable values.
+ enum_class._value2member_map_ = {}
+
+ # instantiate them, checking for duplicates as we go
+ # we instantiate first instead of checking for duplicates first in case
+ # a custom __new__ is doing something funky with the values -- such as
+ # auto-numbering ;)
+ if __new__ is None:
+ __new__ = enum_class.__new__
+
+ val2name = {}
+ for member_name in _order_:
+ value = members[member_name]
+ if not allow_synonyms:
+ if value in val2name:
+ raise ValueError(
+ 'allow_synonyms=False forbids multiple names of the same value; '
+ 'Members {!r} and {!r} break this'.format(val2name[value], member_name)
+ )
+ val2name[value] = member_name
+
+ if not isinstance(value, tuple):
+ args = (value, )
+ else:
+ args = value
+ if member_type is tuple: # special case for tuple enums
+ args = (args, ) # wrap it one more time
+ if not use_args or not args:
+ enum_member = __new__(enum_class)
+ if not hasattr(enum_member, '_value_'):
+ enum_member._value_ = value
+ else:
+ enum_member = __new__(enum_class, *args)
+ if not hasattr(enum_member, '_value_'):
+ enum_member._value_ = member_type(*args)
+ value = enum_member._value_
+ enum_member._name_ = member_name
+ enum_member.__objclass__ = enum_class
+ enum_member.__init__(*args)
+ # If another member with the same value was already defined, the
+ # new member becomes an alias to the existing one.
+ for name, canonical_member in (enum_class._member_map_.items() if allow_synonyms else ()):
+ if canonical_member.value == enum_member._value_:
+ enum_member = canonical_member
+ break
+ else:
+ # Aliases don't appear in member names (only in __members__).
+ enum_class._member_names_.append(member_name)
+ # performance boost for any member that would not shadow
+ # a DynamicClassAttribute (aka _RouteClassAttributeToGetattr)
+ if member_name not in base_attributes:
+ setattr(enum_class, member_name, enum_member)
+ # now add to _member_map_
+ enum_class._member_map_[member_name] = enum_member
+ try:
+ # This may fail if value is not hashable. We can't add the value
+ # to the map, and by-value lookups for this value will be
+ # linear.
+ enum_class._value2member_map_[value] = enum_member
+ except TypeError:
+ pass
+
+
+ # If a custom type is mixed into the Enum, and it does not know how
+ # to pickle itself, pickle.dumps will succeed but pickle.loads will
+ # fail. Rather than have the error show up later and possibly far
+ # from the source, sabotage the pickle protocol for this class so
+ # that pickle.dumps also fails.
+ #
+ # However, if the new class implements its own __reduce_ex__, do not
+ # sabotage -- it's on them to make sure it works correctly. We use
+ # __reduce_ex__ instead of any of the others as it is preferred by
+ # pickle over __reduce__, and it handles all pickle protocols.
+ unpicklable = False
+ if '__reduce_ex__' not in classdict:
+ if member_type is not object:
+ methods = ('__getnewargs_ex__', '__getnewargs__',
+ '__reduce_ex__', '__reduce__')
+ if not any(m in member_type.__dict__ for m in methods):
+ _make_class_unpicklable(enum_class)
+ unpicklable = True
+
+
+ # double check that repr and friends are not the mixin's or various
+ # things break (such as pickle)
+ for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'):
+ class_method = getattr(enum_class, name)
+ obj_method = getattr(member_type, name, None)
+ enum_method = getattr(first_enum, name, None)
+ if name not in classdict and class_method is not enum_method:
+ if name == '__reduce_ex__' and unpicklable:
+ continue
+ setattr(enum_class, name, enum_method)
+
+ # method resolution and ints do not play nicely together;
+ # Python versions before 2.6 use __cmp__
+
+ if pyver < 2.6:
+
+ if issubclass(enum_class, int):
+ setattr(enum_class, '__cmp__', getattr(int, '__cmp__'))
+
+ elif pyver < 3.0:
+
+ if issubclass(enum_class, int):
+ for method in (
+ '__le__',
+ '__lt__',
+ '__gt__',
+ '__ge__',
+ '__eq__',
+ '__ne__',
+ '__hash__',
+ ):
+ setattr(enum_class, method, getattr(int, method))
+
+ # replace any other __new__ with our own (as long as Enum is not None,
+ # anyway) -- again, this is to support pickle
+ if Enum is not None:
+ # if the user defined their own __new__, save it before it gets
+ # clobbered in case they subclass later
+ if save_new:
+ setattr(enum_class, '__member_new__', enum_class.__dict__['__new__'])
+ setattr(enum_class, '__new__', Enum.__dict__['__new__'])
+ return enum_class
+
+ def __bool__(cls):
+ """
+ classes/types should always be True.
+ """
+ return True
+
+ def __call__(cls, value, names=None, module=None, type=None, start=1, allow_synonyms=True):
+ """Either returns an existing member, or creates a new enum class.
+
+ This method is used both when an enum class is given a value to match
+ to an enumeration member (i.e. Color(3)) and for the functional API
+ (i.e. Color = Enum('Color', names='red green blue')).
+
+ When used for the functional API: `module`, if set, will be stored in
+ the new class' __module__ attribute; `type`, if set, will be mixed in
+ as the first base class.
+
+ Note: if `module` is not set this routine will attempt to discover the
+ calling module by walking the frame stack; if this is unsuccessful
+ the resulting class will not be pickleable.
+
+ """
+ if names is None: # simple value lookup
+ return cls.__new__(cls, value)
+ # otherwise, functional API: we're creating a new Enum type
+ return cls._create_(value, names, module=module, type=type, start=start, allow_synonyms=allow_synonyms)
+
+ def __contains__(cls, member):
+ return isinstance(member, cls) and member.name in cls._member_map_
+
+ def __delattr__(cls, attr):
+ # nicer error message when someone tries to delete an attribute
+ # (see issue19025).
+ if attr in cls._member_map_:
+ raise AttributeError(
+ "%s: cannot delete Enum member." % cls.__name__)
+ super(EnumMeta, cls).__delattr__(attr)
+
+ def __dir__(self):
+ return (['__class__', '__doc__', '__members__', '__module__'] +
+ self._member_names_)
+
+ @property
+ def __members__(cls):
+ """Returns a mapping of member name->value.
+
+ This mapping lists all enum members, including aliases. Note that this
+ is a copy of the internal mapping.
+
+ """
+ return cls._member_map_.copy()
+
+ def __getattr__(cls, name):
+ """Return the enum member matching `name`
+
+ We use __getattr__ instead of descriptors or inserting into the enum
+ class' __dict__ in order to support `name` and `value` being both
+ properties for enum members (which live in the class' __dict__) and
+ enum members themselves.
+
+ """
+ if _is_dunder(name):
+ raise AttributeError(name)
+ try:
+ return cls._member_map_[name]
+ except KeyError:
+ raise AttributeError(name)
+
+ def __getitem__(cls, name):
+ return cls._member_map_[name]
+
+ def __iter__(cls):
+ return (cls._member_map_[name] for name in cls._member_names_)
+
+ def __reversed__(cls):
+ return (cls._member_map_[name] for name in reversed(cls._member_names_))
+
+ def __len__(cls):
+ return len(cls._member_names_)
+
+ __nonzero__ = __bool__
+
+ def __repr__(cls):
+ return "<enum %r>" % cls.__name__
+
+ def __setattr__(cls, name, value):
+ """Block attempts to reassign Enum members.
+
+ A simple assignment to the class namespace only changes one of the
+ several possible ways to get an Enum member from the Enum class,
+ resulting in an inconsistent Enumeration.
+
+ """
+ member_map = cls.__dict__.get('_member_map_', {})
+ if name in member_map:
+ raise AttributeError('Cannot reassign members.')
+ super(EnumMeta, cls).__setattr__(name, value)
+
+ def _create_(cls, class_name, names=None, module=None, type=None, start=1, allow_synonyms=True):
+ """Convenience method to create a new Enum class.
+
+ `names` can be:
+
+ * A string containing member names, separated either with spaces or
+ commas. Values are auto-numbered from 1.
+ * An iterable of member names. Values are auto-numbered from 1.
+ * An iterable of (member name, value) pairs.
+ * A mapping of member name -> value.
+
+ """
+ if pyver < 3.0:
+ # if class_name is unicode, attempt a conversion to ASCII
+ if isinstance(class_name, unicode):
+ try:
+ class_name = class_name.encode('ascii')
+ except UnicodeEncodeError:
+ raise TypeError('%r is not representable in ASCII' % class_name)
+ metacls = cls.__class__
+ if type is None:
+ bases = (cls, )
+ else:
+ bases = (type, cls)
+ classdict = metacls.__prepare__(class_name, bases)
+ _order_ = []
+
+ # special processing needed for names?
+ if isinstance(names, basestring):
+ names = names.replace(',', ' ').split()
+ if isinstance(names, (tuple, list)) and isinstance(names[0], basestring):
+ names = [(e, i+start) for (i, e) in enumerate(names)]
+
+ # Here, names is either an iterable of (name, value) or a mapping.
+ item = None # in case names is empty
+ for item in names:
+ if isinstance(item, basestring):
+ member_name, member_value = item, names[item]
+ else:
+ member_name, member_value = item
+ classdict[member_name] = member_value
+ _order_.append(member_name)
+ # only set _order_ in classdict if name/value was not from a mapping
+ if not isinstance(item, basestring):
+ classdict['_order_'] = _order_
+ classdict[ALLOW_SYNONYMS] = getattr(cls, ALLOW_SYNONYMS, allow_synonyms)
+ enum_class = metacls.__new__(metacls, class_name, bases, classdict)
+
+ # TODO: replace the frame hack if a blessed way to know the calling
+ # module is ever developed
+ if module is None:
+ try:
+ module = _sys._getframe(2).f_globals['__name__']
+ except (AttributeError, ValueError):
+ pass
+ if module is None:
+ _make_class_unpicklable(enum_class)
+ else:
+ enum_class.__module__ = module
+
+ return enum_class
+
+ @staticmethod
+ def _get_mixins_(bases):
+ """Returns the type for creating enum members, and the first inherited
+ enum class.
+
+ bases: the tuple of bases that was given to __new__
+
+ """
+ if not bases or Enum is None:
+ return object, Enum
+
+
+ # double check that we are not subclassing a class with existing
+ # enumeration members; while we're at it, see if any other data
+ # type has been mixed in so we can use the correct __new__
+ member_type = first_enum = None
+ for base in bases:
+ if (base is not Enum and
+ issubclass(base, Enum) and
+ base._member_names_):
+ raise TypeError("Cannot extend enumerations")
+ # base is now the last base in bases
+ if not issubclass(base, Enum):
+ raise TypeError("new enumerations must be created as "
+ "`ClassName([mixin_type,] enum_type)`")
+
+ # get correct mix-in type (either mix-in type of Enum subclass, or
+ # first base if last base is Enum)
+ if not issubclass(bases[0], Enum):
+ member_type = bases[0] # first data type
+ first_enum = bases[-1] # enum type
+ else:
+ for base in bases[0].__mro__:
+ # most common: (IntEnum, int, Enum, object)
+ # possible: (<Enum 'AutoIntEnum'>, <Enum 'IntEnum'>,
+ # <class 'int'>, <Enum 'Enum'>,
+ # <class 'object'>)
+ if issubclass(base, Enum):
+ if first_enum is None:
+ first_enum = base
+ else:
+ if member_type is None:
+ member_type = base
+
+ return member_type, first_enum
+
+ if pyver < 3.0:
+ @staticmethod
+ def _find_new_(classdict, member_type, first_enum):
+ """Returns the __new__ to be used for creating the enum members.
+
+ classdict: the class dictionary given to __new__
+ member_type: the data type whose __new__ will be used by default
+ first_enum: enumeration to check for an overriding __new__
+
+ """
+ # now find the correct __new__, checking to see if one was defined
+ # by the user; also check earlier enum classes in case a __new__ was
+ # saved as __member_new__
+ __new__ = classdict.get('__new__', None)
+ if __new__:
+ return None, True, True # __new__, save_new, use_args
+
+ N__new__ = getattr(None, '__new__')
+ O__new__ = getattr(object, '__new__')
+ if Enum is None:
+ E__new__ = N__new__
+ else:
+ E__new__ = Enum.__dict__['__new__']
+ # check all possibles for __member_new__ before falling back to
+ # __new__
+ for method in ('__member_new__', '__new__'):
+ for possible in (member_type, first_enum):
+ try:
+ target = possible.__dict__[method]
+ except (AttributeError, KeyError):
+ target = getattr(possible, method, None)
+ if target not in [
+ None,
+ N__new__,
+ O__new__,
+ E__new__,
+ ]:
+ if method == '__member_new__':
+ classdict['__new__'] = target
+ return None, False, True
+ if isinstance(target, staticmethod):
+ target = target.__get__(member_type)
+ __new__ = target
+ break
+ if __new__ is not None:
+ break
+ else:
+ __new__ = object.__new__
+
+ # if a non-object.__new__ is used then whatever value/tuple was
+ # assigned to the enum member name will be passed to __new__ and to the
+ # new enum member's __init__
+ if __new__ is object.__new__:
+ use_args = False
+ else:
+ use_args = True
+
+ return __new__, False, use_args
+ else:
+ @staticmethod
+ def _find_new_(classdict, member_type, first_enum):
+ """Returns the __new__ to be used for creating the enum members.
+
+ classdict: the class dictionary given to __new__
+ member_type: the data type whose __new__ will be used by default
+ first_enum: enumeration to check for an overriding __new__
+
+ """
+ # now find the correct __new__, checking to see if one was defined
+ # by the user; also check earlier enum classes in case a __new__ was
+ # saved as __member_new__
+ __new__ = classdict.get('__new__', None)
+
+ # should __new__ be saved as __member_new__ later?
+ save_new = __new__ is not None
+
+ if __new__ is None:
+ # check all possibles for __member_new__ before falling back to
+ # __new__
+ for method in ('__member_new__', '__new__'):
+ for possible in (member_type, first_enum):
+ target = getattr(possible, method, None)
+ if target not in (
+ None,
+ None.__new__,
+ object.__new__,
+ Enum.__new__,
+ ):
+ __new__ = target
+ break
+ if __new__ is not None:
+ break
+ else:
+ __new__ = object.__new__
+
+ # if a non-object.__new__ is used then whatever value/tuple was
+ # assigned to the enum member name will be passed to __new__ and to the
+ # new enum member's __init__
+ if __new__ is object.__new__:
+ use_args = False
+ else:
+ use_args = True
+
+ return __new__, save_new, use_args
+
+
+########################################################
+# In order to support Python 2 and 3 with a single
+# codebase we have to create the Enum methods separately
+# and then use the `type(name, bases, dict)` method to
+# create the class.
+########################################################
+temp_enum_dict = {}
+temp_enum_dict['__doc__'] = "Generic enumeration.\n\n Derive from this class to define new enumerations.\n\n"
+
+def __new__(cls, value):
+ # all enum instances are actually created during class construction
+ # without calling this method; this method is called by the metaclass'
+ # __call__ (i.e. Color(3) ), and by pickle
+ if type(value) is cls:
+ # For lookups like Color(Color.red)
+ value = value.value
+ #return value
+ # by-value search for a matching enum member
+ # see if it's in the reverse mapping (for hashable values)
+ try:
+ if value in cls._value2member_map_:
+ return cls._value2member_map_[value]
+ except TypeError:
+ # not there, now do long search -- O(n) behavior
+ for member in cls._member_map_.values():
+ if member.value == value:
+ return member
+ raise ValueError("%s is not a valid %s" % (value, cls.__name__))
+temp_enum_dict['__new__'] = __new__
+del __new__
+
+def __repr__(self):
+ return "<%s.%s: %r>" % (
+ self.__class__.__name__, self._name_, self._value_)
+temp_enum_dict['__repr__'] = __repr__
+del __repr__
+
+def __str__(self):
+ return "%s.%s" % (self.__class__.__name__, self._name_)
+temp_enum_dict['__str__'] = __str__
+del __str__
+
+if pyver >= 3.0:
+ def __dir__(self):
+ added_behavior = [
+ m
+ for cls in self.__class__.mro()
+ for m in cls.__dict__
+ if m[0] != '_' and m not in self._member_map_
+ ]
+ return (['__class__', '__doc__', '__module__', ] + added_behavior)
+ temp_enum_dict['__dir__'] = __dir__
+ del __dir__
+
+def __format__(self, format_spec):
+ # mixed-in Enums should use the mixed-in type's __format__, otherwise
+ # we can get strange results with the Enum name showing up instead of
+ # the value
+
+ # pure Enum branch
+ if self._member_type_ is object:
+ cls = str
+ val = str(self)
+ # mix-in branch
+ else:
+ cls = self._member_type_
+ val = self.value
+ return cls.__format__(val, format_spec)
+temp_enum_dict['__format__'] = __format__
+del __format__
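+# Illustrative note (not in the original source): for a pure Enum,
+# format(Color.red) yields 'Color.red'; for an int-mixed enum such as an
+# IntEnum, format(member, 'd') delegates to int.__format__ and yields the
+# numeric value.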
+
+
+####################################
+# Python versions before 2.6 use __cmp__
+
+if pyver < 2.6:
+
+ def __cmp__(self, other):
+ if type(other) is self.__class__:
+ if self is other:
+ return 0
+ return -1
+ return NotImplemented
+ temp_enum_dict['__cmp__'] = __cmp__
+ del __cmp__
+
+else:
+
+ def __le__(self, other):
+ raise TypeError("unorderable types: %s() <= %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__le__'] = __le__
+ del __le__
+
+ def __lt__(self, other):
+ raise TypeError("unorderable types: %s() < %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__lt__'] = __lt__
+ del __lt__
+
+ def __ge__(self, other):
+ raise TypeError("unorderable types: %s() >= %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__ge__'] = __ge__
+ del __ge__
+
+ def __gt__(self, other):
+ raise TypeError("unorderable types: %s() > %s()" % (self.__class__.__name__, other.__class__.__name__))
+ temp_enum_dict['__gt__'] = __gt__
+ del __gt__
+
+
+def __eq__(self, other):
+ if type(other) is self.__class__:
+ return self is other
+ return NotImplemented
+temp_enum_dict['__eq__'] = __eq__
+del __eq__
+
+def __ne__(self, other):
+ if type(other) is self.__class__:
+ return self is not other
+ return NotImplemented
+temp_enum_dict['__ne__'] = __ne__
+del __ne__
+
+def __hash__(self):
+ return hash(self._name_)
+temp_enum_dict['__hash__'] = __hash__
+del __hash__
+
+def __reduce_ex__(self, proto):
+ return self.__class__, (self._value_, )
+temp_enum_dict['__reduce_ex__'] = __reduce_ex__
+del __reduce_ex__
+
+# _RouteClassAttributeToGetattr is used to provide access to the `name`
+# and `value` properties of enum members while keeping some measure of
+# protection from modification, while still allowing for an enumeration
+# to have members named `name` and `value`. This works because enumeration
+# members are not set directly on the enum class -- __getattr__ is
+# used to look them up.
+
+@_RouteClassAttributeToGetattr
+def name(self):
+ return self._name_
+temp_enum_dict['name'] = name
+del name
+
+@_RouteClassAttributeToGetattr
+def value(self):
+ return self._value_
+temp_enum_dict['value'] = value
+del value
+
+@classmethod
+def _convert(cls, name, module, filter, source=None):
+ """
+ Create a new Enum subclass that replaces a collection of global constants
+ """
+ # convert all constants from source (or module) that pass filter() to
+ # a new Enum called name, and export the enum and its members back to
+ # module;
+ # also, replace the __reduce_ex__ method so unpickling works in
+ # previous Python versions
+ module_globals = vars(_sys.modules[module])
+ if source:
+ source = vars(source)
+ else:
+ source = module_globals
+ members = dict((name, value) for name, value in source.items() if filter(name))
+ cls = cls(name, members, module=module)
+ cls.__reduce_ex__ = _reduce_ex_by_name
+ module_globals.update(cls.__members__)
+ module_globals[name] = cls
+ return cls
+temp_enum_dict['_convert'] = _convert
+del _convert
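+# Illustrative usage (not in the original source), mirroring how CPython's
+# socket module replaces its AF_* constants with an enum:
+#
+#   AddressFamily = IntEnum._convert(
+#       'AddressFamily', 'socket',
+#       lambda name: name.isupper() and name.startswith('AF_'))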
+
+Enum = EnumMeta('Enum', (object, ), temp_enum_dict)
+del temp_enum_dict
+
+# Enum has now been created
+###########################
+
+class IntEnum(int, Enum):
+ """Enum where members are also (and must be) ints"""
+
+def _reduce_ex_by_name(self, proto):
+ return self.name
+
+def unique(enumeration):
+ """Class decorator that ensures only unique members exist in an enumeration."""
+ duplicates = []
+ for name, member in enumeration.__members__.items():
+ if name != member.name:
+ duplicates.append((name, member.name))
+ if duplicates:
+ duplicate_names = ', '.join(
+ ["%s -> %s" % (alias, name) for (alias, name) in duplicates]
+ )
+ raise ValueError('duplicate names found in %r: %s' %
+ (enumeration, duplicate_names)
+ )
+ return enumeration
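+# Illustrative usage (not in the original source):
+#
+#   @unique
+#   class Mistake(Enum):
+#       one = 1
+#       two = 2
+#       three = 3
+#       four = 3   # alias for 'three' -> ValueError at decoration time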
diff --git a/contrib/deprecated/python/enum34/enum/test.py b/contrib/deprecated/python/enum34/enum/test.py
new file mode 100644
index 0000000000..380bf3c4ca
--- /dev/null
+++ b/contrib/deprecated/python/enum34/enum/test.py
@@ -0,0 +1,1867 @@
+from pickle import dumps, loads, PicklingError, HIGHEST_PROTOCOL
+import sys
+import unittest
+pyver = float('%s.%s' % sys.version_info[:2])
+if pyver < 2.5:
+ sys.path.insert(0, '.')
+import enum
+from enum import Enum, IntEnum, unique, EnumMeta
+
+if pyver < 2.6:
+ from __builtin__ import enumerate as bltin_enumerate
+ def enumerate(thing, start=0):
+ result = []
+ for i, item in bltin_enumerate(thing):
+ i = i + start
+ result.append((i, item))
+ return result
+
+try:
+ any
+except NameError:
+ def any(iterable):
+ for element in iterable:
+ if element:
+ return True
+ return False
+
+try:
+ unicode
+except NameError:
+ unicode = str
+
+try:
+ from collections import OrderedDict
+except ImportError:
+ OrderedDict = None
+
+# for pickle tests
+try:
+ class Stooges(Enum):
+ LARRY = 1
+ CURLY = 2
+ MOE = 3
+except Exception:
+ Stooges = sys.exc_info()[1]
+
+try:
+ class IntStooges(int, Enum):
+ LARRY = 1
+ CURLY = 2
+ MOE = 3
+except Exception:
+ IntStooges = sys.exc_info()[1]
+
+try:
+ class FloatStooges(float, Enum):
+ LARRY = 1.39
+ CURLY = 2.72
+ MOE = 3.142596
+except Exception:
+ FloatStooges = sys.exc_info()[1]
+
+# for pickle test and subclass tests
+try:
+ class StrEnum(str, Enum):
+ 'accepts only string values'
+ class Name(StrEnum):
+ BDFL = 'Guido van Rossum'
+ FLUFL = 'Barry Warsaw'
+except Exception:
+ Name = sys.exc_info()[1]
+
+try:
+ Question = Enum('Question', 'who what when where why', module=__name__)
+except Exception:
+ Question = sys.exc_info()[1]
+
+try:
+ Answer = Enum('Answer', 'him this then there because')
+except Exception:
+ Answer = sys.exc_info()[1]
+
+try:
+ Theory = Enum('Theory', 'rule law supposition', qualname='spanish_inquisition')
+except Exception:
+ Theory = sys.exc_info()[1]
+
+# for doctests
+try:
+ class Fruit(Enum):
+ tomato = 1
+ banana = 2
+ cherry = 3
+except Exception:
+ pass
+
+import pytest
+
+def check_pickle_dump_load(assertion, source, target=None,
+ protocol=(0, HIGHEST_PROTOCOL)):
+ start, stop = protocol
+ failures = []
+ for protocol in range(start, stop+1):
+ try:
+ if target is None:
+ assertion(loads(dumps(source, protocol=protocol)) is source)
+ else:
+ assertion(loads(dumps(source, protocol=protocol)), target)
+ except Exception:
+ exc, tb = sys.exc_info()[1:]
+ failures.append('%2d: %s' %(protocol, exc))
+ if failures:
+ raise ValueError('Failed with protocols: %s' % ', '.join(failures))
+
+def check_pickle_exception(assertion, exception, obj,
+ protocol=(0, HIGHEST_PROTOCOL)):
+ start, stop = protocol
+ failures = []
+ for protocol in range(start, stop+1):
+ try:
+ assertion(exception, dumps, obj, protocol=protocol)
+ except Exception:
+ exc = sys.exc_info()[1]
+ failures.append('%d: %s %s' % (protocol, exc.__class__.__name__, exc))
+ if failures:
+ raise ValueError('Failed with protocols: %s' % ', '.join(failures))
+
+
+class TestHelpers(unittest.TestCase):
+ # _is_descriptor, _is_sunder, _is_dunder
+
+ def test_is_descriptor(self):
+ class foo:
+ pass
+ for attr in ('__get__','__set__','__delete__'):
+ obj = foo()
+ self.assertFalse(enum._is_descriptor(obj))
+ setattr(obj, attr, 1)
+ self.assertTrue(enum._is_descriptor(obj))
+
+ def test_is_sunder(self):
+ for s in ('_a_', '_aa_'):
+ self.assertTrue(enum._is_sunder(s))
+
+ for s in ('a', 'a_', '_a', '__a', 'a__', '__a__', '_a__', '__a_', '_',
+ '__', '___', '____', '_____',):
+ self.assertFalse(enum._is_sunder(s))
+
+ def test_is_dunder(self):
+ for s in ('__a__', '__aa__'):
+ self.assertTrue(enum._is_dunder(s))
+ for s in ('a', 'a_', '_a', '__a', 'a__', '_a_', '_a__', '__a_', '_',
+ '__', '___', '____', '_____',):
+ self.assertFalse(enum._is_dunder(s))
+
+
+class TestEnum(unittest.TestCase):
+ def setUp(self):
+ class Season(Enum):
+ SPRING = 1
+ SUMMER = 2
+ AUTUMN = 3
+ WINTER = 4
+ self.Season = Season
+
+ class Konstants(float, Enum):
+ E = 2.7182818
+ PI = 3.1415926
+ TAU = 2 * PI
+ self.Konstants = Konstants
+
+ class Grades(IntEnum):
+ A = 5
+ B = 4
+ C = 3
+ D = 2
+ F = 0
+ self.Grades = Grades
+
+ class Directional(str, Enum):
+ EAST = 'east'
+ WEST = 'west'
+ NORTH = 'north'
+ SOUTH = 'south'
+ self.Directional = Directional
+
+ from datetime import date
+ class Holiday(date, Enum):
+ NEW_YEAR = 2013, 1, 1
+ IDES_OF_MARCH = 2013, 3, 15
+ self.Holiday = Holiday
+
+ if pyver >= 3.0: # do not specify custom `dir` on previous versions
+ def test_dir_on_class(self):
+ Season = self.Season
+ self.assertEqual(
+ set(dir(Season)),
+ set(['__class__', '__doc__', '__members__', '__module__',
+ 'SPRING', 'SUMMER', 'AUTUMN', 'WINTER']),
+ )
+
+ def test_dir_on_item(self):
+ Season = self.Season
+ self.assertEqual(
+ set(dir(Season.WINTER)),
+ set(['__class__', '__doc__', '__module__', 'name', 'value']),
+ )
+
+ def test_dir_with_added_behavior(self):
+ class Test(Enum):
+ this = 'that'
+ these = 'those'
+ def wowser(self):
+ return ("Wowser! I'm %s!" % self.name)
+ self.assertEqual(
+ set(dir(Test)),
+ set(['__class__', '__doc__', '__members__', '__module__', 'this', 'these']),
+ )
+ self.assertEqual(
+ set(dir(Test.this)),
+ set(['__class__', '__doc__', '__module__', 'name', 'value', 'wowser']),
+ )
+
+ def test_dir_on_sub_with_behavior_on_super(self):
+ # see issue22506
+ class SuperEnum(Enum):
+ def invisible(self):
+ return "did you see me?"
+ class SubEnum(SuperEnum):
+ sample = 5
+ self.assertEqual(
+ set(dir(SubEnum.sample)),
+ set(['__class__', '__doc__', '__module__', 'name', 'value', 'invisible']),
+ )
+
+ if pyver >= 2.7: # OrderedDict first available here
+ def test_members_is_ordereddict_if_ordered(self):
+ class Ordered(Enum):
+ __order__ = 'first second third'
+ first = 'bippity'
+ second = 'boppity'
+ third = 'boo'
+ self.assertTrue(type(Ordered.__members__) is OrderedDict)
+
+ def test_members_is_ordereddict_if_not_ordered(self):
+ class Unordered(Enum):
+ this = 'that'
+ these = 'those'
+ self.assertTrue(type(Unordered.__members__) is OrderedDict)
+
+ if pyver >= 3.0: # all objects are ordered in Python 2.x
+ def test_members_is_always_ordered(self):
+ class AlwaysOrdered(Enum):
+ first = 1
+ second = 2
+ third = 3
+ self.assertTrue(type(AlwaysOrdered.__members__) is OrderedDict)
+
+ def test_comparisons(self):
+ def bad_compare():
+ Season.SPRING > 4
+ Season = self.Season
+ self.assertNotEqual(Season.SPRING, 1)
+ self.assertRaises(TypeError, bad_compare)
+
+ class Part(Enum):
+ SPRING = 1
+ CLIP = 2
+ BARREL = 3
+
+ self.assertNotEqual(Season.SPRING, Part.SPRING)
+ def bad_compare():
+ Season.SPRING < Part.CLIP
+ self.assertRaises(TypeError, bad_compare)
+
+ def test_enum_in_enum_out(self):
+ Season = self.Season
+ self.assertTrue(Season(Season.WINTER) is Season.WINTER)
+
+ def test_enum_value(self):
+ Season = self.Season
+ self.assertEqual(Season.SPRING.value, 1)
+
+ def test_intenum_value(self):
+ self.assertEqual(IntStooges.CURLY.value, 2)
+
+ def test_enum(self):
+ Season = self.Season
+ lst = list(Season)
+ self.assertEqual(len(lst), len(Season))
+ self.assertEqual(len(Season), 4, Season)
+ self.assertEqual(
+ [Season.SPRING, Season.SUMMER, Season.AUTUMN, Season.WINTER], lst)
+
+ for i, season in enumerate('SPRING SUMMER AUTUMN WINTER'.split()):
+ i += 1
+ e = Season(i)
+ self.assertEqual(e, getattr(Season, season))
+ self.assertEqual(e.value, i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, season)
+ self.assertTrue(e in Season)
+ self.assertTrue(type(e) is Season)
+ self.assertTrue(isinstance(e, Season))
+ self.assertEqual(str(e), 'Season.' + season)
+ self.assertEqual(
+ repr(e),
+ '<Season.%s: %s>' % (season, i),
+ )
+
+ def test_value_name(self):
+ Season = self.Season
+ self.assertEqual(Season.SPRING.name, 'SPRING')
+ self.assertEqual(Season.SPRING.value, 1)
+ def set_name(obj, new_value):
+ obj.name = new_value
+ def set_value(obj, new_value):
+ obj.value = new_value
+ self.assertRaises(AttributeError, set_name, Season.SPRING, 'invierno')
+ self.assertRaises(AttributeError, set_value, Season.SPRING, 2)
+
+ def test_attribute_deletion(self):
+ class Season(Enum):
+ SPRING = 1
+ SUMMER = 2
+ AUTUMN = 3
+ WINTER = 4
+
+ def spam(cls):
+ pass
+
+ self.assertTrue(hasattr(Season, 'spam'))
+ del Season.spam
+ self.assertFalse(hasattr(Season, 'spam'))
+
+ self.assertRaises(AttributeError, delattr, Season, 'SPRING')
+ self.assertRaises(AttributeError, delattr, Season, 'DRY')
+ self.assertRaises(AttributeError, delattr, Season.SPRING, 'name')
+
+ def test_bool_of_class(self):
+ class Empty(Enum):
+ pass
+ self.assertTrue(bool(Empty))
+
+ def test_bool_of_member(self):
+ class Count(Enum):
+ zero = 0
+ one = 1
+ two = 2
+ for member in Count:
+ self.assertTrue(bool(member))
+
+ def test_invalid_names(self):
+ def create_bad_class_1():
+ class Wrong(Enum):
+ mro = 9
+ def create_bad_class_2():
+ class Wrong(Enum):
+ _reserved_ = 3
+ self.assertRaises(ValueError, create_bad_class_1)
+ self.assertRaises(ValueError, create_bad_class_2)
+
+ def test_contains(self):
+ Season = self.Season
+ self.assertTrue(Season.AUTUMN in Season)
+ self.assertTrue(3 not in Season)
+
+ val = Season(3)
+ self.assertTrue(val in Season)
+
+ class OtherEnum(Enum):
+ one = 1; two = 2
+ self.assertTrue(OtherEnum.two not in Season)
+
+ if pyver >= 2.6: # when `format` came into being
+
+ def test_format_enum(self):
+ Season = self.Season
+ self.assertEqual('{0}'.format(Season.SPRING),
+ '{0}'.format(str(Season.SPRING)))
+ self.assertEqual('{0:}'.format(Season.SPRING),
+ '{0:}'.format(str(Season.SPRING)))
+ self.assertEqual('{0:20}'.format(Season.SPRING),
+ '{0:20}'.format(str(Season.SPRING)))
+ self.assertEqual('{0:^20}'.format(Season.SPRING),
+ '{0:^20}'.format(str(Season.SPRING)))
+ self.assertEqual('{0:>20}'.format(Season.SPRING),
+ '{0:>20}'.format(str(Season.SPRING)))
+ self.assertEqual('{0:<20}'.format(Season.SPRING),
+ '{0:<20}'.format(str(Season.SPRING)))
+
+ def test_format_enum_custom(self):
+ class TestFloat(float, Enum):
+ one = 1.0
+ two = 2.0
+ def __format__(self, spec):
+ return 'TestFloat success!'
+ self.assertEqual('{0}'.format(TestFloat.one), 'TestFloat success!')
+
+ def assertFormatIsValue(self, spec, member):
+ self.assertEqual(spec.format(member), spec.format(member.value))
+
+ def test_format_enum_date(self):
+ Holiday = self.Holiday
+ self.assertFormatIsValue('{0}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:20}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:^20}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:>20}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:<20}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:%Y %m}', Holiday.IDES_OF_MARCH)
+ self.assertFormatIsValue('{0:%Y %m %M:00}', Holiday.IDES_OF_MARCH)
+
+ def test_format_enum_float(self):
+ Konstants = self.Konstants
+ self.assertFormatIsValue('{0}', Konstants.TAU)
+ self.assertFormatIsValue('{0:}', Konstants.TAU)
+ self.assertFormatIsValue('{0:20}', Konstants.TAU)
+ self.assertFormatIsValue('{0:^20}', Konstants.TAU)
+ self.assertFormatIsValue('{0:>20}', Konstants.TAU)
+ self.assertFormatIsValue('{0:<20}', Konstants.TAU)
+ self.assertFormatIsValue('{0:n}', Konstants.TAU)
+ self.assertFormatIsValue('{0:5.2}', Konstants.TAU)
+ self.assertFormatIsValue('{0:f}', Konstants.TAU)
+
+ def test_format_enum_int(self):
+ Grades = self.Grades
+ self.assertFormatIsValue('{0}', Grades.C)
+ self.assertFormatIsValue('{0:}', Grades.C)
+ self.assertFormatIsValue('{0:20}', Grades.C)
+ self.assertFormatIsValue('{0:^20}', Grades.C)
+ self.assertFormatIsValue('{0:>20}', Grades.C)
+ self.assertFormatIsValue('{0:<20}', Grades.C)
+ self.assertFormatIsValue('{0:+}', Grades.C)
+ self.assertFormatIsValue('{0:08X}', Grades.C)
+ self.assertFormatIsValue('{0:b}', Grades.C)
+
+ def test_format_enum_str(self):
+ Directional = self.Directional
+ self.assertFormatIsValue('{0}', Directional.WEST)
+ self.assertFormatIsValue('{0:}', Directional.WEST)
+ self.assertFormatIsValue('{0:20}', Directional.WEST)
+ self.assertFormatIsValue('{0:^20}', Directional.WEST)
+ self.assertFormatIsValue('{0:>20}', Directional.WEST)
+ self.assertFormatIsValue('{0:<20}', Directional.WEST)
+
+ def test_hash(self):
+ Season = self.Season
+ dates = {}
+ dates[Season.WINTER] = '1225'
+ dates[Season.SPRING] = '0315'
+ dates[Season.SUMMER] = '0704'
+ dates[Season.AUTUMN] = '1031'
+ self.assertEqual(dates[Season.AUTUMN], '1031')
+
+ def test_enum_duplicates(self):
+ class Season(Enum):
+ _order_ = "SPRING SUMMER AUTUMN WINTER"
+ SPRING = 1
+ SUMMER = 2
+ AUTUMN = FALL = 3
+ WINTER = 4
+ ANOTHER_SPRING = 1
+ lst = list(Season)
+ self.assertEqual(
+ lst,
+ [Season.SPRING, Season.SUMMER,
+ Season.AUTUMN, Season.WINTER,
+ ])
+ self.assertTrue(Season.FALL is Season.AUTUMN)
+ self.assertEqual(Season.FALL.value, 3)
+ self.assertEqual(Season.AUTUMN.value, 3)
+ self.assertTrue(Season(3) is Season.AUTUMN)
+ self.assertTrue(Season(1) is Season.SPRING)
+ self.assertEqual(Season.FALL.name, 'AUTUMN')
+ self.assertEqual(
+ set([k for k,v in Season.__members__.items() if v.name != k]),
+ set(['FALL', 'ANOTHER_SPRING']),
+ )
+
+ if pyver >= 3.0:
+ cls = vars()
+ result = {'Enum':Enum}
+ exec("""def test_duplicate_name(self):
+ with self.assertRaises(TypeError):
+ class Color(Enum):
+ red = 1
+ green = 2
+ blue = 3
+ red = 4
+
+ with self.assertRaises(TypeError):
+ class Color(Enum):
+ red = 1
+ green = 2
+ blue = 3
+ def red(self):
+ return 'red'
+
+ with self.assertRaises(TypeError):
+ class Color(Enum):
+ @property
+
+ def red(self):
+ return 'redder'
+ red = 1
+ green = 2
+ blue = 3""",
+ result)
+ cls['test_duplicate_name'] = result['test_duplicate_name']
+
+ def test_enum_with_value_name(self):
+ class Huh(Enum):
+ name = 1
+ value = 2
+ self.assertEqual(
+ list(Huh),
+ [Huh.name, Huh.value],
+ )
+ self.assertTrue(type(Huh.name) is Huh)
+ self.assertEqual(Huh.name.name, 'name')
+ self.assertEqual(Huh.name.value, 1)
+
+ def test_intenum_from_scratch(self):
+ class phy(int, Enum):
+ pi = 3
+ tau = 2 * pi
+ self.assertTrue(phy.pi < phy.tau)
+
+ def test_intenum_inherited(self):
+ class IntEnum(int, Enum):
+ pass
+ class phy(IntEnum):
+ pi = 3
+ tau = 2 * pi
+ self.assertTrue(phy.pi < phy.tau)
+
+ def test_floatenum_from_scratch(self):
+ class phy(float, Enum):
+ pi = 3.1415926
+ tau = 2 * pi
+ self.assertTrue(phy.pi < phy.tau)
+
+ def test_floatenum_inherited(self):
+ class FloatEnum(float, Enum):
+ pass
+ class phy(FloatEnum):
+ pi = 3.1415926
+ tau = 2 * pi
+ self.assertTrue(phy.pi < phy.tau)
+
+ def test_strenum_from_scratch(self):
+ class phy(str, Enum):
+ pi = 'Pi'
+ tau = 'Tau'
+ self.assertTrue(phy.pi < phy.tau)
+
+ def test_strenum_inherited(self):
+ class StrEnum(str, Enum):
+ pass
+ class phy(StrEnum):
+ pi = 'Pi'
+ tau = 'Tau'
+ self.assertTrue(phy.pi < phy.tau)
+
+ def test_intenum(self):
+ class WeekDay(IntEnum):
+ SUNDAY = 1
+ MONDAY = 2
+ TUESDAY = 3
+ WEDNESDAY = 4
+ THURSDAY = 5
+ FRIDAY = 6
+ SATURDAY = 7
+
+ self.assertEqual(['a', 'b', 'c'][WeekDay.MONDAY], 'c')
+ self.assertEqual([i for i in range(WeekDay.TUESDAY)], [0, 1, 2])
+
+ lst = list(WeekDay)
+ self.assertEqual(len(lst), len(WeekDay))
+ self.assertEqual(len(WeekDay), 7)
+ target = 'SUNDAY MONDAY TUESDAY WEDNESDAY THURSDAY FRIDAY SATURDAY'
+ target = target.split()
+ for i, weekday in enumerate(target):
+ i += 1
+ e = WeekDay(i)
+ self.assertEqual(e, i)
+ self.assertEqual(int(e), i)
+ self.assertEqual(e.name, weekday)
+ self.assertTrue(e in WeekDay)
+ self.assertEqual(lst.index(e)+1, i)
+ self.assertTrue(0 < e < 8)
+ self.assertTrue(type(e) is WeekDay)
+ self.assertTrue(isinstance(e, int))
+ self.assertTrue(isinstance(e, Enum))
+
+ def test_intenum_duplicates(self):
+ class WeekDay(IntEnum):
+ __order__ = 'SUNDAY MONDAY TUESDAY WEDNESDAY THURSDAY FRIDAY SATURDAY'
+ SUNDAY = 1
+ MONDAY = 2
+ TUESDAY = TEUSDAY = 3
+ WEDNESDAY = 4
+ THURSDAY = 5
+ FRIDAY = 6
+ SATURDAY = 7
+ self.assertTrue(WeekDay.TEUSDAY is WeekDay.TUESDAY)
+ self.assertEqual(WeekDay(3).name, 'TUESDAY')
+ self.assertEqual([k for k,v in WeekDay.__members__.items()
+ if v.name != k], ['TEUSDAY', ])
+
+ def test_pickle_enum(self):
+ if isinstance(Stooges, Exception):
+ raise Stooges
+ check_pickle_dump_load(self.assertTrue, Stooges.CURLY)
+ check_pickle_dump_load(self.assertTrue, Stooges)
+
+ def test_pickle_int(self):
+ if isinstance(IntStooges, Exception):
+ raise IntStooges
+ check_pickle_dump_load(self.assertTrue, IntStooges.CURLY)
+ check_pickle_dump_load(self.assertTrue, IntStooges)
+
+ def test_pickle_float(self):
+ if isinstance(FloatStooges, Exception):
+ raise FloatStooges
+ check_pickle_dump_load(self.assertTrue, FloatStooges.CURLY)
+ check_pickle_dump_load(self.assertTrue, FloatStooges)
+
+ def test_pickle_enum_function(self):
+ if isinstance(Answer, Exception):
+ raise Answer
+ check_pickle_dump_load(self.assertTrue, Answer.him)
+ check_pickle_dump_load(self.assertTrue, Answer)
+
+ def test_pickle_enum_function_with_module(self):
+ if isinstance(Question, Exception):
+ raise Question
+ check_pickle_dump_load(self.assertTrue, Question.who)
+ check_pickle_dump_load(self.assertTrue, Question)
+
+ if pyver == 3.4:
+ def test_class_nested_enum_and_pickle_protocol_four(self):
+ # would normally just have this directly in the class namespace
+ class NestedEnum(Enum):
+ twigs = 'common'
+ shiny = 'rare'
+
+ self.__class__.NestedEnum = NestedEnum
+ self.NestedEnum.__qualname__ = '%s.NestedEnum' % self.__class__.__name__
+ check_pickle_exception(
+ self.assertRaises, PicklingError, self.NestedEnum.twigs,
+ protocol=(0, 3))
+ check_pickle_dump_load(self.assertTrue, self.NestedEnum.twigs,
+ protocol=(4, HIGHEST_PROTOCOL))
+
+ elif pyver == 3.5:
+ def test_class_nested_enum_and_pickle_protocol_four(self):
+ # would normally just have this directly in the class namespace
+ class NestedEnum(Enum):
+ twigs = 'common'
+ shiny = 'rare'
+
+ self.__class__.NestedEnum = NestedEnum
+ self.NestedEnum.__qualname__ = '%s.NestedEnum' % self.__class__.__name__
+ check_pickle_dump_load(self.assertTrue, self.NestedEnum.twigs,
+ protocol=(0, HIGHEST_PROTOCOL))
+
+ def test_exploding_pickle(self):
+ BadPickle = Enum('BadPickle', 'dill sweet bread_n_butter')
+ enum._make_class_unpicklable(BadPickle)
+ globals()['BadPickle'] = BadPickle
+ check_pickle_exception(self.assertRaises, TypeError, BadPickle.dill)
+ check_pickle_exception(self.assertRaises, PicklingError, BadPickle)
+
+ def test_string_enum(self):
+ class SkillLevel(str, Enum):
+ master = 'what is the sound of one hand clapping?'
+ journeyman = 'why did the chicken cross the road?'
+ apprentice = 'knock, knock!'
+ self.assertEqual(SkillLevel.apprentice, 'knock, knock!')
+
+ def test_getattr_getitem(self):
+ class Period(Enum):
+ morning = 1
+ noon = 2
+ evening = 3
+ night = 4
+ self.assertTrue(Period(2) is Period.noon)
+ self.assertTrue(getattr(Period, 'night') is Period.night)
+ self.assertTrue(Period['morning'] is Period.morning)
+
+ def test_getattr_dunder(self):
+ Season = self.Season
+ self.assertTrue(getattr(Season, '__hash__'))
+
+ def test_iteration_order(self):
+ class Season(Enum):
+ _order_ = 'SUMMER WINTER AUTUMN SPRING'
+ SUMMER = 2
+ WINTER = 4
+ AUTUMN = 3
+ SPRING = 1
+ self.assertEqual(
+ list(Season),
+ [Season.SUMMER, Season.WINTER, Season.AUTUMN, Season.SPRING],
+ )
+
+ def test_iteration_order_reversed(self):
+ self.assertEqual(
+ list(reversed(self.Season)),
+ [self.Season.WINTER, self.Season.AUTUMN, self.Season.SUMMER,
+ self.Season.SPRING]
+ )
+
+ def test_iteration_order_with_unorderable_values(self):
+ class Complex(Enum):
+ a = complex(7, 9)
+ b = complex(3.14, 2)
+ c = complex(1, -1)
+ d = complex(-77, 32)
+ self.assertEqual(
+ list(Complex),
+ [Complex.a, Complex.b, Complex.c, Complex.d],
+ )
+
+ def test_programmatic_function_string(self):
+ SummerMonth = Enum('SummerMonth', 'june july august')
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_string_with_start(self):
+ SummerMonth = Enum('SummerMonth', 'june july august', start=10)
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split(), 10):
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_string_list(self):
+ SummerMonth = Enum('SummerMonth', ['june', 'july', 'august'])
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_string_list_with_start(self):
+ SummerMonth = Enum('SummerMonth', ['june', 'july', 'august'], start=20)
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split(), 20):
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_iterable(self):
+ SummerMonth = Enum(
+ 'SummerMonth',
+ (('june', 1), ('july', 2), ('august', 3))
+ )
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_iterable_with_weird_names(self):
+ SummerMonth = Enum(
+ 'SummerMonth',
+ (('june', 1), ('july', 2), ('august', 3), ('fabulous september', 4))
+ )
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 4, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august, SummerMonth['fabulous september']],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split() + ['fabulous september']):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_from_dict(self):
+ SummerMonth = Enum(
+ 'SummerMonth',
+ dict((('june', 1), ('july', 2), ('august', 3)))
+ )
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ if pyver < 3.0:
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_type(self):
+ SummerMonth = Enum('SummerMonth', 'june july august', type=int)
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_type_with_start(self):
+ SummerMonth = Enum('SummerMonth', 'june july august', type=int, start=30)
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split(), 30):
+ e = SummerMonth(i)
+ self.assertEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_type_from_subclass(self):
+ SummerMonth = IntEnum('SummerMonth', 'june july august')
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_type_from_subclass_with_start(self):
+ SummerMonth = IntEnum('SummerMonth', 'june july august', start=40)
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate('june july august'.split(), 40):
+ e = SummerMonth(i)
+ self.assertEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_unicode(self):
+ SummerMonth = Enum('SummerMonth', unicode('june july august'))
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_unicode_list(self):
+ SummerMonth = Enum('SummerMonth', [unicode('june'), unicode('july'), unicode('august')])
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_unicode_iterable(self):
+ SummerMonth = Enum(
+ 'SummerMonth',
+ ((unicode('june'), 1), (unicode('july'), 2), (unicode('august'), 3))
+ )
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_from_unicode_dict(self):
+ SummerMonth = Enum(
+ 'SummerMonth',
+ dict(((unicode('june'), 1), (unicode('july'), 2), (unicode('august'), 3)))
+ )
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ if pyver < 3.0:
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(int(e.value), i)
+ self.assertNotEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_unicode_type(self):
+ SummerMonth = Enum('SummerMonth', unicode('june july august'), type=int)
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_unicode_type_from_subclass(self):
+ SummerMonth = IntEnum('SummerMonth', unicode('june july august'))
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(e, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_programmatic_function_unicode_class(self):
+ if pyver < 3.0:
+ class_names = unicode('SummerMonth'), 'S\xfcmm\xe9rM\xf6nth'.decode('latin1')
+ else:
+ class_names = 'SummerMonth', 'S\xfcmm\xe9rM\xf6nth'
+ for i, class_name in enumerate(class_names):
+ if pyver < 3.0 and i == 1:
+ self.assertRaises(TypeError, Enum, class_name, unicode('june july august'))
+ else:
+ SummerMonth = Enum(class_name, unicode('june july august'))
+ lst = list(SummerMonth)
+ self.assertEqual(len(lst), len(SummerMonth))
+ self.assertEqual(len(SummerMonth), 3, SummerMonth)
+ self.assertEqual(
+ [SummerMonth.june, SummerMonth.july, SummerMonth.august],
+ lst,
+ )
+ for i, month in enumerate(unicode('june july august').split()):
+ i += 1
+ e = SummerMonth(i)
+ self.assertEqual(e.value, i)
+ self.assertEqual(e.name, month)
+ self.assertTrue(e in SummerMonth)
+ self.assertTrue(type(e) is SummerMonth)
+
+ def test_subclassing(self):
+ if isinstance(Name, Exception):
+ raise Name
+ self.assertEqual(Name.BDFL, 'Guido van Rossum')
+ self.assertTrue(Name.BDFL is Name('Guido van Rossum'))
+ self.assertTrue(Name.BDFL is getattr(Name, 'BDFL'))
+ check_pickle_dump_load(self.assertTrue, Name.BDFL)
+
+ def test_extending(self):
+ def bad_extension():
+ class Color(Enum):
+ red = 1
+ green = 2
+ blue = 3
+ class MoreColor(Color):
+ cyan = 4
+ magenta = 5
+ yellow = 6
+ self.assertRaises(TypeError, bad_extension)
+
+ def test_exclude_methods(self):
+ class whatever(Enum):
+ this = 'that'
+ these = 'those'
+ def really(self):
+ return 'no, not %s' % self.value
+ self.assertFalse(type(whatever.really) is whatever)
+ self.assertEqual(whatever.this.really(), 'no, not that')
+
+ def test_wrong_inheritance_order(self):
+ def wrong_inherit():
+ class Wrong(Enum, str):
+ NotHere = 'error before this point'
+ self.assertRaises(TypeError, wrong_inherit)
+
+ def test_intenum_transitivity(self):
+ class number(IntEnum):
+ one = 1
+ two = 2
+ three = 3
+ class numero(IntEnum):
+ uno = 1
+ dos = 2
+ tres = 3
+ self.assertEqual(number.one, numero.uno)
+ self.assertEqual(number.two, numero.dos)
+ self.assertEqual(number.three, numero.tres)
+
+ def test_introspection(self):
+ class Number(IntEnum):
+ one = 100
+ two = 200
+ self.assertTrue(Number.one._member_type_ is int)
+ self.assertTrue(Number._member_type_ is int)
+ class String(str, Enum):
+ yarn = 'soft'
+ rope = 'rough'
+ wire = 'hard'
+ self.assertTrue(String.yarn._member_type_ is str)
+ self.assertTrue(String._member_type_ is str)
+ class Plain(Enum):
+ vanilla = 'white'
+ one = 1
+ self.assertTrue(Plain.vanilla._member_type_ is object)
+ self.assertTrue(Plain._member_type_ is object)
+
+ def test_wrong_enum_in_call(self):
+ class Monochrome(Enum):
+ black = 0
+ white = 1
+ class Gender(Enum):
+ male = 0
+ female = 1
+ self.assertRaises(ValueError, Monochrome, Gender.male)
+
+ def test_wrong_enum_in_mixed_call(self):
+ class Monochrome(IntEnum):
+ black = 0
+ white = 1
+ class Gender(Enum):
+ male = 0
+ female = 1
+ self.assertRaises(ValueError, Monochrome, Gender.male)
+
+ def test_mixed_enum_in_call_1(self):
+ class Monochrome(IntEnum):
+ black = 0
+ white = 1
+ class Gender(IntEnum):
+ male = 0
+ female = 1
+ self.assertTrue(Monochrome(Gender.female) is Monochrome.white)
+
+ def test_mixed_enum_in_call_2(self):
+ class Monochrome(Enum):
+ black = 0
+ white = 1
+ class Gender(IntEnum):
+ male = 0
+ female = 1
+ self.assertTrue(Monochrome(Gender.male) is Monochrome.black)
+
+ def test_flufl_enum(self):
+ class Fluflnum(Enum):
+ def __int__(self):
+ return int(self.value)
+ class MailManOptions(Fluflnum):
+ option1 = 1
+ option2 = 2
+ option3 = 3
+ self.assertEqual(int(MailManOptions.option1), 1)
+
+ def test_no_such_enum_member(self):
+ class Color(Enum):
+ red = 1
+ green = 2
+ blue = 3
+ self.assertRaises(ValueError, Color, 4)
+ self.assertRaises(KeyError, Color.__getitem__, 'chartreuse')
+
+ def test_new_repr(self):
+ class Color(Enum):
+ red = 1
+ green = 2
+ blue = 3
+ def __repr__(self):
+ return "don't you just love shades of %s?" % self.name
+ self.assertEqual(
+ repr(Color.blue),
+ "don't you just love shades of blue?",
+ )
+
+ def test_inherited_repr(self):
+ class MyEnum(Enum):
+ def __repr__(self):
+ return "My name is %s." % self.name
+ class MyIntEnum(int, MyEnum):
+ this = 1
+ that = 2
+ theother = 3
+ self.assertEqual(repr(MyIntEnum.that), "My name is that.")
+
+ def test_multiple_mixin_mro(self):
+ class auto_enum(EnumMeta):
+ def __new__(metacls, cls, bases, classdict):
+ original_dict = classdict
+ classdict = enum._EnumDict()
+ for k, v in original_dict.items():
+ classdict[k] = v
+ temp = type(classdict)()
+ names = set(classdict._member_names)
+ i = 0
+ for k in classdict._member_names:
+ v = classdict[k]
+ if v == ():
+ v = i
+ else:
+ i = v
+ i += 1
+ temp[k] = v
+ for k, v in classdict.items():
+ if k not in names:
+ temp[k] = v
+ return super(auto_enum, metacls).__new__(
+ metacls, cls, bases, temp)
+
+ AutoNumberedEnum = auto_enum('AutoNumberedEnum', (Enum,), {})
+
+ AutoIntEnum = auto_enum('AutoIntEnum', (IntEnum,), {})
+
+ class TestAutoNumber(AutoNumberedEnum):
+ a = ()
+ b = 3
+ c = ()
+
+ class TestAutoInt(AutoIntEnum):
+ a = ()
+ b = 3
+ c = ()
+
+ def test_subclasses_with_getnewargs(self):
+ class NamedInt(int):
+ __qualname__ = 'NamedInt' # needed for pickle protocol 4
+ def __new__(cls, *args):
+ _args = args
+ if len(args) < 2:
+ raise TypeError("name and value must be specified")
+ name, args = args[0], args[1:]
+ self = int.__new__(cls, *args)
+ self._intname = name
+ self._args = _args
+ return self
+ def __getnewargs__(self):
+ return self._args
+ @property
+ def __name__(self):
+ return self._intname
+ def __repr__(self):
+ # repr() is updated to include the name and type info
+ return "%s(%r, %s)" % (type(self).__name__,
+ self.__name__,
+ int.__repr__(self))
+ def __str__(self):
+ # str() is unchanged, even if it relies on the repr() fallback
+ base = int
+ base_str = base.__str__
+ if base_str.__objclass__ is object:
+ return base.__repr__(self)
+ return base_str(self)
+ # for simplicity, we only define one operator that
+ # propagates expressions
+ def __add__(self, other):
+ temp = int(self) + int(other)
+ if isinstance(self, NamedInt) and isinstance(other, NamedInt):
+ return NamedInt(
+ '(%s + %s)' % (self.__name__, other.__name__),
+ temp)
+ else:
+ return temp
+
+ class NEI(NamedInt, Enum):
+ __qualname__ = 'NEI' # needed for pickle protocol 4
+ x = ('the-x', 1)
+ y = ('the-y', 2)
+
+ self.assertTrue(NEI.__new__ is Enum.__new__)
+ self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
+ globals()['NamedInt'] = NamedInt
+ globals()['NEI'] = NEI
+ NI5 = NamedInt('test', 5)
+ self.assertEqual(NI5, 5)
+ check_pickle_dump_load(self.assertEqual, NI5, 5)
+ self.assertEqual(NEI.y.value, 2)
+ check_pickle_dump_load(self.assertTrue, NEI.y)
+
+ if pyver >= 3.4:
+ def test_subclasses_with_getnewargs_ex(self):
+ class NamedInt(int):
+ __qualname__ = 'NamedInt' # needed for pickle protocol 4
+ def __new__(cls, *args):
+ _args = args
+ if len(args) < 2:
+ raise TypeError("name and value must be specified")
+ name, args = args[0], args[1:]
+ self = int.__new__(cls, *args)
+ self._intname = name
+ self._args = _args
+ return self
+ def __getnewargs_ex__(self):
+ return self._args, {}
+ @property
+ def __name__(self):
+ return self._intname
+ def __repr__(self):
+ # repr() is updated to include the name and type info
+ return "{}({!r}, {})".format(type(self).__name__,
+ self.__name__,
+ int.__repr__(self))
+ def __str__(self):
+ # str() is unchanged, even if it relies on the repr() fallback
+ base = int
+ base_str = base.__str__
+ if base_str.__objclass__ is object:
+ return base.__repr__(self)
+ return base_str(self)
+ # for simplicity, we only define one operator that
+ # propagates expressions
+ def __add__(self, other):
+ temp = int(self) + int(other)
+ if isinstance(self, NamedInt) and isinstance(other, NamedInt):
+ return NamedInt(
+ '({0} + {1})'.format(self.__name__, other.__name__),
+ temp)
+ else:
+ return temp
+
+ class NEI(NamedInt, Enum):
+ __qualname__ = 'NEI' # needed for pickle protocol 4
+ x = ('the-x', 1)
+ y = ('the-y', 2)
+
+
+ self.assertIs(NEI.__new__, Enum.__new__)
+ self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
+ globals()['NamedInt'] = NamedInt
+ globals()['NEI'] = NEI
+ NI5 = NamedInt('test', 5)
+ self.assertEqual(NI5, 5)
+ check_pickle_dump_load(self.assertEqual, NI5, 5, protocol=(4, HIGHEST_PROTOCOL))
+ self.assertEqual(NEI.y.value, 2)
+ check_pickle_dump_load(self.assertTrue, NEI.y, protocol=(4, HIGHEST_PROTOCOL))
+
+ def test_subclasses_with_reduce(self):
+ class NamedInt(int):
+ __qualname__ = 'NamedInt' # needed for pickle protocol 4
+ def __new__(cls, *args):
+ _args = args
+ if len(args) < 2:
+ raise TypeError("name and value must be specified")
+ name, args = args[0], args[1:]
+ self = int.__new__(cls, *args)
+ self._intname = name
+ self._args = _args
+ return self
+ def __reduce__(self):
+ return self.__class__, self._args
+ @property
+ def __name__(self):
+ return self._intname
+ def __repr__(self):
+ # repr() is updated to include the name and type info
+ return "%s(%r, %s)" % (type(self).__name__,
+ self.__name__,
+ int.__repr__(self))
+ def __str__(self):
+ # str() is unchanged, even if it relies on the repr() fallback
+ base = int
+ base_str = base.__str__
+ if base_str.__objclass__ is object:
+ return base.__repr__(self)
+ return base_str(self)
+ # for simplicity, we only define one operator that
+ # propagates expressions
+ def __add__(self, other):
+ temp = int(self) + int(other)
+ if isinstance(self, NamedInt) and isinstance(other, NamedInt):
+ return NamedInt(
+ '(%s + %s)' % (self.__name__, other.__name__),
+ temp)
+ else:
+ return temp
+
+ class NEI(NamedInt, Enum):
+ __qualname__ = 'NEI' # needed for pickle protocol 4
+ x = ('the-x', 1)
+ y = ('the-y', 2)
+
+
+ self.assertTrue(NEI.__new__ is Enum.__new__)
+ self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
+ globals()['NamedInt'] = NamedInt
+ globals()['NEI'] = NEI
+ NI5 = NamedInt('test', 5)
+ self.assertEqual(NI5, 5)
+ check_pickle_dump_load(self.assertEqual, NI5, 5)
+ self.assertEqual(NEI.y.value, 2)
+ check_pickle_dump_load(self.assertTrue, NEI.y)
+
+ def test_subclasses_with_reduce_ex(self):
+ class NamedInt(int):
+ __qualname__ = 'NamedInt' # needed for pickle protocol 4
+ def __new__(cls, *args):
+ _args = args
+ if len(args) < 2:
+ raise TypeError("name and value must be specified")
+ name, args = args[0], args[1:]
+ self = int.__new__(cls, *args)
+ self._intname = name
+ self._args = _args
+ return self
+ def __reduce_ex__(self, proto):
+ return self.__class__, self._args
+ @property
+ def __name__(self):
+ return self._intname
+ def __repr__(self):
+ # repr() is updated to include the name and type info
+ return "%s(%r, %s)" % (type(self).__name__,
+ self.__name__,
+ int.__repr__(self))
+ def __str__(self):
+ # str() is unchanged, even if it relies on the repr() fallback
+ base = int
+ base_str = base.__str__
+ if base_str.__objclass__ is object:
+ return base.__repr__(self)
+ return base_str(self)
+ # for simplicity, we only define one operator that
+ # propagates expressions
+ def __add__(self, other):
+ temp = int(self) + int(other)
+ if isinstance(self, NamedInt) and isinstance(other, NamedInt):
+ return NamedInt(
+ '(%s + %s)' % (self.__name__, other.__name__),
+ temp)
+ else:
+ return temp
+
+ class NEI(NamedInt, Enum):
+ __qualname__ = 'NEI' # needed for pickle protocol 4
+ x = ('the-x', 1)
+ y = ('the-y', 2)
+
+
+ self.assertTrue(NEI.__new__ is Enum.__new__)
+ self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
+ globals()['NamedInt'] = NamedInt
+ globals()['NEI'] = NEI
+ NI5 = NamedInt('test', 5)
+ self.assertEqual(NI5, 5)
+ check_pickle_dump_load(self.assertEqual, NI5, 5)
+ self.assertEqual(NEI.y.value, 2)
+ check_pickle_dump_load(self.assertTrue, NEI.y)
+
+ def test_subclasses_without_direct_pickle_support(self):
+ class NamedInt(int):
+ __qualname__ = 'NamedInt'
+ def __new__(cls, *args):
+ _args = args
+ # validate before unpacking so an empty call raises TypeError, not IndexError
+ if len(args) < 2:
+ raise TypeError("name and value must be specified")
+ name, args = args[0], args[1:]
+ self = int.__new__(cls, *args)
+ self._intname = name
+ self._args = _args
+ return self
+ @property
+ def __name__(self):
+ return self._intname
+ def __repr__(self):
+ # repr() is updated to include the name and type info
+ return "%s(%r, %s)" % (type(self).__name__,
+ self.__name__,
+ int.__repr__(self))
+ def __str__(self):
+ # str() is unchanged, even if it relies on the repr() fallback
+ base = int
+ base_str = base.__str__
+ if base_str.__objclass__ is object:
+ return base.__repr__(self)
+ return base_str(self)
+ # for simplicity, we only define one operator that
+ # propagates expressions
+ def __add__(self, other):
+ temp = int(self) + int(other)
+ if isinstance(self, NamedInt) and isinstance(other, NamedInt):
+ return NamedInt(
+ '(%s + %s)' % (self.__name__, other.__name__),
+ temp)
+ else:
+ return temp
+
+ class NEI(NamedInt, Enum):
+ __qualname__ = 'NEI'
+ x = ('the-x', 1)
+ y = ('the-y', 2)
+
+ self.assertTrue(NEI.__new__ is Enum.__new__)
+ self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
+ globals()['NamedInt'] = NamedInt
+ globals()['NEI'] = NEI
+ NI5 = NamedInt('test', 5)
+ self.assertEqual(NI5, 5)
+ self.assertEqual(NEI.y.value, 2)
+ check_pickle_exception(self.assertRaises, TypeError, NEI.x)
+ check_pickle_exception(self.assertRaises, PicklingError, NEI)
+
+ def test_subclasses_without_direct_pickle_support_using_name(self):
+ class NamedInt(int):
+ __qualname__ = 'NamedInt'
+ def __new__(cls, *args):
+ _args = args
+ # validate before unpacking so an empty call raises TypeError, not IndexError
+ if len(args) < 2:
+ raise TypeError("name and value must be specified")
+ name, args = args[0], args[1:]
+ self = int.__new__(cls, *args)
+ self._intname = name
+ self._args = _args
+ return self
+ @property
+ def __name__(self):
+ return self._intname
+ def __repr__(self):
+ # repr() is updated to include the name and type info
+ return "%s(%r, %s)" % (type(self).__name__,
+ self.__name__,
+ int.__repr__(self))
+ def __str__(self):
+ # str() is unchanged, even if it relies on the repr() fallback
+ base = int
+ base_str = base.__str__
+ if base_str.__objclass__ is object:
+ return base.__repr__(self)
+ return base_str(self)
+ # for simplicity, we only define one operator that
+ # propagates expressions
+ def __add__(self, other):
+ temp = int(self) + int(other)
+ if isinstance(self, NamedInt) and isinstance(other, NamedInt):
+ return NamedInt(
+ '(%s + %s)' % (self.__name__, other.__name__),
+ temp)
+ else:
+ return temp
+
+ class NEI(NamedInt, Enum):
+ __qualname__ = 'NEI'
+ x = ('the-x', 1)
+ y = ('the-y', 2)
+ def __reduce_ex__(self, proto):
+ return getattr, (self.__class__, self._name_)
+
+ self.assertTrue(NEI.__new__ is Enum.__new__)
+ self.assertEqual(repr(NEI.x + NEI.y), "NamedInt('(the-x + the-y)', 3)")
+ globals()['NamedInt'] = NamedInt
+ globals()['NEI'] = NEI
+ NI5 = NamedInt('test', 5)
+ self.assertEqual(NI5, 5)
+ self.assertEqual(NEI.y.value, 2)
+ check_pickle_dump_load(self.assertTrue, NEI.y)
+ check_pickle_dump_load(self.assertTrue, NEI)
+
+ def test_tuple_subclass(self):
+ class SomeTuple(tuple, Enum):
+ __qualname__ = 'SomeTuple'
+ first = (1, 'for the money')
+ second = (2, 'for the show')
+ third = (3, 'for the music')
+ self.assertTrue(type(SomeTuple.first) is SomeTuple)
+ self.assertTrue(isinstance(SomeTuple.second, tuple))
+ self.assertEqual(SomeTuple.third, (3, 'for the music'))
+ globals()['SomeTuple'] = SomeTuple
+ check_pickle_dump_load(self.assertTrue, SomeTuple.first)
+
+ def test_duplicate_values_give_unique_enum_items(self):
+ class AutoNumber(Enum):
+ __order__ = 'enum_m enum_d enum_y'
+ enum_m = ()
+ enum_d = ()
+ enum_y = ()
+ def __new__(cls):
+ value = len(cls.__members__) + 1
+ obj = object.__new__(cls)
+ obj._value_ = value
+ return obj
+ def __int__(self):
+ return int(self._value_)
+ self.assertEqual(int(AutoNumber.enum_d), 2)
+ self.assertEqual(AutoNumber.enum_y.value, 3)
+ self.assertTrue(AutoNumber(1) is AutoNumber.enum_m)
+ self.assertEqual(
+ list(AutoNumber),
+ [AutoNumber.enum_m, AutoNumber.enum_d, AutoNumber.enum_y],
+ )
+
+ def test_inherited_new_from_enhanced_enum(self):
+ class AutoNumber2(Enum):
+ def __new__(cls):
+ value = len(cls.__members__) + 1
+ obj = object.__new__(cls)
+ obj._value_ = value
+ return obj
+ def __int__(self):
+ return int(self._value_)
+ class Color(AutoNumber2):
+ _order_ = 'red green blue'
+ red = ()
+ green = ()
+ blue = ()
+ self.assertEqual(len(Color), 3, "wrong number of elements: %d (should be %d)" % (len(Color), 3))
+ self.assertEqual(list(Color), [Color.red, Color.green, Color.blue])
+ if pyver >= 3.0:
+ self.assertEqual(list(map(int, Color)), [1, 2, 3])
+
+ def test_inherited_new_from_mixed_enum(self):
+ class AutoNumber3(IntEnum):
+ def __new__(cls):
+ value = len(cls.__members__) + 1
+ obj = int.__new__(cls, value)
+ obj._value_ = value
+ return obj
+ class Color(AutoNumber3):
+ red = ()
+ green = ()
+ blue = ()
+ self.assertEqual(len(Color), 3, "wrong number of elements: %d (should be %d)" % (len(Color), 3))
+ # plain attribute access on the members must not raise
+ Color.red
+ Color.green
+ Color.blue
+
+ def test_equality(self):
+ class AlwaysEqual:
+ def __eq__(self, other):
+ return True
+ class OrdinaryEnum(Enum):
+ a = 1
+ self.assertEqual(AlwaysEqual(), OrdinaryEnum.a)
+ self.assertEqual(OrdinaryEnum.a, AlwaysEqual())
+
+ def test_ordered_mixin(self):
+ class OrderedEnum(Enum):
+ def __ge__(self, other):
+ if self.__class__ is other.__class__:
+ return self._value_ >= other._value_
+ return NotImplemented
+ def __gt__(self, other):
+ if self.__class__ is other.__class__:
+ return self._value_ > other._value_
+ return NotImplemented
+ def __le__(self, other):
+ if self.__class__ is other.__class__:
+ return self._value_ <= other._value_
+ return NotImplemented
+ def __lt__(self, other):
+ if self.__class__ is other.__class__:
+ return self._value_ < other._value_
+ return NotImplemented
+ class Grade(OrderedEnum):
+ __order__ = 'A B C D F'
+ A = 5
+ B = 4
+ C = 3
+ D = 2
+ F = 1
+ self.assertEqual(list(Grade), [Grade.A, Grade.B, Grade.C, Grade.D, Grade.F])
+ self.assertTrue(Grade.A > Grade.B)
+ self.assertTrue(Grade.F <= Grade.C)
+ self.assertTrue(Grade.D < Grade.A)
+ self.assertTrue(Grade.B >= Grade.B)
+
+ def test_extending2(self):
+ def bad_extension():
+ class Shade(Enum):
+ def shade(self):
+ print(self.name)
+ class Color(Shade):
+ red = 1
+ green = 2
+ blue = 3
+ class MoreColor(Color):
+ cyan = 4
+ magenta = 5
+ yellow = 6
+ self.assertRaises(TypeError, bad_extension)
+
+ def test_extending3(self):
+ class Shade(Enum):
+ def shade(self):
+ return self.name
+ class Color(Shade):
+ def hex(self):
+ return '%s hexlified!' % self.value
+ class MoreColor(Color):
+ cyan = 4
+ magenta = 5
+ yellow = 6
+ self.assertEqual(MoreColor.magenta.hex(), '5 hexlified!')
+
+ def test_no_duplicates(self):
+ def bad_duplicates():
+ class UniqueEnum(Enum):
+ def __init__(self, *args):
+ cls = self.__class__
+ if any(self.value == e.value for e in cls):
+ a = self.name
+ e = cls(self.value).name
+ raise ValueError(
+ "aliases not allowed in UniqueEnum: %r --> %r"
+ % (a, e)
+ )
+ class Color(UniqueEnum):
+ red = 1
+ green = 2
+ blue = 3
+ class Color(UniqueEnum):
+ red = 1
+ green = 2
+ blue = 3
+ grene = 2
+ self.assertRaises(ValueError, bad_duplicates)
+
+ def test_init(self):
+ class Planet(Enum):
+ MERCURY = (3.303e+23, 2.4397e6)
+ VENUS = (4.869e+24, 6.0518e6)
+ EARTH = (5.976e+24, 6.37814e6)
+ MARS = (6.421e+23, 3.3972e6)
+ JUPITER = (1.9e+27, 7.1492e7)
+ SATURN = (5.688e+26, 6.0268e7)
+ URANUS = (8.686e+25, 2.5559e7)
+ NEPTUNE = (1.024e+26, 2.4746e7)
+ def __init__(self, mass, radius):
+ self.mass = mass # in kilograms
+ self.radius = radius # in meters
+ @property
+ def surface_gravity(self):
+ # universal gravitational constant (m3 kg-1 s-2)
+ G = 6.67300E-11
+ return G * self.mass / (self.radius * self.radius)
+ self.assertEqual(round(Planet.EARTH.surface_gravity, 2), 9.80)
+ self.assertEqual(Planet.EARTH.value, (5.976e+24, 6.37814e6))
+
+ def test_nonhash_value(self):
+ class AutoNumberInAList(Enum):
+ def __new__(cls):
+ value = [len(cls.__members__) + 1]
+ obj = object.__new__(cls)
+ obj._value_ = value
+ return obj
+ class ColorInAList(AutoNumberInAList):
+ _order_ = 'red green blue'
+ red = ()
+ green = ()
+ blue = ()
+ self.assertEqual(list(ColorInAList), [ColorInAList.red, ColorInAList.green, ColorInAList.blue])
+ self.assertEqual(ColorInAList.red.value, [1])
+ self.assertEqual(ColorInAList([1]), ColorInAList.red)
+
+ def test_conflicting_types_resolved_in_new(self):
+ class LabelledIntEnum(int, Enum):
+ def __new__(cls, *args):
+ value, label = args
+ obj = int.__new__(cls, value)
+ obj.label = label
+ obj._value_ = value
+ return obj
+
+ class LabelledList(LabelledIntEnum):
+ unprocessed = (1, "Unprocessed")
+ payment_complete = (2, "Payment Complete")
+
+ self.assertEqual(list(LabelledList), [LabelledList.unprocessed, LabelledList.payment_complete])
+ self.assertEqual(LabelledList.unprocessed, 1)
+ self.assertEqual(LabelledList(1), LabelledList.unprocessed)
+
+ def test_empty_with_functional_api(self):
+ empty = enum.IntEnum('Foo', {})
+ self.assertEqual(len(empty), 0)
+
+
+class TestUnique(unittest.TestCase):
+ """2.4 doesn't allow class decorators, use function syntax."""
+
+ def test_unique_clean(self):
+ class Clean(Enum):
+ one = 1
+ two = 'dos'
+ tres = 4.0
+ unique(Clean)
+ class Cleaner(IntEnum):
+ single = 1
+ double = 2
+ triple = 3
+ unique(Cleaner)
+
+ def test_unique_dirty(self):
+ try:
+ class Dirty(Enum):
+ __order__ = 'one two tres'
+ one = 1
+ two = 'dos'
+ tres = 1
+ unique(Dirty)
+ except ValueError:
+ exc = sys.exc_info()[1]
+ message = exc.args[0]
+ self.assertTrue('tres -> one' in message)
+
+ try:
+ class Dirtier(IntEnum):
+ _order_ = 'single double triple turkey'
+ single = 1
+ double = 1
+ triple = 3
+ turkey = 3
+ unique(Dirtier)
+ except ValueError:
+ exc = sys.exc_info()[1]
+ message = exc.args[0]
+ self.assertTrue('double -> single' in message)
+ self.assertTrue('turkey -> triple' in message)
+
+ def test_unique_class(self):
+ values = [
+ ('switzerland', 1),
+ ('sweden', 2),
+ ('usa', 3),
+ ('iran', 4),
+ ('iraq', 4),
+ ]
+ with self.assertRaises(ValueError):
+ enum.Enum('Country', values, allow_synonyms=False)
+
+ with self.assertRaises(ValueError):
+ class Country(enum.Enum):
+ __allow_synonyms__ = False
+
+ austria = 1
+ australia = 1
+
+ with self.assertRaises(ValueError):
+ class NoDuplicatesAllowed(enum.Enum):
+ __allow_synonyms__ = False
+
+ t = NoDuplicatesAllowed('NewEnum', [('russia', 1), ('belorussia', 1)])
+
+
+class TestMe(unittest.TestCase):
+
+ pass
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/contrib/deprecated/python/enum34/tests/ya.make b/contrib/deprecated/python/enum34/tests/ya.make
new file mode 100644
index 0000000000..f1b8300430
--- /dev/null
+++ b/contrib/deprecated/python/enum34/tests/ya.make
@@ -0,0 +1,15 @@
+PY2TEST()
+
+SRCDIR(contrib/deprecated/python/enum34/enum)
+
+TEST_SRCS(
+ test.py
+)
+
+PEERDIR(
+ contrib/deprecated/python/enum34
+)
+
+NO_LINT()
+
+END()
diff --git a/contrib/deprecated/python/enum34/ya.make b/contrib/deprecated/python/enum34/ya.make
new file mode 100644
index 0000000000..be19b5baea
--- /dev/null
+++ b/contrib/deprecated/python/enum34/ya.make
@@ -0,0 +1,26 @@
+# Generated by devtools/yamaker (pypi).
+
+PY2_LIBRARY()
+
+VERSION(1.1.10)
+
+LICENSE(BSD-3-Clause)
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ enum/__init__.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/deprecated/python/enum34/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+)
+
+END()
+
+RECURSE(
+ tests
+)
diff --git a/contrib/deprecated/python/faulthandler/.dist-info/METADATA b/contrib/deprecated/python/faulthandler/.dist-info/METADATA
new file mode 100644
index 0000000000..1d2974b87f
--- /dev/null
+++ b/contrib/deprecated/python/faulthandler/.dist-info/METADATA
@@ -0,0 +1,78 @@
+Metadata-Version: 2.1
+Name: faulthandler
+Version: 3.2
+Summary: Display the Python traceback on a crash
+Home-page: https://faulthandler.readthedocs.io/
+Author: Victor Stinner
+Author-email: victor.stinner@gmail.com
+License: BSD (2-clause)
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Natural Language :: English
+Classifier: Programming Language :: C
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Topic :: Software Development :: Debuggers
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+
++++++++++++++
+Fault handler
++++++++++++++
+
+.. image:: https://img.shields.io/pypi/v/faulthandler.svg
+ :alt: Latest release on the Python Cheeseshop (PyPI)
+ :target: https://pypi.python.org/pypi/faulthandler
+
+.. image:: https://travis-ci.org/vstinner/faulthandler.svg?branch=master
+ :alt: Build status of faulthandler on Travis CI
+ :target: https://travis-ci.org/vstinner/faulthandler
+
+.. image:: http://unmaintained.tech/badge.svg
+ :target: http://unmaintained.tech/
+ :alt: No Maintenance Intended
+
+Fault handler for SIGSEGV, SIGFPE, SIGABRT, SIGBUS and SIGILL signals: display
+the Python traceback and restore the previous handler. Allocate an alternate
+stack for this handler, if sigaltstack() is available, to be able to allocate
+memory on the stack, even on stack overflow (not available on Windows).
+
+Import the module and call faulthandler.enable() to enable the fault handler.
+
+Alternatively you can set the PYTHONFAULTHANDLER environment variable to a
+non-empty value.
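+
+A minimal sketch of enabling the handler at interpreter startup (nothing
+beyond the module itself is assumed)::
+
+    import faulthandler
+
+    # install the handler for SIGSEGV, SIGFPE, SIGABRT, SIGBUS and SIGILL
+    faulthandler.enable()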
+
+The fault handler is called in catastrophic cases, so it can only use
+signal-safe functions (e.g. it doesn't allocate memory on the heap). That's why
+the traceback is limited: it only supports ASCII encoding (using the
+backslashreplace error handler for non-ASCII characters), limits each string to
+100 characters, doesn't print the source code in the traceback (only the
+filename, the function name and the line number), and is limited to 100 frames
+and 100 threads.
+
+By default, the Python traceback is written to the standard error stream. Start
+your graphical applications in a terminal and run your server in the foreground
+to see the traceback, or pass a file to faulthandler.enable().
+
+faulthandler is implemented in C using signal handlers to be able to dump a
+traceback on a crash or when Python is blocked (e.g. a deadlock).
+
+This module is the backport for CPython 2.7. faulthandler has been part of the
+CPython standard library since CPython 3.3: `faulthandler
+<http://docs.python.org/dev/library/faulthandler.html>`_. For PyPy,
+faulthandler has been built in since PyPy 5.5: use ``pypy -X faulthandler``.
+
+Website:
+https://faulthandler.readthedocs.io/
+
+faulthandler 3.2 is the last version released by Victor Stinner. I maintained
+it for 10 years in my free time for the great pleasure of Python 2 users, but
+Python 2 has not been supported upstream since 2020-01-01. Each faulthandler
+release requires me to start my Windows VM, install Python 2.7 in 32-bit and
+64-bit, install an old C compiler just for Python 2.7, and manually type some
+commands to upload Windows binaries. Moreover, I have to fix issues on Travis
+CI and handle many small boring tasks. The maintenance is far from free. In 10
+years, I got zero "thank you" (and 0€), only bug reports :-)
+
diff --git a/contrib/deprecated/python/faulthandler/.dist-info/top_level.txt b/contrib/deprecated/python/faulthandler/.dist-info/top_level.txt
new file mode 100644
index 0000000000..c093449392
--- /dev/null
+++ b/contrib/deprecated/python/faulthandler/.dist-info/top_level.txt
@@ -0,0 +1 @@
+faulthandler
diff --git a/contrib/deprecated/python/faulthandler/AUTHORS b/contrib/deprecated/python/faulthandler/AUTHORS
new file mode 100644
index 0000000000..56ba9ac0c5
--- /dev/null
+++ b/contrib/deprecated/python/faulthandler/AUTHORS
@@ -0,0 +1,14 @@
+Authors
+=======
+
+Victor Stinner <victor.stinner@gmail.com>
+
+Contributors
+============
+
+Dan Sully <daniel@electricrain.com> - minor fixes
+Guido van Rossum <guido@dropbox.com> - enhance traceback output
+Martin (gzlist) <gzlist@googlemail.com> - improved Windows support
+Ionel Cristian Mărieș <contact@ionelmc.ro> - guilty for the .pth/environment variable activation
+Wei Wu - added support for file descriptors
+Giuseppe Corbelli - added support for thread name printout on Linux
diff --git a/contrib/deprecated/python/faulthandler/COPYING b/contrib/deprecated/python/faulthandler/COPYING
new file mode 100644
index 0000000000..b334336b5c
--- /dev/null
+++ b/contrib/deprecated/python/faulthandler/COPYING
@@ -0,0 +1,25 @@
+Copyright 2010 Victor Stinner. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are
+permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice, this list of
+ conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright notice, this list
+ of conditions and the following disclaimer in the documentation and/or other materials
+ provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY VICTOR STINNER ``AS IS'' AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL VICTOR STINNER OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
+ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+The views and conclusions contained in the software and documentation are those of the
+authors and should not be interpreted as representing official policies, either expressed
+or implied, of Victor Stinner.
diff --git a/contrib/deprecated/python/faulthandler/README.rst b/contrib/deprecated/python/faulthandler/README.rst
new file mode 100644
index 0000000000..82a8c1ac78
--- /dev/null
+++ b/contrib/deprecated/python/faulthandler/README.rst
@@ -0,0 +1,57 @@
++++++++++++++
+Fault handler
++++++++++++++
+
+.. image:: https://img.shields.io/pypi/v/faulthandler.svg
+ :alt: Latest release on the Python Cheeseshop (PyPI)
+ :target: https://pypi.python.org/pypi/faulthandler
+
+.. image:: https://travis-ci.org/vstinner/faulthandler.svg?branch=master
+ :alt: Build status of faulthandler on Travis CI
+ :target: https://travis-ci.org/vstinner/faulthandler
+
+.. image:: http://unmaintained.tech/badge.svg
+ :target: http://unmaintained.tech/
+ :alt: No Maintenance Intended
+
+Fault handler for SIGSEGV, SIGFPE, SIGABRT, SIGBUS and SIGILL signals: display
+the Python traceback and restore the previous handler. Allocate an alternate
+stack for this handler, if sigaltstack() is available, to be able to allocate
+memory on the stack, even on stack overflow (not available on Windows).
+
+Import the module and call faulthandler.enable() to enable the fault handler.
+
+Alternatively you can set the PYTHONFAULTHANDLER environment variable to a
+non-empty value.
+
+The fault handler is called in catastrophic cases and so can only use
+signal-safe functions (e.g. it doesn't allocate memory on the heap). That's why
+the traceback is limited: it only supports ASCII encoding (using the
+backslashreplace error handler for non-ASCII characters), limits each string
+to 100 characters, doesn't print the source code in the traceback (only the
+filename, the function name and the line number), and is limited to 100 frames
+and 100 threads.
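+
+The dump looks roughly like this (the file names, line numbers and thread id
+below are illustrative)::
+
+    Fatal Python error: Segmentation fault
+
+    Current thread 0x00007f0d3be3e700 (most recent call first):
+      File "script.py", line 42 in crash
+      File "script.py", line 58 in <module>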
+
+By default, the Python traceback is written to the standard error stream. Start
+your graphical applications in a terminal and run your server in the foreground to
+see the traceback, or pass a file to faulthandler.enable().
+
+faulthandler is implemented in C using signal handlers to be able to dump a
+traceback on a crash or when Python is blocked (e.g. a deadlock).
+
+This module is the backport for CPython 2.7. faulthandler has been part of the
+CPython standard library since CPython 3.3: `faulthandler
+<http://docs.python.org/dev/library/faulthandler.html>`_. For PyPy,
+faulthandler has been built in since PyPy 5.5: use ``pypy -X faulthandler``.
+
+Website:
+https://faulthandler.readthedocs.io/
+
+faulthandler 3.2 is the last version released by Victor Stinner. I maintained
+it for 10 years in my free time for the great pleasure of Python 2 users, but
+Python 2 has not been supported upstream since 2020-01-01. Each faulthandler
+release requires me to start my Windows VM, install Python 2.7 in 32-bit and
+64-bit, install an old C compiler just for Python 2.7, and manually type some
+commands to upload Windows binaries. Moreover, I have to fix issues on
+Travis CI and handle many small boring tasks. The maintenance is far from free.
+In 10 years, I got zero "thank you" (and 0€), only bug reports :-)
diff --git a/contrib/deprecated/python/faulthandler/faulthandler.c b/contrib/deprecated/python/faulthandler/faulthandler.c
new file mode 100644
index 0000000000..4aaae8b636
--- /dev/null
+++ b/contrib/deprecated/python/faulthandler/faulthandler.c
@@ -0,0 +1,1415 @@
+/*
+ * faulthandler module
+ *
+ * Written by Victor Stinner.
+ */
+
+#include "Python.h"
+#include "pythread.h"
+#include <signal.h>
+#ifdef MS_WINDOWS
+# include <windows.h>
+#endif
+#ifdef HAVE_SYS_RESOURCE_H
+# include <sys/resource.h>
+#endif
+
+#define VERSION 0x302
+
+/* Allocate at most 100 MB of stack when trying to trigger a stack overflow */
+#define STACK_OVERFLOW_MAX_SIZE (100*1024*1024)
+
+#ifdef SIGALRM
+# define FAULTHANDLER_LATER
+#endif
+
+#ifndef MS_WINDOWS
+ /* sigaltstack() is not available on Windows */
+# define HAVE_SIGALTSTACK
+
+ /* register() is useless on Windows, because only SIGSEGV, SIGABRT and
+ SIGILL can be handled by the process, and these signals can only be used
+      with enable(), not with register() */
+# define FAULTHANDLER_USER
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+# define PYINT_CHECK PyLong_Check
+# define PYINT_ASLONG PyLong_AsLong
+#else
+# define PYINT_CHECK PyInt_Check
+# define PYINT_ASLONG PyInt_AsLong
+#endif
+
+/* defined in traceback.c */
+extern Py_ssize_t _Py_write_noraise(int fd, const char *buf, size_t count);
+extern void dump_decimal(int fd, int value);
+extern void reverse_string(char *text, const size_t len);
+
+/* cast size_t to int because write() takes an int on Windows
+ (anyway, the length is smaller than 30 characters) */
+#define PUTS(fd, str) _Py_write_noraise(fd, str, (int)strlen(str))
+
+#ifdef HAVE_SIGACTION
+typedef struct sigaction _Py_sighandler_t;
+#else
+typedef PyOS_sighandler_t _Py_sighandler_t;
+#endif
+
+typedef struct {
+ int signum;
+ int enabled;
+ const char* name;
+ _Py_sighandler_t previous;
+ int all_threads;
+} fault_handler_t;
+
+static struct {
+ int enabled;
+ PyObject *file;
+ int fd;
+ int all_threads;
+ PyInterpreterState *interp;
+} fatal_error = {0, NULL, -1, 0};
+
+#ifdef FAULTHANDLER_LATER
+static struct {
+ PyObject *file;
+ int fd;
+ int timeout;
+ int repeat;
+ PyInterpreterState *interp;
+ int exit;
+ char *header;
+ size_t header_len;
+} fault_alarm;
+#endif
+
+#ifdef FAULTHANDLER_USER
+typedef struct {
+ int enabled;
+ PyObject *file;
+ int fd;
+ int all_threads;
+ int chain;
+ _Py_sighandler_t previous;
+ PyInterpreterState *interp;
+} user_signal_t;
+
+static user_signal_t *user_signals;
+
+/* the following macros come from Python: Modules/signalmodule.c of Python 3.3 */
+#if defined(PYOS_OS2) && !defined(PYCC_GCC)
+#define NSIG 12
+#endif
+#ifndef NSIG
+# if defined(_NSIG)
+# define NSIG _NSIG /* For BSD/SysV */
+# elif defined(_SIGMAX)
+# define NSIG (_SIGMAX + 1) /* For QNX */
+# elif defined(SIGMAX)
+# define NSIG (SIGMAX + 1) /* For djgpp */
+# else
+# define NSIG 64 /* Use a reasonable default value */
+# endif
+#endif
+
+static void faulthandler_user(int signum);
+#endif /* FAULTHANDLER_USER */
+
+#ifndef SI_KERNEL
+#define SI_KERNEL 0x80
+#endif
+
+#ifndef SI_TKILL
+#define SI_TKILL -6
+#endif
+
+static fault_handler_t faulthandler_handlers[] = {
+#ifdef SIGBUS
+ {SIGBUS, 0, "Bus error", },
+#endif
+#ifdef SIGILL
+ {SIGILL, 0, "Illegal instruction", },
+#endif
+ {SIGFPE, 0, "Floating point exception", },
+ {SIGABRT, 0, "Aborted", },
+ /* define SIGSEGV at the end to make it the default choice if searching the
+ handler fails in faulthandler_fatal_error() */
+ {SIGSEGV, 0, "Segmentation fault", }
+};
+static const size_t faulthandler_nsignals =
+ sizeof(faulthandler_handlers) / sizeof(faulthandler_handlers[0]);
+
+#ifdef HAVE_SIGALTSTACK
+static stack_t stack;
+#endif
+
+/* Forward */
+static void faulthandler_unload(void);
+
+/* from traceback.c */
+extern void _Py_DumpTraceback(int fd, PyThreadState *tstate);
+extern const char* _Py_DumpTracebackThreads(
+ int fd,
+ PyInterpreterState *interp,
+ PyThreadState *current_thread);
+
+/* Get the file descriptor of a file by calling its fileno() method and then
+ call its flush() method.
+
+ If file is NULL or Py_None, use sys.stderr as the new file.
+   If file is an integer, it will be treated as a file descriptor.
+
+ On success, return the file descriptor and write the new file into *file_ptr.
+ On error, return -1. */
+
+static int
+faulthandler_get_fileno(PyObject **file_ptr)
+{
+ PyObject *result;
+ long fd_long;
+ long fd;
+ PyObject *file = *file_ptr;
+
+ if (file == NULL || file == Py_None) {
+ file = PySys_GetObject("stderr");
+ if (file == NULL) {
+ PyErr_SetString(PyExc_RuntimeError, "unable to get sys.stderr");
+ return -1;
+ }
+ if (file == Py_None) {
+ PyErr_SetString(PyExc_RuntimeError, "sys.stderr is None");
+ return -1;
+ }
+ }
+ else if (PYINT_CHECK(file)) {
+ fd = PYINT_ASLONG(file);
+ if (fd == -1 && PyErr_Occurred())
+ return -1;
+ if (fd < 0 || fd > INT_MAX) {
+ PyErr_SetString(PyExc_ValueError,
+ "file is not a valid file descripter");
+ return -1;
+ }
+ *file_ptr = NULL;
+ return (int)fd;
+ }
+
+ result = PyObject_CallMethod(file, "fileno", "");
+ if (result == NULL)
+ return -1;
+
+ fd = -1;
+ if (PYINT_CHECK(result)) {
+ fd_long = PYINT_ASLONG(result);
+        if (0 <= fd_long && fd_long < INT_MAX)
+ fd = (int)fd_long;
+ }
+ Py_DECREF(result);
+
+ if (fd == -1) {
+ PyErr_SetString(PyExc_RuntimeError,
+ "file.fileno() is not a valid file descriptor");
+ return -1;
+ }
+
+ result = PyObject_CallMethod(file, "flush", "");
+ if (result != NULL)
+ Py_DECREF(result);
+ else {
+ /* ignore flush() error */
+ PyErr_Clear();
+ }
+ *file_ptr = file;
+ return fd;
+}
+
+/* Get the state of the current thread: only call this function if the current
+ thread holds the GIL. Raise an exception on error. */
+static PyThreadState*
+get_thread_state(void)
+{
+ PyThreadState *tstate = PyThreadState_Get();
+ if (tstate == NULL) {
+ PyErr_SetString(PyExc_RuntimeError,
+ "unable to get the current thread state");
+ return NULL;
+ }
+ return tstate;
+}
+
+static void
+faulthandler_dump_traceback(int fd, int all_threads,
+ PyInterpreterState *interp)
+{
+ static volatile int reentrant = 0;
+ PyThreadState *tstate;
+
+ if (reentrant)
+ return;
+
+ reentrant = 1;
+
+#ifdef WITH_THREAD
+ /* SIGSEGV, SIGFPE, SIGABRT, SIGBUS and SIGILL are synchronous signals and
+ are thus delivered to the thread that caused the fault. Get the Python
+ thread state of the current thread.
+
+ PyThreadState_Get() doesn't give the state of the thread that caused the
+ fault if the thread released the GIL, and so this function cannot be
+ used. Read the thread local storage (TLS) instead: call
+ PyGILState_GetThisThreadState(). */
+ tstate = PyGILState_GetThisThreadState();
+#else
+ tstate = PyThreadState_Get();
+#endif
+
+ if (all_threads)
+ _Py_DumpTracebackThreads(fd, interp, tstate);
+ else {
+ if (tstate != NULL)
+ _Py_DumpTraceback(fd, tstate);
+ }
+
+ reentrant = 0;
+}
+
+static PyObject*
+faulthandler_dump_traceback_py(PyObject *self,
+ PyObject *args, PyObject *kwargs)
+{
+ static char *kwlist[] = {"file", "all_threads", NULL};
+ PyObject *file = NULL;
+ int all_threads = 1;
+ PyThreadState *tstate;
+ const char *errmsg;
+ int fd;
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs,
+ "|Oi:dump_traceback", kwlist,
+ &file, &all_threads))
+ return NULL;
+
+ fd = faulthandler_get_fileno(&file);
+ if (fd < 0)
+ return NULL;
+
+ tstate = get_thread_state();
+ if (tstate == NULL)
+ return NULL;
+
+ if (all_threads) {
+ errmsg = _Py_DumpTracebackThreads(fd, tstate->interp, tstate);
+ if (errmsg != NULL) {
+ PyErr_SetString(PyExc_RuntimeError, errmsg);
+ return NULL;
+ }
+ }
+ else {
+ _Py_DumpTraceback(fd, tstate);
+ }
+
+ if (PyErr_CheckSignals())
+ return NULL;
+
+ Py_RETURN_NONE;
+}
+
+static void
+faulthandler_disable_fatal_handler(fault_handler_t *handler)
+{
+ if (!handler->enabled)
+ return;
+ handler->enabled = 0;
+#ifdef HAVE_SIGACTION
+ (void)sigaction(handler->signum, &handler->previous, NULL);
+#else
+ (void)signal(handler->signum, handler->previous);
+#endif
+}
+
+
+/* Handler for SIGSEGV, SIGFPE, SIGABRT, SIGBUS and SIGILL signals.
+
+ Display the current Python traceback, restore the previous handler and call
+ the previous handler.
+
+ On Windows, don't explicitly call the previous handler, because the Windows
+ signal handler would not be called (for an unknown reason). The execution of
+ the program continues at faulthandler_fatal_error() exit, but the same
+ instruction will raise the same fault (signal), and so the previous handler
+ will be called.
+
+ This function is signal-safe and should only call signal-safe functions. */
+
+static void
+faulthandler_fatal_error(int signum)
+{
+ const int fd = fatal_error.fd;
+ size_t i;
+ fault_handler_t *handler = NULL;
+ int save_errno = errno;
+
+ if (!fatal_error.enabled)
+ return;
+
+ for (i=0; i < faulthandler_nsignals; i++) {
+ handler = &faulthandler_handlers[i];
+ if (handler->signum == signum)
+ break;
+ }
+ if (handler == NULL) {
+ /* faulthandler_nsignals == 0 (unlikely) */
+ return;
+ }
+
+ /* restore the previous handler */
+ faulthandler_disable_fatal_handler(handler);
+
+ PUTS(fd, "Fatal Python error: ");
+ PUTS(fd, handler->name);
+ PUTS(fd, "\n\n");
+
+ faulthandler_dump_traceback(fd, fatal_error.all_threads,
+ fatal_error.interp);
+
+ errno = save_errno;
+#ifdef MS_WINDOWS
+ if (signum == SIGSEGV) {
+ /* don't explicitly call the previous handler for SIGSEGV in this signal
+ handler, because the Windows signal handler would not be called */
+ return;
+ }
+#endif
+ /* call the previous signal handler: it is called immediately if we use
+       sigaction() thanks to the SA_NODEFER flag, otherwise it is deferred */
+ raise(signum);
+}
+
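+/* Format 'val' as decimal into 'ss' (no terminating null byte is written)
+   and return the number of characters produced. Signal safe. */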
+static size_t
+uitoa(size_t val, char* ss) {
+ char* start = ss;
+ size_t len = 0;
+ do {
+ *ss = '0' + (val % 10);
+ val /= 10;
+ ss++; len++;
+ } while (val);
+ reverse_string(start, len);
+ return len;
+}
+
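+/* Resolve /proc/<pid>/exe to get the path of the program that sent the
+   signal; fall back to "unknown_program" if readlink() fails. Linux-specific
+   and async-signal-safe. */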
+static void
+read_proc_exe(pid_t pid, char* buff, size_t len) {
+ char pathname[32] = {0};
+ strcpy(pathname, "/proc/");
+ size_t pos = uitoa(pid, &pathname[6]) + 6;
+ strcpy(&pathname[pos], "/exe");
+
+ ssize_t l = readlink(pathname, buff, len);
+ if (l > 0) {
+ // readlink() does not append a null byte to buf
+ buff[l] = '\0';
+ } else {
+ strncpy(buff, "unknown_program", len);
+ }
+}
+
+#ifdef HAVE_SIGACTION
+static void
+faulthandler_fatal_error_siginfo(int signum, siginfo_t* siginfo, void* ctx)
+{
+ const int fd = fatal_error.fd;
+ int save_errno = errno;
+
+ if (!fatal_error.enabled)
+ return;
+
+ PUTS(fd, "\n*** Signal {si_signo=");
+ dump_decimal(fd, siginfo->si_signo);
+
+ PUTS(fd, ", si_code=");
+ dump_decimal(fd, siginfo->si_code);
+ switch (siginfo->si_code) {
+ case SEGV_ACCERR: PUTS(fd, " SEGV_ACCERR"); break;
+ case SEGV_MAPERR: PUTS(fd, " SEGV_MAPERR"); break;
+ case SI_KERNEL: PUTS(fd, " SI_KERNEL"); break;
+ case SI_TIMER: PUTS(fd, " SI_TIMER"); break;
+ case SI_TKILL: PUTS(fd, " SI_TKILL"); break;
+ case SI_USER: PUTS(fd, " SI_USER"); break;
+ }
+
+ if (siginfo->si_pid > 0) {
+ PUTS(fd, ", si_pid=");
+ dump_decimal(fd, siginfo->si_pid);
+ PUTS(fd, " ");
+ char buffer[PATH_MAX] = {0};
+ read_proc_exe(siginfo->si_pid, &buffer[0], PATH_MAX - 1);
+ PUTS(fd, &buffer[0]);
+ }
+
+ PUTS(fd, ", si_uid=");
+ dump_decimal(fd, siginfo->si_uid);
+
+ PUTS(fd, "} received by proc {pid=");
+ dump_decimal(fd, getpid());
+ PUTS(fd, ", uid=");
+ dump_decimal(fd, getuid());
+ PUTS(fd, "} ***\n");
+
+ faulthandler_fatal_error(signum);
+
+ errno = save_errno;
+}
+#endif
+
+#ifdef MS_WINDOWS
+extern void _Py_dump_hexadecimal(int fd, unsigned long value, size_t bytes);
+
+static int
+faulthandler_ignore_exception(DWORD code)
+{
+ /* bpo-30557: ignore exceptions which are not errors */
+ if (!(code & 0x80000000)) {
+ return 1;
+ }
+ /* bpo-31701: ignore MSC and COM exceptions
+ E0000000 + code */
+ if (code == 0xE06D7363 /* MSC exception ("Emsc") */
+ || code == 0xE0434352 /* COM Callable Runtime exception ("ECCR") */) {
+ return 1;
+ }
+ /* Interesting exception: log it with the Python traceback */
+ return 0;
+}
+
+static LONG WINAPI
+faulthandler_exc_handler(struct _EXCEPTION_POINTERS *exc_info)
+{
+ const int fd = fatal_error.fd;
+ DWORD code = exc_info->ExceptionRecord->ExceptionCode;
+
+ if (faulthandler_ignore_exception(code)) {
+ /* ignore the exception: call the next exception handler */
+ return EXCEPTION_CONTINUE_SEARCH;
+ }
+ PUTS(fd, "Windows exception: ");
+ switch (code)
+ {
+ /* only format most common errors */
+ case EXCEPTION_ACCESS_VIOLATION: PUTS(fd, "access violation"); break;
+ case EXCEPTION_FLT_DIVIDE_BY_ZERO: PUTS(fd, "float divide by zero"); break;
+ case EXCEPTION_FLT_OVERFLOW: PUTS(fd, "float overflow"); break;
+ case EXCEPTION_INT_DIVIDE_BY_ZERO: PUTS(fd, "int divide by zero"); break;
+ case EXCEPTION_INT_OVERFLOW: PUTS(fd, "integer overflow"); break;
+ case EXCEPTION_IN_PAGE_ERROR: PUTS(fd, "page error"); break;
+ case EXCEPTION_STACK_OVERFLOW: PUTS(fd, "stack overflow"); break;
+ default:
+ PUTS(fd, "code 0x");
+ _Py_dump_hexadecimal(fd, code, sizeof(DWORD));
+ }
+ PUTS(fd, "\n\n");
+
+ if (code == EXCEPTION_ACCESS_VIOLATION) {
+ /* disable signal handler for SIGSEGV */
+ fault_handler_t *handler;
+ size_t i;
+ for (i=0; i < faulthandler_nsignals; i++) {
+ handler = &faulthandler_handlers[i];
+ if (handler->signum == SIGSEGV) {
+ faulthandler_disable_fatal_handler(handler);
+ break;
+ }
+ }
+ }
+
+ faulthandler_dump_traceback(fd, fatal_error.all_threads,
+ fatal_error.interp);
+
+ /* call the next exception handler */
+ return EXCEPTION_CONTINUE_SEARCH;
+}
+#endif
+
+
+/* Install the handler for fatal signals, faulthandler_fatal_error(). */
+
+static PyObject*
+faulthandler_enable(PyObject *self, PyObject *args, PyObject *kwargs)
+{
+ static char *kwlist[] = {"file", "all_threads", NULL};
+ PyObject *file = NULL;
+ int all_threads = 1;
+ unsigned int i;
+ fault_handler_t *handler;
+#ifdef HAVE_SIGACTION
+ struct sigaction action;
+#endif
+ int err;
+ int fd;
+ PyThreadState *tstate;
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs,
+ "|Oi:enable", kwlist, &file, &all_threads))
+ return NULL;
+
+ fd = faulthandler_get_fileno(&file);
+ if (fd < 0)
+ return NULL;
+
+ tstate = get_thread_state();
+ if (tstate == NULL)
+ return NULL;
+
+ Py_XDECREF(fatal_error.file);
+ Py_XINCREF(file);
+ fatal_error.file = file;
+ fatal_error.fd = fd;
+ fatal_error.all_threads = all_threads;
+ fatal_error.interp = tstate->interp;
+
+ if (!fatal_error.enabled) {
+ fatal_error.enabled = 1;
+
+ for (i=0; i < faulthandler_nsignals; i++) {
+ handler = &faulthandler_handlers[i];
+#ifdef HAVE_SIGACTION
+ action.sa_flags = 0;
+#ifdef USE_SIGINFO
+            action.sa_sigaction = faulthandler_fatal_error_siginfo;
+ action.sa_flags |= SA_SIGINFO;
+#else
+ action.sa_handler = faulthandler_fatal_error;
+#endif
+ sigemptyset(&action.sa_mask);
+ /* Do not prevent the signal from being received from within
+ its own signal handler */
+ action.sa_flags |= SA_NODEFER;
+#ifdef HAVE_SIGALTSTACK
+ if (stack.ss_sp != NULL) {
+ /* Call the signal handler on an alternate signal stack
+ provided by sigaltstack() */
+ action.sa_flags |= SA_ONSTACK;
+ }
+#endif
+ err = sigaction(handler->signum, &action, &handler->previous);
+#else
+ handler->previous = signal(handler->signum,
+ faulthandler_fatal_error);
+ err = (handler->previous == SIG_ERR);
+#endif
+ if (err) {
+ PyErr_SetFromErrno(PyExc_RuntimeError);
+ return NULL;
+ }
+ handler->enabled = 1;
+ }
+#ifdef MS_WINDOWS
+ AddVectoredExceptionHandler(1, faulthandler_exc_handler);
+#endif
+ }
+ Py_RETURN_NONE;
+}
+
+static void
+faulthandler_disable(void)
+{
+ unsigned int i;
+ fault_handler_t *handler;
+
+ if (fatal_error.enabled) {
+ fatal_error.enabled = 0;
+ for (i=0; i < faulthandler_nsignals; i++) {
+ handler = &faulthandler_handlers[i];
+ faulthandler_disable_fatal_handler(handler);
+ }
+ }
+
+ Py_CLEAR(fatal_error.file);
+}
+
+static PyObject*
+faulthandler_disable_py(PyObject *self)
+{
+ if (!fatal_error.enabled) {
+ Py_INCREF(Py_False);
+ return Py_False;
+ }
+ faulthandler_disable();
+ Py_INCREF(Py_True);
+ return Py_True;
+}
+
+static PyObject*
+faulthandler_is_enabled(PyObject *self)
+{
+ return PyBool_FromLong(fatal_error.enabled);
+}
+
+#ifdef FAULTHANDLER_LATER
+/* Handler of the SIGALRM signal.
+
+ Dump the traceback of the current thread, or of all threads if
+ fault_alarm.all_threads is true. On success, register itself again if
+ fault_alarm.repeat is true.
+
+ This function is signal safe and should only call signal safe functions. */
+
+static void
+faulthandler_alarm(int signum)
+{
+ PyThreadState *tstate;
+ const char* errmsg;
+ int ok;
+
+ _Py_write_noraise(fault_alarm.fd,
+ fault_alarm.header, fault_alarm.header_len);
+
+ /* PyThreadState_Get() doesn't give the state of the current thread if
+ the thread doesn't hold the GIL. Read the thread local storage (TLS)
+ instead: call PyGILState_GetThisThreadState(). */
+ tstate = PyGILState_GetThisThreadState();
+
+ errmsg = _Py_DumpTracebackThreads(fault_alarm.fd, fault_alarm.interp, tstate);
+ ok = (errmsg == NULL);
+
+ if (ok && fault_alarm.repeat)
+ alarm(fault_alarm.timeout);
+ else
+ /* don't call Py_CLEAR() here because it may call _Py_Dealloc() which
+ is not signal safe */
+ alarm(0);
+
+ if (fault_alarm.exit)
+ _exit(1);
+}
+
+static char*
+format_timeout(double timeout)
+{
+ unsigned long us, sec, min, hour;
+ double intpart, fracpart;
+ char buffer[100];
+
+ fracpart = modf(timeout, &intpart);
+ sec = (unsigned long)intpart;
+ us = (unsigned long)(fracpart * 1e6);
+ min = sec / 60;
+ sec %= 60;
+ hour = min / 60;
+ min %= 60;
+
+ if (us != 0)
+ PyOS_snprintf(buffer, sizeof(buffer),
+ "Timeout (%lu:%02lu:%02lu.%06lu)!\n",
+ hour, min, sec, us);
+ else
+ PyOS_snprintf(buffer, sizeof(buffer),
+ "Timeout (%lu:%02lu:%02lu)!\n",
+ hour, min, sec);
+
+ return strdup(buffer);
+}
+
+static PyObject*
+faulthandler_dump_traceback_later(PyObject *self,
+ PyObject *args, PyObject *kwargs)
+{
+ static char *kwlist[] = {"timeout", "repeat", "file", "exit", NULL};
+ int timeout;
+ PyOS_sighandler_t previous;
+ int repeat = 0;
+ PyObject *file = NULL;
+ int exit = 0;
+ PyThreadState *tstate;
+ int fd;
+ char *header;
+ size_t header_len;
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs,
+ "i|iOi:dump_traceback_later", kwlist,
+ &timeout, &repeat, &file, &exit))
+ return NULL;
+ if (timeout <= 0) {
+ PyErr_SetString(PyExc_ValueError, "timeout must be greater than 0");
+ return NULL;
+ }
+
+ tstate = get_thread_state();
+ if (tstate == NULL)
+ return NULL;
+
+ fd = faulthandler_get_fileno(&file);
+ if (fd < 0)
+ return NULL;
+
+ /* format the timeout */
+ header = format_timeout(timeout);
+ if (header == NULL)
+ return PyErr_NoMemory();
+ header_len = strlen(header);
+
+ previous = signal(SIGALRM, faulthandler_alarm);
+ if (previous == SIG_ERR) {
+ PyErr_SetString(PyExc_RuntimeError, "unable to set SIGALRM handler");
+ free(header);
+ return NULL;
+ }
+
+ Py_XDECREF(fault_alarm.file);
+ Py_XINCREF(file);
+ fault_alarm.file = file;
+ fault_alarm.fd = fd;
+ fault_alarm.timeout = timeout;
+ fault_alarm.repeat = repeat;
+ fault_alarm.interp = tstate->interp;
+ fault_alarm.exit = exit;
+ fault_alarm.header = header;
+ fault_alarm.header_len = header_len;
+
+ alarm(timeout);
+
+ Py_RETURN_NONE;
+}
+
+static PyObject*
+faulthandler_cancel_dump_traceback_later_py(PyObject *self)
+{
+ alarm(0);
+ Py_CLEAR(fault_alarm.file);
+ free(fault_alarm.header);
+ fault_alarm.header = NULL;
+ Py_RETURN_NONE;
+}
+#endif /* FAULTHANDLER_LATER */
+
+#ifdef FAULTHANDLER_USER
+static int
+faulthandler_register(int signum, int chain, _Py_sighandler_t *p_previous)
+{
+#ifdef HAVE_SIGACTION
+ struct sigaction action;
+ action.sa_handler = faulthandler_user;
+ sigemptyset(&action.sa_mask);
+ /* if the signal is received while the kernel is executing a system
+ call, try to restart the system call instead of interrupting it and
+       returning EINTR. */
+ action.sa_flags = SA_RESTART;
+ if (chain) {
+ /* do not prevent the signal from being received from within its
+ own signal handler */
+ action.sa_flags = SA_NODEFER;
+ }
+#ifdef HAVE_SIGALTSTACK
+ if (stack.ss_sp != NULL) {
+ /* Call the signal handler on an alternate signal stack
+ provided by sigaltstack() */
+ action.sa_flags |= SA_ONSTACK;
+ }
+#endif
+ return sigaction(signum, &action, p_previous);
+#else
+ _Py_sighandler_t previous;
+ previous = signal(signum, faulthandler_user);
+ if (p_previous != NULL)
+ *p_previous = previous;
+ return (previous == SIG_ERR);
+#endif
+}
+
+/* Handler of user signals (e.g. SIGUSR1).
+
+ Dump the traceback of the current thread, or of all threads if
+ thread.all_threads is true.
+
+ This function is signal safe and should only call signal safe functions. */
+
+static void
+faulthandler_user(int signum)
+{
+ user_signal_t *user;
+ int save_errno = errno;
+
+ user = &user_signals[signum];
+ if (!user->enabled)
+ return;
+
+ faulthandler_dump_traceback(user->fd, user->all_threads, user->interp);
+
+#ifdef HAVE_SIGACTION
+ if (user->chain) {
+ (void)sigaction(signum, &user->previous, NULL);
+ errno = save_errno;
+
+ /* call the previous signal handler */
+ raise(signum);
+
+ save_errno = errno;
+ (void)faulthandler_register(signum, user->chain, NULL);
+ errno = save_errno;
+ }
+#else
+ if (user->chain) {
+ errno = save_errno;
+ /* call the previous signal handler */
+ user->previous(signum);
+ }
+#endif
+}
+
+static int
+check_signum(int signum)
+{
+ unsigned int i;
+
+ for (i=0; i < faulthandler_nsignals; i++) {
+ if (faulthandler_handlers[i].signum == signum) {
+ PyErr_Format(PyExc_RuntimeError,
+ "signal %i cannot be registered, "
+ "use enable() instead",
+ signum);
+ return 0;
+ }
+ }
+ if (signum < 1 || NSIG <= signum) {
+ PyErr_SetString(PyExc_ValueError, "signal number out of range");
+ return 0;
+ }
+ return 1;
+}
+
+static PyObject*
+faulthandler_register_py(PyObject *self,
+ PyObject *args, PyObject *kwargs)
+{
+ static char *kwlist[] = {"signum", "file", "all_threads", "chain", NULL};
+ int signum;
+ PyObject *file = NULL;
+ int all_threads = 1;
+ int chain = 0;
+ int fd;
+ user_signal_t *user;
+ _Py_sighandler_t previous;
+ PyThreadState *tstate;
+ int err;
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs,
+ "i|Oii:register", kwlist,
+ &signum, &file, &all_threads, &chain))
+ return NULL;
+
+ if (!check_signum(signum))
+ return NULL;
+
+ tstate = get_thread_state();
+ if (tstate == NULL)
+ return NULL;
+
+ fd = faulthandler_get_fileno(&file);
+ if (fd < 0)
+ return NULL;
+
+ if (user_signals == NULL) {
+ user_signals = PyMem_Malloc(NSIG * sizeof(user_signal_t));
+ if (user_signals == NULL)
+ return PyErr_NoMemory();
+ memset(user_signals, 0, NSIG * sizeof(user_signal_t));
+ }
+ user = &user_signals[signum];
+
+ if (!user->enabled) {
+ err = faulthandler_register(signum, chain, &previous);
+ if (err) {
+ PyErr_SetFromErrno(PyExc_OSError);
+ return NULL;
+ }
+
+ user->previous = previous;
+ }
+
+ Py_XDECREF(user->file);
+ Py_XINCREF(file);
+ user->file = file;
+ user->fd = fd;
+ user->all_threads = all_threads;
+ user->chain = chain;
+ user->interp = tstate->interp;
+ user->enabled = 1;
+
+ Py_RETURN_NONE;
+}
+
+static int
+faulthandler_unregister(user_signal_t *user, int signum)
+{
+ if (!user->enabled)
+ return 0;
+ user->enabled = 0;
+#ifdef HAVE_SIGACTION
+ (void)sigaction(signum, &user->previous, NULL);
+#else
+ (void)signal(signum, user->previous);
+#endif
+ user->fd = -1;
+ return 1;
+}
+
+static PyObject*
+faulthandler_unregister_py(PyObject *self, PyObject *args)
+{
+ int signum;
+ user_signal_t *user;
+ int change;
+
+ if (!PyArg_ParseTuple(args, "i:unregister", &signum))
+ return NULL;
+
+ if (!check_signum(signum))
+ return NULL;
+
+ if (user_signals == NULL)
+ Py_RETURN_FALSE;
+
+ user = &user_signals[signum];
+ change = faulthandler_unregister(user, signum);
+ Py_CLEAR(user->file);
+ return PyBool_FromLong(change);
+}
+#endif /* FAULTHANDLER_USER */
+
+
+static void
+faulthandler_suppress_crash_report(void)
+{
+#ifdef MS_WINDOWS
+ UINT mode;
+
+ /* Configure Windows to not display the Windows Error Reporting dialog */
+ mode = SetErrorMode(SEM_NOGPFAULTERRORBOX);
+ SetErrorMode(mode | SEM_NOGPFAULTERRORBOX);
+#endif
+
+#ifdef HAVE_SYS_RESOURCE_H
+ struct rlimit rl;
+
+ /* Disable creation of core dump */
+    if (getrlimit(RLIMIT_CORE, &rl) == 0) {
+ rl.rlim_cur = 0;
+ setrlimit(RLIMIT_CORE, &rl);
+ }
+#endif
+
+#ifdef _MSC_VER
+ /* Visual Studio: configure abort() to not display an error message nor
+ open a popup asking to report the fault. */
+ _set_abort_behavior(0, _WRITE_ABORT_MSG | _CALL_REPORTFAULT);
+#endif
+}
+
+static PyObject *
+faulthandler_read_null(PyObject *self, PyObject *args)
+{
+ volatile int *x;
+ volatile int y;
+
+ faulthandler_suppress_crash_report();
+ x = NULL;
+ y = *x;
+ return PyLong_FromLong(y);
+
+}
+
+static void
+faulthandler_raise_sigsegv(void)
+{
+ faulthandler_suppress_crash_report();
+#if defined(MS_WINDOWS)
+ /* For SIGSEGV, faulthandler_fatal_error() restores the previous signal
+ handler and then gives back the execution flow to the program (without
+ explicitly calling the previous error handler). In a normal case, the
+ SIGSEGV was raised by the kernel because of a fault, and so if the
+ program retries to execute the same instruction, the fault will be
+ raised again.
+
+ Here the fault is simulated by a fake SIGSEGV signal raised by the
+   application. We have to raise SIGSEGV at least twice: once for
+ faulthandler_fatal_error(), and one more time for the previous signal
+ handler. */
+ while(1)
+ raise(SIGSEGV);
+#else
+ raise(SIGSEGV);
+#endif
+}
+
+static PyObject *
+faulthandler_sigsegv(PyObject *self, PyObject *args)
+{
+ int release_gil = 0;
+ if (!PyArg_ParseTuple(args, "|i:_read_null", &release_gil))
+ return NULL;
+
+ if (release_gil) {
+ Py_BEGIN_ALLOW_THREADS
+ faulthandler_raise_sigsegv();
+ Py_END_ALLOW_THREADS
+ } else {
+ faulthandler_raise_sigsegv();
+ }
+ Py_RETURN_NONE;
+}
+
+static PyObject *
+faulthandler_sigfpe(PyObject *self, PyObject *args)
+{
+ /* Do an integer division by zero: raise a SIGFPE on Intel CPU, but not on
+ PowerPC. Use volatile to disable compile-time optimizations. */
+ volatile int x = 1, y = 0, z;
+ faulthandler_suppress_crash_report();
+ z = x / y;
+ /* If the division by zero didn't raise a SIGFPE (e.g. on PowerPC),
+ raise it manually. */
+ raise(SIGFPE);
+    /* This line is never reached, but we pretend to do something with z
+ to silence a compiler warning. */
+ return PyLong_FromLong(z);
+}
+
+static PyObject *
+faulthandler_sigabrt(PyObject *self, PyObject *args)
+{
+ faulthandler_suppress_crash_report();
+ abort();
+ Py_RETURN_NONE;
+}
+
+static PyObject *
+faulthandler_raise_signal(PyObject *self, PyObject *args)
+{
+ int signum, err;
+
+    if (!PyArg_ParseTuple(args, "i:raise_signal", &signum))
+ return NULL;
+
+ faulthandler_suppress_crash_report();
+
+ err = raise(signum);
+ if (err)
+ return PyErr_SetFromErrno(PyExc_OSError);
+
+ if (PyErr_CheckSignals() < 0)
+ return NULL;
+
+ Py_RETURN_NONE;
+}
+
+static PyObject *
+faulthandler_fatal_error_py(PyObject *self, PyObject *args)
+{
+ char *message;
+#if PY_MAJOR_VERSION >= 3
+ if (!PyArg_ParseTuple(args, "y:_fatal_error", &message))
+ return NULL;
+#else
+ if (!PyArg_ParseTuple(args, "s:fatal_error", &message))
+ return NULL;
+#endif
+ faulthandler_suppress_crash_report();
+ Py_FatalError(message);
+ Py_RETURN_NONE;
+}
+
+
+#ifdef __INTEL_COMPILER
+ /* Issue #23654: Turn off ICC's tail call optimization for the
+ * stack_overflow generator. ICC turns the recursive tail call into
+ * a loop. */
+# pragma intel optimization_level 0
+#endif
+static
+Py_uintptr_t
+stack_overflow(Py_uintptr_t min_sp, Py_uintptr_t max_sp, size_t *depth)
+{
+    /* allocate 1 MiB on the stack at each call */
+ unsigned char buffer[1024*1024];
+ Py_uintptr_t sp = (Py_uintptr_t)&buffer;
+ *depth += 1;
+ if (sp < min_sp || max_sp < sp) {
+ return sp;
+ }
+ memset(buffer, (unsigned char)*depth, sizeof(buffer));
+ return stack_overflow(min_sp, max_sp, depth) + buffer[0];
+}
+
+static PyObject *
+faulthandler_stack_overflow(PyObject *self)
+{
+ size_t depth, size;
+ Py_uintptr_t sp = (Py_uintptr_t)&depth;
+ Py_uintptr_t min_sp;
+ Py_uintptr_t max_sp;
+ Py_uintptr_t stop;
+
+ faulthandler_suppress_crash_report();
+ depth = 0;
+ if (sp > STACK_OVERFLOW_MAX_SIZE)
+ min_sp = sp - STACK_OVERFLOW_MAX_SIZE;
+ else
+ min_sp = 0;
+ max_sp = sp + STACK_OVERFLOW_MAX_SIZE;
+ stop = stack_overflow(min_sp, max_sp, &depth);
+ if (sp < stop)
+ size = stop - sp;
+ else
+ size = sp - stop;
+ PyErr_Format(PyExc_RuntimeError,
+ "unable to raise a stack overflow (allocated %zu bytes "
+ "on the stack, %zu recursive calls)",
+ size, depth);
+ return NULL;
+}
+
+#if PY_MAJOR_VERSION >= 3
+static int
+faulthandler_traverse(PyObject *module, visitproc visit, void *arg)
+{
+#ifdef FAULTHANDLER_USER
+ unsigned int signum;
+#endif
+
+#ifdef FAULTHANDLER_LATER
+ Py_VISIT(fault_alarm.file);
+#endif
+#ifdef FAULTHANDLER_USER
+ if (user_signals != NULL) {
+ for (signum=0; signum < NSIG; signum++)
+ Py_VISIT(user_signals[signum].file);
+ }
+#endif
+ Py_VISIT(fatal_error.file);
+ return 0;
+}
+#endif
+
+#ifdef MS_WINDOWS
+static PyObject *
+faulthandler_raise_exception(PyObject *self, PyObject *args)
+{
+ unsigned int code, flags = 0;
+ if (!PyArg_ParseTuple(args, "I|I:_raise_exception", &code, &flags))
+ return NULL;
+ RaiseException(code, flags, 0, NULL);
+ Py_RETURN_NONE;
+}
+#endif
+
+PyDoc_STRVAR(module_doc,
+"faulthandler module.");
+
+static PyMethodDef module_methods[] = {
+ {"enable",
+ (PyCFunction)faulthandler_enable, METH_VARARGS|METH_KEYWORDS,
+ PyDoc_STR("enable(file=sys.stderr, all_threads=True): "
+ "enable the fault handler")},
+ {"disable", (PyCFunction)faulthandler_disable_py, METH_NOARGS,
+ PyDoc_STR("disable(): disable the fault handler")},
+ {"is_enabled", (PyCFunction)faulthandler_is_enabled, METH_NOARGS,
+ PyDoc_STR("is_enabled()->bool: check if the handler is enabled")},
+ {"dump_traceback",
+ (PyCFunction)faulthandler_dump_traceback_py, METH_VARARGS|METH_KEYWORDS,
+ PyDoc_STR("dump_traceback(file=sys.stderr, all_threads=True): "
+ "dump the traceback of the current thread, or of all threads "
+ "if all_threads is True, into file")},
+#ifdef FAULTHANDLER_LATER
+ {"dump_traceback_later",
+ (PyCFunction)faulthandler_dump_traceback_later, METH_VARARGS|METH_KEYWORDS,
+ PyDoc_STR("dump_traceback_later(timeout, repeat=False, file=sys.stderrn, exit=False):\n"
+ "dump the traceback of all threads in timeout seconds,\n"
+ "or each timeout seconds if repeat is True. If exit is True, "
+ "call _exit(1) which is not safe.")},
+ {"cancel_dump_traceback_later",
+ (PyCFunction)faulthandler_cancel_dump_traceback_later_py, METH_NOARGS,
+ PyDoc_STR("cancel_dump_traceback_later():\ncancel the previous call "
+ "to dump_traceback_later().")},
+#endif
+
+#ifdef FAULTHANDLER_USER
+ {"register",
+ (PyCFunction)faulthandler_register_py, METH_VARARGS|METH_KEYWORDS,
+ PyDoc_STR("register(signum, file=sys.stderr, all_threads=True, chain=False): "
+ "register an handler for the signal 'signum': dump the "
+ "traceback of the current thread, or of all threads if "
+ "all_threads is True, into file")},
+ {"unregister",
+     faulthandler_unregister_py, METH_VARARGS,
+ PyDoc_STR("unregister(signum): unregister the handler of the signal "
+ "'signum' registered by register()")},
+#endif
+
+ {"_read_null", faulthandler_read_null, METH_NOARGS,
+ PyDoc_STR("_read_null(): read from NULL, raise "
+ "a SIGSEGV or SIGBUS signal depending on the platform")},
+ {"_sigsegv", faulthandler_sigsegv, METH_VARARGS,
+ PyDoc_STR("_sigsegv(release_gil=False): raise a SIGSEGV signal")},
+ {"_sigabrt", faulthandler_sigabrt, METH_NOARGS,
+ PyDoc_STR("_sigabrt(): raise a SIGABRT signal")},
+ {"_sigfpe", (PyCFunction)faulthandler_sigfpe, METH_NOARGS,
+ PyDoc_STR("_sigfpe(): raise a SIGFPE signal")},
+ {"_raise_signal", (PyCFunction)faulthandler_raise_signal, METH_VARARGS,
+ PyDoc_STR("raise_signal(signum): raise a signal")},
+ {"_fatal_error", faulthandler_fatal_error_py, METH_VARARGS,
+ PyDoc_STR("_fatal_error(message): call Py_FatalError(message)")},
+ {"_stack_overflow", (PyCFunction)faulthandler_stack_overflow, METH_NOARGS,
+ PyDoc_STR("_stack_overflow(): recursive call to raise a stack overflow")},
+#ifdef MS_WINDOWS
+ {"_raise_exception", faulthandler_raise_exception, METH_VARARGS,
+ PyDoc_STR("raise_exception(code, flags=0): Call RaiseException(code, flags).")},
+#endif
+ {NULL, NULL} /* sentinel */
+};
+
+#if PY_MAJOR_VERSION >= 3
+static struct PyModuleDef module_def = {
+ PyModuleDef_HEAD_INIT,
+ "faulthandler",
+ module_doc,
+    0, /* non-negative size so that the module can be unloaded */
+ module_methods,
+ NULL,
+ faulthandler_traverse,
+ NULL,
+ NULL
+};
+#endif
+
+
+PyMODINIT_FUNC
+#if PY_MAJOR_VERSION >= 3
+PyInit_faulthandler(void)
+#else
+initfaulthandler(void)
+#endif
+{
+ PyObject *m, *version;
+#ifdef HAVE_SIGALTSTACK
+ int err;
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+ m = PyModule_Create(&module_def);
+#else
+ m = Py_InitModule3("faulthandler", module_methods, module_doc);
+#endif
+ if (m == NULL) {
+#if PY_MAJOR_VERSION >= 3
+ return NULL;
+#else
+ return;
+#endif
+ }
+
+#ifdef MS_WINDOWS
+ /* RaiseException() flags */
+ if (PyModule_AddIntConstant(m, "_EXCEPTION_NONCONTINUABLE",
+ EXCEPTION_NONCONTINUABLE))
+ goto error;
+ if (PyModule_AddIntConstant(m, "_EXCEPTION_NONCONTINUABLE_EXCEPTION",
+ EXCEPTION_NONCONTINUABLE_EXCEPTION))
+ goto error;
+#endif
+
+
+#ifdef HAVE_SIGALTSTACK
+    /* Try to allocate an alternate stack for the faulthandler signal handler to
+ * be able to allocate memory on the stack, even on a stack overflow. If it
+ * fails, ignore the error. */
+ stack.ss_flags = 0;
+ /* bpo-21131: allocate dedicated stack of SIGSTKSZ*2 bytes, instead of just
+ SIGSTKSZ bytes. Calling the previous signal handler in faulthandler
+ signal handler uses more than SIGSTKSZ bytes of stack memory on some
+ platforms. */
+ stack.ss_size = SIGSTKSZ * 2;
+ stack.ss_sp = PyMem_Malloc(stack.ss_size);
+ if (stack.ss_sp != NULL) {
+ err = sigaltstack(&stack, NULL);
+ if (err) {
+ PyMem_Free(stack.ss_sp);
+ stack.ss_sp = NULL;
+ }
+ }
+#endif
+
+ (void)Py_AtExit(faulthandler_unload);
+
+ version = Py_BuildValue("(ii)", VERSION >> 8, VERSION & 0xFF);
+ if (version == NULL)
+ goto error;
+ PyModule_AddObject(m, "version", version);
+
+#if PY_MAJOR_VERSION >= 3
+ version = PyUnicode_FromFormat("%i.%i", VERSION >> 8, VERSION & 0xFF);
+#else
+ version = PyString_FromFormat("%i.%i", VERSION >> 8, VERSION & 0xFF);
+#endif
+ if (version == NULL)
+ goto error;
+ PyModule_AddObject(m, "__version__", version);
+
+#if PY_MAJOR_VERSION >= 3
+ return m;
+#else
+ return;
+#endif
+
+error:
+#if PY_MAJOR_VERSION >= 3
+ Py_DECREF(m);
+ return NULL;
+#else
+ return;
+#endif
+}
+
+static void
+faulthandler_unload(void)
+{
+#ifdef FAULTHANDLER_USER
+ unsigned int signum;
+#endif
+
+#ifdef FAULTHANDLER_LATER
+ /* later */
+ alarm(0);
+ if (fault_alarm.header != NULL) {
+ free(fault_alarm.header);
+ fault_alarm.header = NULL;
+ }
+ /* Don't call Py_CLEAR(fault_alarm.file): this function is called too late,
+       by Py_AtExit(). Destroying a Python object here raises strange errors. */
+#endif
+#ifdef FAULTHANDLER_USER
+ /* user */
+ if (user_signals != NULL) {
+ for (signum=0; signum < NSIG; signum++) {
+ faulthandler_unregister(&user_signals[signum], signum);
+ /* Don't call Py_CLEAR(user->file): this function is called too late,
+               by Py_AtExit(). Destroying a Python object here raises strange
+ errors. */
+ }
+ PyMem_Free(user_signals);
+ user_signals = NULL;
+ }
+#endif
+
+    /* don't release fatal_error.file: faulthandler_unload()
+       is called too late, by Py_AtExit() */
+ fatal_error.file = NULL;
+ faulthandler_disable();
+#ifdef HAVE_SIGALTSTACK
+ if (stack.ss_sp != NULL) {
+ PyMem_Free(stack.ss_sp);
+ stack.ss_sp = NULL;
+ }
+#endif
+}
diff --git a/contrib/deprecated/python/faulthandler/traceback.c b/contrib/deprecated/python/faulthandler/traceback.c
new file mode 100644
index 0000000000..6bf727da65
--- /dev/null
+++ b/contrib/deprecated/python/faulthandler/traceback.c
@@ -0,0 +1,326 @@
+#ifdef __gnu_linux__
+# include <sys/prctl.h>
+#endif
+
+#include "Python.h"
+#include <frameobject.h>
+
+#if PY_MAJOR_VERSION >= 3
+# define PYSTRING_CHECK PyUnicode_Check
+#else
+# define PYSTRING_CHECK PyString_Check
+#endif
+
+#define PUTS(fd, str) _Py_write_noraise(fd, str, (int)strlen(str))
+#define MAX_STRING_LENGTH 500
+#define MAX_FRAME_DEPTH 100
+#define MAX_NTHREADS 100
+
+/* Write count bytes of buf into fd.
+ *
+ * On success, return the number of bytes written, which can be less than
+ * count (including 0). On error, set errno and return -1.
+ *
+ * When interrupted by a signal (write() fails with EINTR), retry the syscall
+ * without calling the Python signal handler. */
+Py_ssize_t
+_Py_write_noraise(int fd, const char *buf, size_t count)
+{
+ Py_ssize_t res;
+
+ do {
+#ifdef MS_WINDOWS
+ assert(count < INT_MAX);
+ res = write(fd, buf, (int)count);
+#else
+ res = write(fd, buf, count);
+#endif
+ /* retry write() if it was interrupted by a signal */
+ } while (res < 0 && errno == EINTR);
+
+ return res;
+}
+
+/* Reverse a string. For example, "abcd" becomes "dcba".
+
+ This function is signal safe. */
+
+void
+reverse_string(char *text, const size_t len)
+{
+ char tmp;
+ size_t i, j;
+ if (len == 0)
+ return;
+ for (i=0, j=len-1; i < j; i++, j--) {
+ tmp = text[i];
+ text[i] = text[j];
+ text[j] = tmp;
+ }
+}
+
+/* Format an integer in range [0; 999999999] to decimal,
+ and write it into the file fd.
+
+ This function is signal safe. */
+
+void
+dump_decimal(int fd, int value)
+{
+ char buffer[10];
+ int len;
+ if (value < 0 || 999999999 < value)
+ return;
+ len = 0;
+ do {
+ buffer[len] = '0' + (value % 10);
+ value /= 10;
+ len++;
+ } while (value);
+ reverse_string(buffer, len);
+ _Py_write_noraise(fd, buffer, len);
+}
+
+/* Format an integer in range [0; 0xffffffff] to hexadecimal of 'bytes'*2 digits,
+ and write it into the file fd.
+
+ This function is signal safe. */
+
+void
+_Py_dump_hexadecimal(int fd, unsigned long value, size_t bytes)
+{
+ const char *hexdigits = "0123456789abcdef";
+ size_t width = bytes * 2;
+ size_t len;
+ char buffer[sizeof(unsigned long) * 2 + 1];
+ len = 0;
+ do {
+ buffer[len] = hexdigits[value & 15];
+ value >>= 4;
+ len++;
+ } while (len < width || value);
+ reverse_string(buffer, len);
+ _Py_write_noraise(fd, buffer, len);
+}
+
+/* Write a unicode object into the file fd using ascii+backslashreplace.
+
+ This function is signal safe. */
+
+static void
+dump_ascii(int fd, PyObject *text)
+{
+ Py_ssize_t i, size;
+ int truncated;
+ unsigned long ch;
+#if PY_MAJOR_VERSION >= 3
+ Py_UNICODE *u;
+
+ size = PyUnicode_GET_SIZE(text);
+ u = PyUnicode_AS_UNICODE(text);
+#else
+ char *s;
+
+ size = PyString_GET_SIZE(text);
+ s = PyString_AS_STRING(text);
+#endif
+
+ if (MAX_STRING_LENGTH < size) {
+ size = MAX_STRING_LENGTH;
+ truncated = 1;
+ }
+ else
+ truncated = 0;
+
+#if PY_MAJOR_VERSION >= 3
+ for (i=0; i < size; i++, u++) {
+ ch = *u;
+ if (' ' <= ch && ch < 0x7f) {
+ /* printable ASCII character */
+ char c = (char)ch;
+ _Py_write_noraise(fd, &c, 1);
+ }
+ else if (ch <= 0xff) {
+ PUTS(fd, "\\x");
+ _Py_dump_hexadecimal(fd, ch, 1);
+ }
+ else
+#ifdef Py_UNICODE_WIDE
+ if (ch <= 0xffff)
+#endif
+ {
+ PUTS(fd, "\\u");
+ _Py_dump_hexadecimal(fd, ch, 2);
+#ifdef Py_UNICODE_WIDE
+ }
+ else {
+ PUTS(fd, "\\U");
+ _Py_dump_hexadecimal(fd, ch, 4);
+#endif
+ }
+ }
+#else
+ for (i=0; i < size; i++, s++) {
+ ch = *s;
+ if (' ' <= ch && ch <= 126) {
+ /* printable ASCII character */
+ _Py_write_noraise(fd, s, 1);
+ }
+ else {
+ PUTS(fd, "\\x");
+ _Py_dump_hexadecimal(fd, ch, 1);
+ }
+ }
+#endif
+ if (truncated)
+ PUTS(fd, "...");
+}
+
+/* Write a frame into the file fd: "File "xxx", line xxx in xxx".
+
+ This function is signal safe. */
+
+static void
+dump_frame(int fd, PyFrameObject *frame)
+{
+ PyCodeObject *code;
+ int lineno;
+
+ code = frame->f_code;
+ PUTS(fd, " File ");
+ if (code != NULL && code->co_filename != NULL
+ && PYSTRING_CHECK(code->co_filename))
+ {
+ PUTS(fd, "\"");
+ dump_ascii(fd, code->co_filename);
+ PUTS(fd, "\"");
+ } else {
+ PUTS(fd, "???");
+ }
+
+#if (PY_MAJOR_VERSION <= 2 && PY_MINOR_VERSION < 7) \
+|| (PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION < 2)
+ /* PyFrame_GetLineNumber() was introduced in Python 2.7.0 and 3.2.0 */
+ lineno = PyCode_Addr2Line(code, frame->f_lasti);
+#else
+ lineno = PyFrame_GetLineNumber(frame);
+#endif
+ PUTS(fd, ", line ");
+ dump_decimal(fd, lineno);
+ PUTS(fd, " in ");
+
+ if (code != NULL && code->co_name != NULL
+ && PYSTRING_CHECK(code->co_name))
+ dump_ascii(fd, code->co_name);
+ else
+ PUTS(fd, "???");
+
+ PUTS(fd, "\n");
+}
+
+static void
+dump_traceback(int fd, PyThreadState *tstate, int write_header)
+{
+ PyFrameObject *frame;
+ unsigned int depth;
+
+ if (write_header)
+ PUTS(fd, "Stack (most recent call first):\n");
+
+ frame = _PyThreadState_GetFrame(tstate);
+ if (frame == NULL)
+ return;
+
+ depth = 0;
+ while (frame != NULL) {
+ if (MAX_FRAME_DEPTH <= depth) {
+ PUTS(fd, " ...\n");
+ break;
+ }
+ if (!PyFrame_Check(frame))
+ break;
+ dump_frame(fd, frame);
+ frame = frame->f_back;
+ depth++;
+ }
+}
+
+/* Dump the traceback of a Python thread into fd. Use write() to write the
+ traceback and retry if write() is interrupted by a signal (failed with
+ EINTR), but don't call the Python signal handler.
+
+   The caller is responsible for calling PyErr_CheckSignals() to run Python signal
+ handlers if signals were received. */
+void
+_Py_DumpTraceback(int fd, PyThreadState *tstate)
+{
+ dump_traceback(fd, tstate, 1);
+}
+
+/* Write the thread identifier into the file 'fd': "Current thread 0xHHHH:\n" if
+ is_current is true, "Thread 0xHHHH:\n" otherwise.
+
+ This function is signal safe. */
+
+static void
+write_thread_id(int fd, PyThreadState *tstate, int is_current)
+{
+ if (is_current)
+ PUTS(fd, "Current thread 0x");
+ else
+ PUTS(fd, "Thread 0x");
+ _Py_dump_hexadecimal(fd, (unsigned long)tstate->thread_id, sizeof(unsigned long));
+
+#ifdef __gnu_linux__
+ /* Linux only, get and print thread name */
+ static char thread_name[16];
+ if (0 == prctl(PR_GET_NAME, (unsigned long) thread_name, 0, 0, 0)) {
+ if (0 != strlen(thread_name)) {
+ PUTS(fd, " <");
+ PUTS(fd, thread_name);
+ PUTS(fd, ">");
+ }
+ }
+#endif
+
+ PUTS(fd, " (most recent call first):\n");
+}
+
+/* Dump the traceback of all Python threads into fd. Use write() to write the
+ traceback and retry if write() is interrupted by a signal (failed with
+ EINTR), but don't call the Python signal handler.
+
+   The caller is responsible for calling PyErr_CheckSignals() to run Python signal
+ handlers if signals were received. */
+const char*
+_Py_DumpTracebackThreads(int fd, PyInterpreterState *interp,
+ PyThreadState *current_thread)
+{
+ PyThreadState *tstate;
+ unsigned int nthreads;
+
+ /* Get the current interpreter from the current thread */
+ tstate = PyInterpreterState_ThreadHead(interp);
+ if (tstate == NULL)
+ return "unable to get the thread head state";
+
+ /* Dump the traceback of each thread */
+ tstate = PyInterpreterState_ThreadHead(interp);
+ nthreads = 0;
+ do
+ {
+ if (nthreads != 0)
+ PUTS(fd, "\n");
+ if (nthreads >= MAX_NTHREADS) {
+ PUTS(fd, "...\n");
+ break;
+ }
+ write_thread_id(fd, tstate, tstate == current_thread);
+ dump_traceback(fd, tstate, 0);
+ tstate = PyThreadState_Next(tstate);
+ nthreads++;
+ } while (tstate != NULL);
+
+ return NULL;
+}
+
diff --git a/contrib/deprecated/python/faulthandler/ya.make b/contrib/deprecated/python/faulthandler/ya.make
new file mode 100644
index 0000000000..b7bd4f199f
--- /dev/null
+++ b/contrib/deprecated/python/faulthandler/ya.make
@@ -0,0 +1,30 @@
+PY2_LIBRARY()
+
+VERSION(3.2)
+
+LICENSE(BSD-2-Clause)
+
+NO_COMPILER_WARNINGS()
+
+NO_LINT()
+
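+# USE_SIGINFO selects the sigaction()-based fatal-signal handler that also
+# dumps siginfo_t details (si_signo, si_code, si_pid, ...) before the
+# Python traceback.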
+CFLAGS(
+ -DUSE_SIGINFO
+)
+
+SRCS(
+ faulthandler.c
+ traceback.c
+)
+
+PY_REGISTER(
+ faulthandler
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/deprecated/python/faulthandler/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+)
+
+END()
diff --git a/contrib/deprecated/python/futures/.dist-info/METADATA b/contrib/deprecated/python/futures/.dist-info/METADATA
new file mode 100644
index 0000000000..71e4777828
--- /dev/null
+++ b/contrib/deprecated/python/futures/.dist-info/METADATA
@@ -0,0 +1,57 @@
+Metadata-Version: 2.1
+Name: futures
+Version: 3.4.0
+Summary: Backport of the concurrent.futures package from Python 3
+Home-page: https://github.com/agronholm/pythonfutures
+Author: Brian Quinlan
+Author-email: brian@sweetapp.com
+Maintainer: Alex Grönholm
+Maintainer-email: alex.gronholm@nextday.fi
+License: PSF
+Platform: UNKNOWN
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 2 :: Only
+Requires-Python: >=2.6, <3
+
+.. image:: https://travis-ci.org/agronholm/pythonfutures.svg?branch=master
+ :target: https://travis-ci.org/agronholm/pythonfutures
+ :alt: Build Status
+
+This is a backport of the `concurrent.futures`_ standard library module to Python 2.
+
+It **does not** work on Python 3 due to Python 2 syntax being used in the codebase.
+Python 3 users should not attempt to install it, since the package is already included in the
+standard library.
+
+To conditionally require this library only on Python 2, you can do this in your ``setup.py``:
+
+.. code-block:: python
+
+ setup(
+ ...
+ extras_require={
+ ':python_version == "2.7"': ['futures']
+ }
+ )
+
+Or, using the newer syntax:
+
+.. code-block:: python
+
+ setup(
+ ...
+        install_requires=[
+            'futures; python_version == "2.7"'
+        ]
+ )
+
+.. warning:: The ``ProcessPoolExecutor`` class has known (unfixable) problems on Python 2 and
+ should not be relied on for mission critical work. Please see `Issue 29 <https://github.com/agronholm/pythonfutures/issues/29>`_ and `upstream bug report <https://bugs.python.org/issue9205>`_ for more details.
+
+.. _concurrent.futures: https://docs.python.org/library/concurrent.futures.html
+
+
diff --git a/contrib/deprecated/python/futures/.dist-info/top_level.txt b/contrib/deprecated/python/futures/.dist-info/top_level.txt
new file mode 100644
index 0000000000..e4d7bdcbdb
--- /dev/null
+++ b/contrib/deprecated/python/futures/.dist-info/top_level.txt
@@ -0,0 +1 @@
+concurrent
diff --git a/contrib/deprecated/python/futures/LICENSE b/contrib/deprecated/python/futures/LICENSE
new file mode 100644
index 0000000000..a8d65b16b6
--- /dev/null
+++ b/contrib/deprecated/python/futures/LICENSE
@@ -0,0 +1,48 @@
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python
+alone or in any derivative version, provided, however, that PSF's
+License Agreement and PSF's notice of copyright, i.e., "Copyright (c)
+2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation; All Rights
+Reserved" are retained in Python alone or in any derivative version
+prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
diff --git a/contrib/deprecated/python/futures/README.rst b/contrib/deprecated/python/futures/README.rst
new file mode 100644
index 0000000000..13050037f7
--- /dev/null
+++ b/contrib/deprecated/python/futures/README.rst
@@ -0,0 +1,36 @@
+.. image:: https://travis-ci.org/agronholm/pythonfutures.svg?branch=master
+ :target: https://travis-ci.org/agronholm/pythonfutures
+ :alt: Build Status
+
+This is a backport of the `concurrent.futures`_ standard library module to Python 2.
+
+It **does not** work on Python 3 due to Python 2 syntax being used in the codebase.
+Python 3 users should not attempt to install it, since the package is already included in the
+standard library.
+
+To conditionally require this library only on Python 2, you can do this in your ``setup.py``:
+
+.. code-block:: python
+
+ setup(
+ ...
+ extras_require={
+ ':python_version == "2.7"': ['futures']
+ }
+ )
+
+Or, using the newer syntax:
+
+.. code-block:: python
+
+ setup(
+ ...
+        install_requires=[
+            'futures; python_version == "2.7"'
+        ]
+ )
+
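+A minimal usage sketch (the API mirrors Python 3's ``concurrent.futures``):
+
+.. code-block:: python
+
+    from concurrent.futures import ThreadPoolExecutor, as_completed
+
+    with ThreadPoolExecutor(max_workers=4) as executor:
+        futures = [executor.submit(pow, 2, n) for n in range(8)]
+        for future in as_completed(futures):
+            print(future.result())
+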
+.. warning:: The ``ProcessPoolExecutor`` class has known (unfixable) problems on Python 2 and
+ should not be relied on for mission critical work. Please see `Issue 29 <https://github.com/agronholm/pythonfutures/issues/29>`_ and `upstream bug report <https://bugs.python.org/issue9205>`_ for more details.
+
+.. _concurrent.futures: https://docs.python.org/library/concurrent.futures.html
diff --git a/contrib/deprecated/python/futures/concurrent/__init__.py b/contrib/deprecated/python/futures/concurrent/__init__.py
new file mode 100644
index 0000000000..b36383a610
--- /dev/null
+++ b/contrib/deprecated/python/futures/concurrent/__init__.py
@@ -0,0 +1,3 @@
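+# Make 'concurrent' an extendable package so this backport can coexist with
+# other distributions that also install modules under the 'concurrent' name.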
+from pkgutil import extend_path
+
+__path__ = extend_path(__path__, __name__)
diff --git a/contrib/deprecated/python/futures/concurrent/futures/__init__.py b/contrib/deprecated/python/futures/concurrent/futures/__init__.py
new file mode 100644
index 0000000000..428b14bdfe
--- /dev/null
+++ b/contrib/deprecated/python/futures/concurrent/futures/__init__.py
@@ -0,0 +1,23 @@
+# Copyright 2009 Brian Quinlan. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Execute computations asynchronously using threads or processes."""
+
+__author__ = 'Brian Quinlan (brian@sweetapp.com)'
+
+from concurrent.futures._base import (FIRST_COMPLETED,
+ FIRST_EXCEPTION,
+ ALL_COMPLETED,
+ CancelledError,
+ TimeoutError,
+ Future,
+ Executor,
+ wait,
+ as_completed)
+from concurrent.futures.thread import ThreadPoolExecutor
+
+try:
+ from concurrent.futures.process import ProcessPoolExecutor
+except ImportError:
+ # some platforms don't have multiprocessing
+ pass
diff --git a/contrib/deprecated/python/futures/concurrent/futures/_base.py b/contrib/deprecated/python/futures/concurrent/futures/_base.py
new file mode 100644
index 0000000000..f7f525f6f1
--- /dev/null
+++ b/contrib/deprecated/python/futures/concurrent/futures/_base.py
@@ -0,0 +1,673 @@
+# Copyright 2009 Brian Quinlan. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+import collections
+import logging
+import threading
+import itertools
+import time
+import types
+
+__author__ = 'Brian Quinlan (brian@sweetapp.com)'
+
+FIRST_COMPLETED = 'FIRST_COMPLETED'
+FIRST_EXCEPTION = 'FIRST_EXCEPTION'
+ALL_COMPLETED = 'ALL_COMPLETED'
+_AS_COMPLETED = '_AS_COMPLETED'
+
+# Possible future states (for internal use by the futures package).
+PENDING = 'PENDING'
+RUNNING = 'RUNNING'
+# The future was cancelled by the user...
+CANCELLED = 'CANCELLED'
+# ...and _Waiter.add_cancelled() was called by a worker.
+CANCELLED_AND_NOTIFIED = 'CANCELLED_AND_NOTIFIED'
+FINISHED = 'FINISHED'
+
+_FUTURE_STATES = [
+ PENDING,
+ RUNNING,
+ CANCELLED,
+ CANCELLED_AND_NOTIFIED,
+ FINISHED
+]
+
+_STATE_TO_DESCRIPTION_MAP = {
+ PENDING: "pending",
+ RUNNING: "running",
+ CANCELLED: "cancelled",
+ CANCELLED_AND_NOTIFIED: "cancelled",
+ FINISHED: "finished"
+}
+
+# Logger for internal use by the futures package.
+LOGGER = logging.getLogger("concurrent.futures")
+
+class Error(Exception):
+ """Base class for all future-related exceptions."""
+ pass
+
+class CancelledError(Error):
+ """The Future was cancelled."""
+ pass
+
+class TimeoutError(Error):
+ """The operation exceeded the given deadline."""
+ pass
+
+class _Waiter(object):
+ """Provides the event that wait() and as_completed() block on."""
+ def __init__(self):
+ self.event = threading.Event()
+ self.finished_futures = []
+
+ def add_result(self, future):
+ self.finished_futures.append(future)
+
+ def add_exception(self, future):
+ self.finished_futures.append(future)
+
+ def add_cancelled(self, future):
+ self.finished_futures.append(future)
+
+class _AsCompletedWaiter(_Waiter):
+ """Used by as_completed()."""
+
+ def __init__(self):
+ super(_AsCompletedWaiter, self).__init__()
+ self.lock = threading.Lock()
+
+ def add_result(self, future):
+ with self.lock:
+ super(_AsCompletedWaiter, self).add_result(future)
+ self.event.set()
+
+ def add_exception(self, future):
+ with self.lock:
+ super(_AsCompletedWaiter, self).add_exception(future)
+ self.event.set()
+
+ def add_cancelled(self, future):
+ with self.lock:
+ super(_AsCompletedWaiter, self).add_cancelled(future)
+ self.event.set()
+
+class _FirstCompletedWaiter(_Waiter):
+ """Used by wait(return_when=FIRST_COMPLETED)."""
+
+ def add_result(self, future):
+ super(_FirstCompletedWaiter, self).add_result(future)
+ self.event.set()
+
+ def add_exception(self, future):
+ super(_FirstCompletedWaiter, self).add_exception(future)
+ self.event.set()
+
+ def add_cancelled(self, future):
+ super(_FirstCompletedWaiter, self).add_cancelled(future)
+ self.event.set()
+
+class _AllCompletedWaiter(_Waiter):
+ """Used by wait(return_when=FIRST_EXCEPTION and ALL_COMPLETED)."""
+
+ def __init__(self, num_pending_calls, stop_on_exception):
+ self.num_pending_calls = num_pending_calls
+ self.stop_on_exception = stop_on_exception
+ self.lock = threading.Lock()
+ super(_AllCompletedWaiter, self).__init__()
+
+ def _decrement_pending_calls(self):
+ with self.lock:
+ self.num_pending_calls -= 1
+ if not self.num_pending_calls:
+ self.event.set()
+
+ def add_result(self, future):
+ super(_AllCompletedWaiter, self).add_result(future)
+ self._decrement_pending_calls()
+
+ def add_exception(self, future):
+ super(_AllCompletedWaiter, self).add_exception(future)
+ if self.stop_on_exception:
+ self.event.set()
+ else:
+ self._decrement_pending_calls()
+
+ def add_cancelled(self, future):
+ super(_AllCompletedWaiter, self).add_cancelled(future)
+ self._decrement_pending_calls()
+
+class _AcquireFutures(object):
+ """A context manager that does an ordered acquire of Future conditions."""
+
+ def __init__(self, futures):
+ self.futures = sorted(futures, key=id)
+
+ def __enter__(self):
+ for future in self.futures:
+ future._condition.acquire()
+
+ def __exit__(self, *args):
+ for future in self.futures:
+ future._condition.release()
+
+def _create_and_install_waiters(fs, return_when):
+ if return_when == _AS_COMPLETED:
+ waiter = _AsCompletedWaiter()
+ elif return_when == FIRST_COMPLETED:
+ waiter = _FirstCompletedWaiter()
+ else:
+ pending_count = sum(
+ f._state not in [CANCELLED_AND_NOTIFIED, FINISHED] for f in fs)
+
+ if return_when == FIRST_EXCEPTION:
+ waiter = _AllCompletedWaiter(pending_count, stop_on_exception=True)
+ elif return_when == ALL_COMPLETED:
+ waiter = _AllCompletedWaiter(pending_count, stop_on_exception=False)
+ else:
+ raise ValueError("Invalid return condition: %r" % return_when)
+
+ for f in fs:
+ f._waiters.append(waiter)
+
+ return waiter
+
+
+def _yield_finished_futures(fs, waiter, ref_collect):
+ """
+ Iterate on the list *fs*, yielding finished futures one by one in
+ reverse order.
+ Before yielding a future, *waiter* is removed from its waiters
+ and the future is removed from each set in the collection of sets
+ *ref_collect*.
+
+ The aim of this function is to avoid keeping stale references after
+ the future is yielded and before the iterator resumes.
+ """
+ while fs:
+ f = fs[-1]
+ for futures_set in ref_collect:
+ futures_set.remove(f)
+ with f._condition:
+ f._waiters.remove(waiter)
+ del f
+ # Careful not to keep a reference to the popped value
+ yield fs.pop()
+
+
+def as_completed(fs, timeout=None):
+ """An iterator over the given futures that yields each as it completes.
+
+ Args:
+ fs: The sequence of Futures (possibly created by different Executors) to
+ iterate over.
+ timeout: The maximum number of seconds to wait. If None, then there
+ is no limit on the wait time.
+
+ Returns:
+ An iterator that yields the given Futures as they complete (finished or
+ cancelled). If any given Futures are duplicated, they will be returned
+ once.
+
+ Raises:
+ TimeoutError: If the entire result iterator could not be generated
+ before the given timeout.
+ """
+ if timeout is not None:
+ end_time = timeout + time.time()
+
+ fs = set(fs)
+ total_futures = len(fs)
+ with _AcquireFutures(fs):
+ finished = set(
+ f for f in fs
+ if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
+ pending = fs - finished
+ waiter = _create_and_install_waiters(fs, _AS_COMPLETED)
+ finished = list(finished)
+ try:
+ for f in _yield_finished_futures(finished, waiter,
+ ref_collect=(fs,)):
+ f = [f]
+ yield f.pop()
+
+ while pending:
+ if timeout is None:
+ wait_timeout = None
+ else:
+ wait_timeout = end_time - time.time()
+ if wait_timeout < 0:
+ raise TimeoutError(
+ '%d (of %d) futures unfinished' % (
+ len(pending), total_futures))
+
+ waiter.event.wait(wait_timeout)
+
+ with waiter.lock:
+ finished = waiter.finished_futures
+ waiter.finished_futures = []
+ waiter.event.clear()
+
+ # reverse to keep finishing order
+ finished.reverse()
+ for f in _yield_finished_futures(finished, waiter,
+ ref_collect=(fs, pending)):
+ f = [f]
+ yield f.pop()
+
+ finally:
+ # Remove waiter from unfinished futures
+ for f in fs:
+ with f._condition:
+ f._waiters.remove(waiter)
+
+DoneAndNotDoneFutures = collections.namedtuple(
+ 'DoneAndNotDoneFutures', 'done not_done')
+def wait(fs, timeout=None, return_when=ALL_COMPLETED):
+ """Wait for the futures in the given sequence to complete.
+
+ Args:
+ fs: The sequence of Futures (possibly created by different Executors) to
+ wait upon.
+ timeout: The maximum number of seconds to wait. If None, then there
+ is no limit on the wait time.
+ return_when: Indicates when this function should return. The options
+ are:
+
+ FIRST_COMPLETED - Return when any future finishes or is
+ cancelled.
+ FIRST_EXCEPTION - Return when any future finishes by raising an
+ exception. If no future raises an exception
+ then it is equivalent to ALL_COMPLETED.
+ ALL_COMPLETED - Return when all futures finish or are cancelled.
+
+ Returns:
+ A named 2-tuple of sets. The first set, named 'done', contains the
+        futures that completed (finished or were cancelled) before the wait
+ completed. The second set, named 'not_done', contains uncompleted
+ futures.
+ """
+ with _AcquireFutures(fs):
+ done = set(f for f in fs
+ if f._state in [CANCELLED_AND_NOTIFIED, FINISHED])
+ not_done = set(fs) - done
+
+ if (return_when == FIRST_COMPLETED) and done:
+ return DoneAndNotDoneFutures(done, not_done)
+ elif (return_when == FIRST_EXCEPTION) and done:
+ if any(f for f in done
+ if not f.cancelled() and f.exception() is not None):
+ return DoneAndNotDoneFutures(done, not_done)
+
+ if len(done) == len(fs):
+ return DoneAndNotDoneFutures(done, not_done)
+
+ waiter = _create_and_install_waiters(fs, return_when)
+
+ waiter.event.wait(timeout)
+ for f in fs:
+ with f._condition:
+ f._waiters.remove(waiter)
+
+ done.update(waiter.finished_futures)
+ return DoneAndNotDoneFutures(done, set(fs) - done)
+
+class Future(object):
+ """Represents the result of an asynchronous computation."""
+
+ def __init__(self):
+ """Initializes the future. Should not be called by clients."""
+ self._condition = threading.Condition()
+ self._state = PENDING
+ self._result = None
+ self._exception = None
+ self._traceback = None
+ self._waiters = []
+ self._done_callbacks = []
+
+ def _invoke_callbacks(self):
+ for callback in self._done_callbacks:
+ try:
+ callback(self)
+ except Exception:
+ LOGGER.exception('exception calling callback for %r', self)
+ except BaseException:
+ # Explicitly let all other new-style exceptions through so
+ # that we can catch all old-style exceptions with a simple
+ # "except:" clause below.
+ #
+ # All old-style exception objects are instances of
+ # types.InstanceType, but "except types.InstanceType:" does
+ # not catch old-style exceptions for some reason. Thus, the
+ # only way to catch all old-style exceptions without catching
+ # any new-style exceptions is to filter out the new-style
+ # exceptions, which all derive from BaseException.
+ raise
+ except:
+ # Because of the BaseException clause above, this handler only
+ # executes for old-style exception objects.
+ LOGGER.exception('exception calling callback for %r', self)
+
+ def __repr__(self):
+ with self._condition:
+ if self._state == FINISHED:
+ if self._exception:
+ return '<%s at %#x state=%s raised %s>' % (
+ self.__class__.__name__,
+ id(self),
+ _STATE_TO_DESCRIPTION_MAP[self._state],
+ self._exception.__class__.__name__)
+ else:
+ return '<%s at %#x state=%s returned %s>' % (
+ self.__class__.__name__,
+ id(self),
+ _STATE_TO_DESCRIPTION_MAP[self._state],
+ self._result.__class__.__name__)
+ return '<%s at %#x state=%s>' % (
+ self.__class__.__name__,
+ id(self),
+ _STATE_TO_DESCRIPTION_MAP[self._state])
+
+ def cancel(self):
+ """Cancel the future if possible.
+
+ Returns True if the future was cancelled, False otherwise. A future
+ cannot be cancelled if it is running or has already completed.
+ """
+ with self._condition:
+ if self._state in [RUNNING, FINISHED]:
+ return False
+
+ if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+ return True
+
+ self._state = CANCELLED
+ self._condition.notify_all()
+
+ self._invoke_callbacks()
+ return True
+
+ def cancelled(self):
+ """Return True if the future was cancelled."""
+ with self._condition:
+ return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]
+
+ def running(self):
+ """Return True if the future is currently executing."""
+ with self._condition:
+ return self._state == RUNNING
+
+ def done(self):
+ """Return True of the future was cancelled or finished executing."""
+ with self._condition:
+ return self._state in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]
+
+ def __get_result(self):
+ if self._exception:
+ if isinstance(self._exception, types.InstanceType):
+ # The exception is an instance of an old-style class, which
+ # means type(self._exception) returns types.ClassType instead
+ # of the exception's actual class type.
+ exception_type = self._exception.__class__
+ else:
+ exception_type = type(self._exception)
+ raise exception_type, self._exception, self._traceback
+ else:
+ return self._result
+
+ def add_done_callback(self, fn):
+ """Attaches a callable that will be called when the future finishes.
+
+ Args:
+ fn: A callable that will be called with this future as its only
+ argument when the future completes or is cancelled. The callable
+ will always be called by a thread in the same process in which
+ it was added. If the future has already completed or been
+ cancelled then the callable will be called immediately. These
+ callables are called in the order that they were added.
+ """
+ with self._condition:
+ if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]:
+ self._done_callbacks.append(fn)
+ return
+ fn(self)
+
+ def result(self, timeout=None):
+ """Return the result of the call that the future represents.
+
+ Args:
+ timeout: The number of seconds to wait for the result if the future
+ isn't done. If None, then there is no limit on the wait time.
+
+ Returns:
+ The result of the call that the future represents.
+
+ Raises:
+ CancelledError: If the future was cancelled.
+ TimeoutError: If the future didn't finish executing before the given
+ timeout.
+ Exception: If the call raised then that exception will be raised.
+ """
+ with self._condition:
+ if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+ raise CancelledError()
+ elif self._state == FINISHED:
+ return self.__get_result()
+
+ self._condition.wait(timeout)
+
+ if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+ raise CancelledError()
+ elif self._state == FINISHED:
+ return self.__get_result()
+ else:
+ raise TimeoutError()
+
+ def exception_info(self, timeout=None):
+ """Return a tuple of (exception, traceback) raised by the call that the
+ future represents.
+
+ Args:
+ timeout: The number of seconds to wait for the exception if the
+ future isn't done. If None, then there is no limit on the wait
+ time.
+
+ Returns:
+ The exception raised by the call that the future represents or None
+ if the call completed without raising.
+
+ Raises:
+ CancelledError: If the future was cancelled.
+ TimeoutError: If the future didn't finish executing before the given
+ timeout.
+ """
+ with self._condition:
+ if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+ raise CancelledError()
+ elif self._state == FINISHED:
+ return self._exception, self._traceback
+
+ self._condition.wait(timeout)
+
+ if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+ raise CancelledError()
+ elif self._state == FINISHED:
+ return self._exception, self._traceback
+ else:
+ raise TimeoutError()
+
+ def exception(self, timeout=None):
+ """Return the exception raised by the call that the future represents.
+
+ Args:
+ timeout: The number of seconds to wait for the exception if the
+ future isn't done. If None, then there is no limit on the wait
+ time.
+
+ Returns:
+ The exception raised by the call that the future represents or None
+ if the call completed without raising.
+
+ Raises:
+ CancelledError: If the future was cancelled.
+ TimeoutError: If the future didn't finish executing before the given
+ timeout.
+ """
+ return self.exception_info(timeout)[0]
+
+ # The following methods should only be used by Executors and in tests.
+ def set_running_or_notify_cancel(self):
+ """Mark the future as running or process any cancel notifications.
+
+ Should only be used by Executor implementations and unit tests.
+
+ If the future has been cancelled (cancel() was called and returned
+        True) then any threads waiting on the future completing (through calls
+ to as_completed() or wait()) are notified and False is returned.
+
+ If the future was not cancelled then it is put in the running state
+ (future calls to running() will return True) and True is returned.
+
+ This method should be called by Executor implementations before
+ executing the work associated with this future. If this method returns
+ False then the work should not be executed.
+
+ Returns:
+ False if the Future was cancelled, True otherwise.
+
+ Raises:
+ RuntimeError: if this method was already called or if set_result()
+ or set_exception() was called.
+ """
+ with self._condition:
+ if self._state == CANCELLED:
+ self._state = CANCELLED_AND_NOTIFIED
+ for waiter in self._waiters:
+ waiter.add_cancelled(self)
+ # self._condition.notify_all() is not necessary because
+ # self.cancel() triggers a notification.
+ return False
+ elif self._state == PENDING:
+ self._state = RUNNING
+ return True
+ else:
+ LOGGER.critical('Future %s in unexpected state: %s',
+ id(self),
+ self._state)
+ raise RuntimeError('Future in unexpected state')
+
+ def set_result(self, result):
+ """Sets the return value of work associated with the future.
+
+ Should only be used by Executor implementations and unit tests.
+ """
+ with self._condition:
+ self._result = result
+ self._state = FINISHED
+ for waiter in self._waiters:
+ waiter.add_result(self)
+ self._condition.notify_all()
+ self._invoke_callbacks()
+
+ def set_exception_info(self, exception, traceback):
+ """Sets the result of the future as being the given exception
+ and traceback.
+
+ Should only be used by Executor implementations and unit tests.
+ """
+ with self._condition:
+ self._exception = exception
+ self._traceback = traceback
+ self._state = FINISHED
+ for waiter in self._waiters:
+ waiter.add_exception(self)
+ self._condition.notify_all()
+ self._invoke_callbacks()
+
+ def set_exception(self, exception):
+ """Sets the result of the future as being the given exception.
+
+ Should only be used by Executor implementations and unit tests.
+ """
+ self.set_exception_info(exception, None)
+
+class Executor(object):
+ """This is an abstract base class for concrete asynchronous executors."""
+
+ def submit(self, fn, *args, **kwargs):
+ """Submits a callable to be executed with the given arguments.
+
+ Schedules the callable to be executed as fn(*args, **kwargs) and returns
+ a Future instance representing the execution of the callable.
+
+ Returns:
+ A Future representing the given call.
+ """
+ raise NotImplementedError()
+
+ def map(self, fn, *iterables, **kwargs):
+ """Returns an iterator equivalent to map(fn, iter).
+
+ Args:
+ fn: A callable that will take as many arguments as there are
+ passed iterables.
+ timeout: The maximum number of seconds to wait. If None, then there
+ is no limit on the wait time.
+
+ Returns:
+            An iterator equivalent to: map(fn, *iterables) but the calls may
+ be evaluated out-of-order.
+
+ Raises:
+ TimeoutError: If the entire result iterator could not be generated
+ before the given timeout.
+ Exception: If fn(*args) raises for any values.
+ """
+ timeout = kwargs.get('timeout')
+ if timeout is not None:
+ end_time = timeout + time.time()
+
+ fs = [self.submit(fn, *args) for args in itertools.izip(*iterables)]
+
+ # Yield must be hidden in closure so that the futures are submitted
+ # before the first iterator value is required.
+ def result_iterator():
+ try:
+ # reverse to keep finishing order
+ fs.reverse()
+ while fs:
+ # Careful not to keep a reference to the popped future
+ if timeout is None:
+ yield fs.pop().result()
+ else:
+ yield fs.pop().result(end_time - time.time())
+ finally:
+ for future in fs:
+ future.cancel()
+ return result_iterator()
+
+ def shutdown(self, wait=True):
+ """Clean-up the resources associated with the Executor.
+
+        It is safe to call this method several times; however, no other
+        methods can be called after this one.
+
+ Args:
+ wait: If True then shutdown will not return until all running
+ futures have finished executing and the resources used by the
+ executor have been reclaimed.
+ """
+ pass
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.shutdown(wait=True)
+ return False
+
+
+class BrokenExecutor(RuntimeError):
+ """
+    Raised when an executor has become non-functional after a severe failure.
+ """
diff --git a/contrib/deprecated/python/futures/concurrent/futures/process.py b/contrib/deprecated/python/futures/concurrent/futures/process.py
new file mode 100644
index 0000000000..bb327faf05
--- /dev/null
+++ b/contrib/deprecated/python/futures/concurrent/futures/process.py
@@ -0,0 +1,363 @@
+# Copyright 2009 Brian Quinlan. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Implements ProcessPoolExecutor.
+
+The following diagram and text describe the data-flow through the system:
+
+|======================= In-process =====================|== Out-of-process ==|
+
++----------+ +----------+ +--------+ +-----------+ +---------+
+| | => | Work Ids | => | | => | Call Q | => | |
+| | +----------+ | | +-----------+ | |
+| | | ... | | | | ... | | |
+| | | 6 | | | | 5, call() | | |
+| | | 7 | | | | ... | | |
+| Process | | ... | | Local | +-----------+ | Process |
+| Pool | +----------+ | Worker | | #1..n |
+| Executor | | Thread | | |
+| | +----------- + | | +-----------+ | |
+| | <=> | Work Items | <=> | | <= | Result Q | <= | |
+| | +------------+ | | +-----------+ | |
+| | | 6: call() | | | | ... | | |
+| | | future | | | | 4, result | | |
+| | | ... | | | | 3, except | | |
++----------+ +------------+ +--------+ +-----------+ +---------+
+
+Executor.submit() called:
+- creates a uniquely numbered _WorkItem and adds it to the "Work Items" dict
+- adds the id of the _WorkItem to the "Work Ids" queue
+
+Local worker thread:
+- reads work ids from the "Work Ids" queue and looks up the corresponding
+ WorkItem from the "Work Items" dict: if the work item has been cancelled then
+ it is simply removed from the dict, otherwise it is repackaged as a
+ _CallItem and put in the "Call Q". New _CallItems are put in the "Call Q"
+ until "Call Q" is full. NOTE: the size of the "Call Q" is kept small because
+ calls placed in the "Call Q" can no longer be cancelled with Future.cancel().
+- reads _ResultItems from "Result Q", updates the future stored in the
+ "Work Items" dict and deletes the dict entry
+
+Process #1..n:
+- reads _CallItems from "Call Q", executes the calls, and puts the resulting
+ _ResultItems in "Request Q"
+"""
+
+import atexit
+from concurrent.futures import _base
+import Queue as queue
+import multiprocessing
+import threading
+import weakref
+import sys
+
+__author__ = 'Brian Quinlan (brian@sweetapp.com)'
+
+# Workers are created as daemon threads and processes. This is done to allow the
+# interpreter to exit when there are still idle processes in a
+# ProcessPoolExecutor's process pool (i.e. shutdown() was not called). However,
+# allowing workers to die with the interpreter has two undesirable properties:
+# - The workers would still be running during interpreter shutdown,
+# meaning that they would fail in unpredictable ways.
+# - The workers could be killed while evaluating a work item, which could
+# be bad if the callable being evaluated has external side-effects e.g.
+# writing to a file.
+#
+# To work around this problem, an exit handler is installed which tells the
+# workers to exit when their work queues are empty and then waits until the
+# threads/processes finish.
+
+_threads_queues = weakref.WeakKeyDictionary()
+_shutdown = False
+
+def _python_exit():
+ global _shutdown
+ _shutdown = True
+ items = list(_threads_queues.items()) if _threads_queues else ()
+ for t, q in items:
+ q.put(None)
+ for t, q in items:
+ t.join(sys.maxint)
+
+# Controls how many more calls than processes will be queued in the call queue.
+# A smaller number will mean that processes spend more time idle waiting for
+# work while a larger number will make Future.cancel() succeed less frequently
+# (Futures in the call queue cannot be cancelled).
+EXTRA_QUEUED_CALLS = 1
+
+class _WorkItem(object):
+ def __init__(self, future, fn, args, kwargs):
+ self.future = future
+ self.fn = fn
+ self.args = args
+ self.kwargs = kwargs
+
+class _ResultItem(object):
+ def __init__(self, work_id, exception=None, result=None):
+ self.work_id = work_id
+ self.exception = exception
+ self.result = result
+
+class _CallItem(object):
+ def __init__(self, work_id, fn, args, kwargs):
+ self.work_id = work_id
+ self.fn = fn
+ self.args = args
+ self.kwargs = kwargs
+
+def _process_worker(call_queue, result_queue):
+ """Evaluates calls from call_queue and places the results in result_queue.
+
+ This worker is run in a separate process.
+
+ Args:
+ call_queue: A multiprocessing.Queue of _CallItems that will be read and
+ evaluated by the worker.
+        result_queue: A multiprocessing.Queue of _ResultItems that will be
+            written to by the worker.
+ """
+ while True:
+ call_item = call_queue.get(block=True)
+ if call_item is None:
+ # Wake up queue management thread
+ result_queue.put(None)
+ return
+ try:
+ r = call_item.fn(*call_item.args, **call_item.kwargs)
+ except:
+ e = sys.exc_info()[1]
+ result_queue.put(_ResultItem(call_item.work_id,
+ exception=e))
+ else:
+ result_queue.put(_ResultItem(call_item.work_id,
+ result=r))
+
+def _add_call_item_to_queue(pending_work_items,
+ work_ids,
+ call_queue):
+ """Fills call_queue with _WorkItems from pending_work_items.
+
+ This function never blocks.
+
+ Args:
+ pending_work_items: A dict mapping work ids to _WorkItems e.g.
+ {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
+ work_ids: A queue.Queue of work ids e.g. Queue([5, 6, ...]). Work ids
+ are consumed and the corresponding _WorkItems from
+ pending_work_items are transformed into _CallItems and put in
+ call_queue.
+ call_queue: A multiprocessing.Queue that will be filled with _CallItems
+ derived from _WorkItems.
+ """
+ while True:
+ if call_queue.full():
+ return
+ try:
+ work_id = work_ids.get(block=False)
+ except queue.Empty:
+ return
+ else:
+ work_item = pending_work_items[work_id]
+
+ if work_item.future.set_running_or_notify_cancel():
+ call_queue.put(_CallItem(work_id,
+ work_item.fn,
+ work_item.args,
+ work_item.kwargs),
+ block=True)
+ else:
+ del pending_work_items[work_id]
+ continue
+
+def _queue_management_worker(executor_reference,
+ processes,
+ pending_work_items,
+ work_ids_queue,
+ call_queue,
+ result_queue):
+ """Manages the communication between this process and the worker processes.
+
+ This function is run in a local thread.
+
+ Args:
+ executor_reference: A weakref.ref to the ProcessPoolExecutor that owns
+ this thread. Used to determine if the ProcessPoolExecutor has been
+ garbage collected and that this function can exit.
+        processes: A list of the multiprocessing.Process instances used as
+ workers.
+ pending_work_items: A dict mapping work ids to _WorkItems e.g.
+ {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
+ work_ids_queue: A queue.Queue of work ids e.g. Queue([5, 6, ...]).
+ call_queue: A multiprocessing.Queue that will be filled with _CallItems
+ derived from _WorkItems for processing by the process workers.
+ result_queue: A multiprocessing.Queue of _ResultItems generated by the
+ process workers.
+ """
+ nb_shutdown_processes = [0]
+ def shutdown_one_process():
+ """Tell a worker to terminate, which will in turn wake us again"""
+ call_queue.put(None)
+ nb_shutdown_processes[0] += 1
+ while True:
+ _add_call_item_to_queue(pending_work_items,
+ work_ids_queue,
+ call_queue)
+
+ result_item = result_queue.get(block=True)
+ if result_item is not None:
+ work_item = pending_work_items[result_item.work_id]
+ del pending_work_items[result_item.work_id]
+
+ if result_item.exception:
+ work_item.future.set_exception(result_item.exception)
+ else:
+ work_item.future.set_result(result_item.result)
+ # Delete references to object. See issue16284
+ del work_item
+ # Check whether we should start shutting down.
+ executor = executor_reference()
+ # No more work items can be added if:
+ # - The interpreter is shutting down OR
+ # - The executor that owns this worker has been collected OR
+ # - The executor that owns this worker has been shutdown.
+ if _shutdown or executor is None or executor._shutdown_thread:
+ # Since no new work items can be added, it is safe to shutdown
+ # this thread if there are no pending work items.
+ if not pending_work_items:
+ while nb_shutdown_processes[0] < len(processes):
+ shutdown_one_process()
+ # If .join() is not called on the created processes then
+ # some multiprocessing.Queue methods may deadlock on Mac OS
+ # X.
+ for p in processes:
+ p.join()
+ call_queue.close()
+ return
+ del executor
+
+_system_limits_checked = False
+_system_limited = None
+def _check_system_limits():
+ global _system_limits_checked, _system_limited
+ if _system_limits_checked:
+ if _system_limited:
+ raise NotImplementedError(_system_limited)
+ _system_limits_checked = True
+ try:
+ import os
+ nsems_max = os.sysconf("SC_SEM_NSEMS_MAX")
+ except (AttributeError, ValueError):
+ # sysconf not available or setting not available
+ return
+ if nsems_max == -1:
+        # indeterminate limit, assume that the limit is determined
+ # by available memory only
+ return
+ if nsems_max >= 256:
+ # minimum number of semaphores available
+ # according to POSIX
+ return
+ _system_limited = "system provides too few semaphores (%d available, 256 necessary)" % nsems_max
+ raise NotImplementedError(_system_limited)
+
+
+class ProcessPoolExecutor(_base.Executor):
+ def __init__(self, max_workers=None):
+ """Initializes a new ProcessPoolExecutor instance.
+
+ Args:
+ max_workers: The maximum number of processes that can be used to
+ execute the given calls. If None or not given then as many
+ worker processes will be created as the machine has processors.
+ """
+ _check_system_limits()
+
+ if max_workers is None:
+ self._max_workers = multiprocessing.cpu_count()
+ else:
+ if max_workers <= 0:
+ raise ValueError("max_workers must be greater than 0")
+
+ self._max_workers = max_workers
+
+ # Make the call queue slightly larger than the number of processes to
+ # prevent the worker processes from idling. But don't make it too big
+ # because futures in the call queue cannot be cancelled.
+ self._call_queue = multiprocessing.Queue(self._max_workers +
+ EXTRA_QUEUED_CALLS)
+ self._result_queue = multiprocessing.Queue()
+ self._work_ids = queue.Queue()
+ self._queue_management_thread = None
+ self._processes = set()
+
+ # Shutdown is a two-step process.
+ self._shutdown_thread = False
+ self._shutdown_lock = threading.Lock()
+ self._queue_count = 0
+ self._pending_work_items = {}
+
+ def _start_queue_management_thread(self):
+ # When the executor gets lost, the weakref callback will wake up
+ # the queue management thread.
+ def weakref_cb(_, q=self._result_queue):
+ q.put(None)
+ if self._queue_management_thread is None:
+ self._queue_management_thread = threading.Thread(
+ target=_queue_management_worker,
+ args=(weakref.ref(self, weakref_cb),
+ self._processes,
+ self._pending_work_items,
+ self._work_ids,
+ self._call_queue,
+ self._result_queue))
+ self._queue_management_thread.daemon = True
+ self._queue_management_thread.start()
+ _threads_queues[self._queue_management_thread] = self._result_queue
+
+ def _adjust_process_count(self):
+ for _ in range(len(self._processes), self._max_workers):
+ p = multiprocessing.Process(
+ target=_process_worker,
+ args=(self._call_queue,
+ self._result_queue))
+ p.start()
+ self._processes.add(p)
+
+ def submit(self, fn, *args, **kwargs):
+ with self._shutdown_lock:
+ if self._shutdown_thread:
+ raise RuntimeError('cannot schedule new futures after shutdown')
+
+ f = _base.Future()
+ w = _WorkItem(f, fn, args, kwargs)
+
+ self._pending_work_items[self._queue_count] = w
+ self._work_ids.put(self._queue_count)
+ self._queue_count += 1
+ # Wake up queue management thread
+ self._result_queue.put(None)
+
+ self._start_queue_management_thread()
+ self._adjust_process_count()
+ return f
+ submit.__doc__ = _base.Executor.submit.__doc__
+
+ def shutdown(self, wait=True):
+ with self._shutdown_lock:
+ self._shutdown_thread = True
+ if self._queue_management_thread:
+ # Wake up queue management thread
+ self._result_queue.put(None)
+ if wait:
+ self._queue_management_thread.join(sys.maxint)
+ # To reduce the risk of opening too many files, remove references to
+ # objects that use file descriptors.
+ self._queue_management_thread = None
+ self._call_queue = None
+ self._result_queue = None
+ self._processes = None
+ shutdown.__doc__ = _base.Executor.shutdown.__doc__
+
+atexit.register(_python_exit)
diff --git a/contrib/deprecated/python/futures/concurrent/futures/thread.py b/contrib/deprecated/python/futures/concurrent/futures/thread.py
new file mode 100644
index 0000000000..b9685cefe5
--- /dev/null
+++ b/contrib/deprecated/python/futures/concurrent/futures/thread.py
@@ -0,0 +1,203 @@
+# Copyright 2009 Brian Quinlan. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Implements ThreadPoolExecutor."""
+
+import atexit
+from concurrent.futures import _base
+import itertools
+import Queue as queue
+import threading
+import weakref
+import sys
+
+try:
+ from multiprocessing import cpu_count
+except ImportError:
+ # some platforms don't have multiprocessing
+ def cpu_count():
+ return None
+
+__author__ = 'Brian Quinlan (brian@sweetapp.com)'
+
+# Workers are created as daemon threads. This is done to allow the interpreter
+# to exit when there are still idle threads in a ThreadPoolExecutor's thread
+# pool (i.e. shutdown() was not called). However, allowing workers to die with
+# the interpreter has two undesirable properties:
+# - The workers would still be running during interpreter shutdown,
+# meaning that they would fail in unpredictable ways.
+# - The workers could be killed while evaluating a work item, which could
+# be bad if the callable being evaluated has external side-effects e.g.
+# writing to a file.
+#
+# To work around this problem, an exit handler is installed which tells the
+# workers to exit when their work queues are empty and then waits until the
+# threads finish.
+
+_threads_queues = weakref.WeakKeyDictionary()
+_shutdown = False
+
+def _python_exit():
+ global _shutdown
+ _shutdown = True
+ items = list(_threads_queues.items()) if _threads_queues else ()
+ for t, q in items:
+ q.put(None)
+ for t, q in items:
+ t.join(sys.maxint)
+
+atexit.register(_python_exit)
+
+class _WorkItem(object):
+ def __init__(self, future, fn, args, kwargs):
+ self.future = future
+ self.fn = fn
+ self.args = args
+ self.kwargs = kwargs
+
+ def run(self):
+ if not self.future.set_running_or_notify_cancel():
+ return
+
+ try:
+ result = self.fn(*self.args, **self.kwargs)
+ except:
+ e, tb = sys.exc_info()[1:]
+ self.future.set_exception_info(e, tb)
+ else:
+ self.future.set_result(result)
+
+def _worker(executor_reference, work_queue, initializer, initargs):
+ if initializer is not None:
+ try:
+ initializer(*initargs)
+ except BaseException:
+ _base.LOGGER.critical('Exception in initializer:', exc_info=True)
+ executor = executor_reference()
+ if executor is not None:
+ executor._initializer_failed()
+ return
+ try:
+ while True:
+ work_item = work_queue.get(block=True)
+ if work_item is not None:
+ work_item.run()
+ # Delete references to object. See issue16284
+ del work_item
+
+ # attempt to increment idle count
+ executor = executor_reference()
+ if executor is not None:
+ executor._idle_semaphore.release()
+ del executor
+ continue
+ executor = executor_reference()
+ # Exit if:
+ # - The interpreter is shutting down OR
+ # - The executor that owns the worker has been collected OR
+ # - The executor that owns the worker has been shutdown.
+ if _shutdown or executor is None or executor._shutdown:
+ # Notice other workers
+ work_queue.put(None)
+ return
+ del executor
+ except:
+ _base.LOGGER.critical('Exception in worker', exc_info=True)
+
+
+class BrokenThreadPool(_base.BrokenExecutor):
+ """
+    Raised when a worker thread in a ThreadPoolExecutor fails to initialize.
+ """
+
+
+class ThreadPoolExecutor(_base.Executor):
+
+ # Used to assign unique thread names when thread_name_prefix is not supplied.
+ _counter = itertools.count().next
+
+ def __init__(self, max_workers=None, thread_name_prefix='', initializer=None, initargs=()):
+ """Initializes a new ThreadPoolExecutor instance.
+
+ Args:
+ max_workers: The maximum number of threads that can be used to
+ execute the given calls.
+            thread_name_prefix: An optional name prefix to give our threads.
+            initializer: A callable used to initialize worker threads.
+            initargs: A tuple of arguments to pass to the initializer.
+        """
+ if max_workers is None:
+ # Use this number because ThreadPoolExecutor is often
+ # used to overlap I/O instead of CPU work.
+ max_workers = (cpu_count() or 1) * 5
+ if max_workers <= 0:
+ raise ValueError("max_workers must be greater than 0")
+
+ self._max_workers = max_workers
+ self._initializer = initializer
+ self._initargs = initargs
+ self._work_queue = queue.Queue()
+ self._idle_semaphore = threading.Semaphore(0)
+ self._threads = set()
+ self._broken = False
+ self._shutdown = False
+ self._shutdown_lock = threading.Lock()
+ self._thread_name_prefix = (thread_name_prefix or
+ ("ThreadPoolExecutor-%d" % self._counter()))
+
+ def submit(self, fn, *args, **kwargs):
+ with self._shutdown_lock:
+ if self._broken:
+ raise BrokenThreadPool(self._broken)
+ if self._shutdown:
+ raise RuntimeError('cannot schedule new futures after shutdown')
+
+ f = _base.Future()
+ w = _WorkItem(f, fn, args, kwargs)
+
+ self._work_queue.put(w)
+ self._adjust_thread_count()
+ return f
+ submit.__doc__ = _base.Executor.submit.__doc__
+
+ def _adjust_thread_count(self):
+ # if idle threads are available, don't spin new threads
+ if self._idle_semaphore.acquire(False):
+ return
+
+ # When the executor gets lost, the weakref callback will wake up
+ # the worker threads.
+ def weakref_cb(_, q=self._work_queue):
+ q.put(None)
+
+ num_threads = len(self._threads)
+ if num_threads < self._max_workers:
+ thread_name = '%s_%d' % (self._thread_name_prefix or self,
+ num_threads)
+ t = threading.Thread(name=thread_name, target=_worker,
+ args=(weakref.ref(self, weakref_cb),
+ self._work_queue, self._initializer, self._initargs))
+ t.daemon = True
+ t.start()
+ self._threads.add(t)
+ _threads_queues[t] = self._work_queue
+
+ def _initializer_failed(self):
+ with self._shutdown_lock:
+ self._broken = ('A thread initializer failed, the thread pool '
+ 'is not usable anymore')
+ # Drain work queue and mark pending futures failed
+ while True:
+ try:
+ work_item = self._work_queue.get_nowait()
+ except queue.Empty:
+ break
+ if work_item is not None:
+ work_item.future.set_exception(BrokenThreadPool(self._broken))
+
+ def shutdown(self, wait=True):
+ with self._shutdown_lock:
+ self._shutdown = True
+ self._work_queue.put(None)
+ if wait:
+ for t in self._threads:
+ t.join(sys.maxint)
+ shutdown.__doc__ = _base.Executor.shutdown.__doc__
diff --git a/contrib/deprecated/python/futures/ya.make b/contrib/deprecated/python/futures/ya.make
new file mode 100644
index 0000000000..cbb045f609
--- /dev/null
+++ b/contrib/deprecated/python/futures/ya.make
@@ -0,0 +1,26 @@
+# Generated by devtools/yamaker (pypi).
+
+PY2_LIBRARY()
+
+VERSION(3.4.0)
+
+LICENSE(PSF-2.0)
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ concurrent/__init__.py
+ concurrent/futures/__init__.py
+ concurrent/futures/_base.py
+ concurrent/futures/process.py
+ concurrent/futures/thread.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/deprecated/python/futures/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+)
+
+END()
diff --git a/contrib/deprecated/python/ipaddress/.dist-info/METADATA b/contrib/deprecated/python/ipaddress/.dist-info/METADATA
new file mode 100644
index 0000000000..422aa4bf87
--- /dev/null
+++ b/contrib/deprecated/python/ipaddress/.dist-info/METADATA
@@ -0,0 +1,25 @@
+Metadata-Version: 2.1
+Name: ipaddress
+Version: 1.0.23
+Summary: IPv4/IPv6 manipulation library
+Home-page: https://github.com/phihag/ipaddress
+Author: Philipp Hagemeister
+Author-email: phihag@phihag.de
+License: Python Software Foundation License
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+
+Port of the 3.3+ ipaddress module to 2.6, 2.7, 3.2
+
+
diff --git a/contrib/deprecated/python/ipaddress/.dist-info/top_level.txt b/contrib/deprecated/python/ipaddress/.dist-info/top_level.txt
new file mode 100644
index 0000000000..8e9db03d52
--- /dev/null
+++ b/contrib/deprecated/python/ipaddress/.dist-info/top_level.txt
@@ -0,0 +1 @@
+ipaddress
diff --git a/contrib/deprecated/python/ipaddress/LICENSE b/contrib/deprecated/python/ipaddress/LICENSE
new file mode 100644
index 0000000000..41bd16ba6c
--- /dev/null
+++ b/contrib/deprecated/python/ipaddress/LICENSE
@@ -0,0 +1,50 @@
+This package is a modified version of cpython's ipaddress module.
+It is therefore distributed under the PSF license, as follows:
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are
+retained in Python alone or in any derivative version prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
diff --git a/contrib/deprecated/python/ipaddress/README.md b/contrib/deprecated/python/ipaddress/README.md
new file mode 100644
index 0000000000..0a12bc0577
--- /dev/null
+++ b/contrib/deprecated/python/ipaddress/README.md
@@ -0,0 +1,28 @@
+ipaddress
+=========
+
+Python 3.3+'s [ipaddress](http://docs.python.org/dev/library/ipaddress) for Python 2.6, 2.7, 3.2.
+
+This repository tracks the latest version from cpython, e.g. ipaddress from cpython 3.8 as of writing.
+
+Note that just like in Python 3.3+ you must use character strings and not byte strings for textual IP address representations:
+
+```python
+>>> from __future__ import unicode_literals
+>>> ipaddress.ip_address('1.2.3.4')
+IPv4Address(u'1.2.3.4')
+```
+or
+```python
+>>> ipaddress.ip_address(u'1.2.3.4')
+IPv4Address(u'1.2.3.4')
+```
+but not:
+```python
+>>> ipaddress.ip_address(b'1.2.3.4')
+Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ File "ipaddress.py", line 163, in ip_address
+ ' a unicode object?' % address)
+ipaddress.AddressValueError: '1.2.3.4' does not appear to be an IPv4 or IPv6 address. Did you pass in a bytes (str in Python 2) instead of a unicode object?
+```
diff --git a/contrib/deprecated/python/ipaddress/ipaddress.py b/contrib/deprecated/python/ipaddress/ipaddress.py
new file mode 100644
index 0000000000..3e6f9e499c
--- /dev/null
+++ b/contrib/deprecated/python/ipaddress/ipaddress.py
@@ -0,0 +1,2420 @@
+# Copyright 2007 Google Inc.
+# Licensed to PSF under a Contributor Agreement.
+
+"""A fast, lightweight IPv4/IPv6 manipulation library in Python.
+
+This library is used to create/poke/manipulate IPv4 and IPv6 addresses
+and networks.
+
+"""
+
+from __future__ import unicode_literals
+
+
+import itertools
+import struct
+
+__version__ = '1.0.23'
+
+# Compatibility functions
+_compat_int_types = (int,)
+try:
+ _compat_int_types = (int, long)
+except NameError:
+ pass
+try:
+ _compat_str = unicode
+except NameError:
+ _compat_str = str
+ assert bytes != str
+if b'\0'[0] == 0: # Python 3 semantics
+ def _compat_bytes_to_byte_vals(byt):
+ return byt
+else:
+ def _compat_bytes_to_byte_vals(byt):
+ return [struct.unpack(b'!B', b)[0] for b in byt]
+try:
+ _compat_int_from_byte_vals = int.from_bytes
+except AttributeError:
+ def _compat_int_from_byte_vals(bytvals, endianess):
+ assert endianess == 'big'
+ res = 0
+ for bv in bytvals:
+ assert isinstance(bv, _compat_int_types)
+ res = (res << 8) + bv
+ return res
+
+
+def _compat_to_bytes(intval, length, endianess):
+ assert isinstance(intval, _compat_int_types)
+ assert endianess == 'big'
+ if length == 4:
+ if intval < 0 or intval >= 2 ** 32:
+ raise struct.error("integer out of range for 'I' format code")
+ return struct.pack(b'!I', intval)
+ elif length == 16:
+ if intval < 0 or intval >= 2 ** 128:
+ raise struct.error("integer out of range for 'QQ' format code")
+ return struct.pack(b'!QQ', intval >> 64, intval & 0xffffffffffffffff)
+ else:
+ raise NotImplementedError()
+
+
+if hasattr(int, 'bit_length'):
+    # Not int.bit_length, since that won't work in 2.7 where long exists
+ def _compat_bit_length(i):
+ return i.bit_length()
+else:
+ def _compat_bit_length(i):
+ for res in itertools.count():
+ if i >> res == 0:
+ return res
+
+
+def _compat_range(start, end, step=1):
+ assert step > 0
+ i = start
+ while i < end:
+ yield i
+ i += step
+
+
+class _TotalOrderingMixin(object):
+ __slots__ = ()
+
+ # Helper that derives the other comparison operations from
+ # __lt__ and __eq__
+ # We avoid functools.total_ordering because it doesn't handle
+ # NotImplemented correctly yet (http://bugs.python.org/issue10042)
+ def __eq__(self, other):
+ raise NotImplementedError
+
+ def __ne__(self, other):
+ equal = self.__eq__(other)
+ if equal is NotImplemented:
+ return NotImplemented
+ return not equal
+
+ def __lt__(self, other):
+ raise NotImplementedError
+
+ def __le__(self, other):
+ less = self.__lt__(other)
+ if less is NotImplemented or not less:
+ return self.__eq__(other)
+ return less
+
+ def __gt__(self, other):
+ less = self.__lt__(other)
+ if less is NotImplemented:
+ return NotImplemented
+ equal = self.__eq__(other)
+ if equal is NotImplemented:
+ return NotImplemented
+ return not (less or equal)
+
+ def __ge__(self, other):
+ less = self.__lt__(other)
+ if less is NotImplemented:
+ return NotImplemented
+ return not less
+
+
+IPV4LENGTH = 32
+IPV6LENGTH = 128
+
+
+class AddressValueError(ValueError):
+ """A Value Error related to the address."""
+
+
+class NetmaskValueError(ValueError):
+ """A Value Error related to the netmask."""
+
+
+def ip_address(address):
+ """Take an IP string/int and return an object of the correct type.
+
+ Args:
+ address: A string or integer, the IP address. Either IPv4 or
+ IPv6 addresses may be supplied; integers less than 2**32 will
+ be considered to be IPv4 by default.
+
+ Returns:
+ An IPv4Address or IPv6Address object.
+
+ Raises:
+ ValueError: if the *address* passed isn't either a v4 or a v6
+ address
+
+ """
+ try:
+ return IPv4Address(address)
+ except (AddressValueError, NetmaskValueError):
+ pass
+
+ try:
+ return IPv6Address(address)
+ except (AddressValueError, NetmaskValueError):
+ pass
+
+ if isinstance(address, bytes):
+ raise AddressValueError(
+ '%r does not appear to be an IPv4 or IPv6 address. '
+ 'Did you pass in a bytes (str in Python 2) instead of'
+ ' a unicode object?' % address)
+
+ raise ValueError('%r does not appear to be an IPv4 or IPv6 address' %
+ address)
+
+
+def ip_network(address, strict=True):
+ """Take an IP string/int and return an object of the correct type.
+
+ Args:
+ address: A string or integer, the IP network. Either IPv4 or
+ IPv6 networks may be supplied; integers less than 2**32 will
+ be considered to be IPv4 by default.
+
+ Returns:
+ An IPv4Network or IPv6Network object.
+
+ Raises:
+ ValueError: if the string passed isn't either a v4 or a v6
+ address. Or if the network has host bits set.
+
+ """
+ try:
+ return IPv4Network(address, strict)
+ except (AddressValueError, NetmaskValueError):
+ pass
+
+ try:
+ return IPv6Network(address, strict)
+ except (AddressValueError, NetmaskValueError):
+ pass
+
+ if isinstance(address, bytes):
+ raise AddressValueError(
+ '%r does not appear to be an IPv4 or IPv6 network. '
+ 'Did you pass in a bytes (str in Python 2) instead of'
+ ' a unicode object?' % address)
+
+ raise ValueError('%r does not appear to be an IPv4 or IPv6 network' %
+ address)
+
+
+def ip_interface(address):
+ """Take an IP string/int and return an object of the correct type.
+
+ Args:
+ address: A string or integer, the IP address. Either IPv4 or
+ IPv6 addresses may be supplied; integers less than 2**32 will
+ be considered to be IPv4 by default.
+
+ Returns:
+ An IPv4Interface or IPv6Interface object.
+
+ Raises:
+ ValueError: if the string passed isn't either a v4 or a v6
+ address.
+
+ Notes:
+ The IPv?Interface classes describe an Address on a particular
+ Network, so they're basically a combination of both the Address
+ and Network classes.
+
+ """
+ try:
+ return IPv4Interface(address)
+ except (AddressValueError, NetmaskValueError):
+ pass
+
+ try:
+ return IPv6Interface(address)
+ except (AddressValueError, NetmaskValueError):
+ pass
+
+ raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' %
+ address)
+
+
+def v4_int_to_packed(address):
+ """Represent an address as 4 packed bytes in network (big-endian) order.
+
+ Args:
+ address: An integer representation of an IPv4 IP address.
+
+ Returns:
+ The integer address packed as 4 bytes in network (big-endian) order.
+
+ Raises:
+ ValueError: If the integer is negative or too large to be an
+ IPv4 IP address.
+
+ """
+ try:
+ return _compat_to_bytes(address, 4, 'big')
+ except (struct.error, OverflowError):
+ raise ValueError("Address negative or too large for IPv4")
+
+
+def v6_int_to_packed(address):
+ """Represent an address as 16 packed bytes in network (big-endian) order.
+
+ Args:
+ address: An integer representation of an IPv6 IP address.
+
+ Returns:
+ The integer address packed as 16 bytes in network (big-endian) order.
+
+ """
+ try:
+ return _compat_to_bytes(address, 16, 'big')
+ except (struct.error, OverflowError):
+ raise ValueError("Address negative or too large for IPv6")
+
+
+def _split_optional_netmask(address):
+ """Helper to split the netmask and raise AddressValueError if needed"""
+ addr = _compat_str(address).split('/')
+ if len(addr) > 2:
+ raise AddressValueError("Only one '/' permitted in %r" % address)
+ return addr
+
+
+def _find_address_range(addresses):
+ """Find a sequence of sorted deduplicated IPv#Address.
+
+ Args:
+ addresses: a list of IPv#Address objects.
+
+ Yields:
+ A tuple containing the first and last IP addresses in the sequence.
+
+ """
+ it = iter(addresses)
+ first = last = next(it)
+ for ip in it:
+ if ip._ip != last._ip + 1:
+ yield first, last
+ first = ip
+ last = ip
+ yield first, last
+
+
+def _count_righthand_zero_bits(number, bits):
+ """Count the number of zero bits on the right hand side.
+
+ Args:
+ number: an integer.
+ bits: maximum number of bits to count.
+
+ Returns:
+ The number of zero bits on the right hand side of the number.
+
+ """
+ if number == 0:
+ return bits
+ return min(bits, _compat_bit_length(~number & (number - 1)))
+
+
+def summarize_address_range(first, last):
+ """Summarize a network range given the first and last IP addresses.
+
+ Example:
+ >>> list(summarize_address_range(IPv4Address('192.0.2.0'),
+ ... IPv4Address('192.0.2.130')))
+ ... #doctest: +NORMALIZE_WHITESPACE
+ [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'),
+ IPv4Network('192.0.2.130/32')]
+
+ Args:
+ first: the first IPv4Address or IPv6Address in the range.
+ last: the last IPv4Address or IPv6Address in the range.
+
+ Returns:
+ An iterator of the summarized IPv(4|6) network objects.
+
+ Raise:
+ TypeError:
+ If the first and last objects are not IP addresses.
+ If the first and last objects are not the same version.
+ ValueError:
+ If the last object is not greater than the first.
+ If the version of the first address is not 4 or 6.
+
+ """
+ if (not (isinstance(first, _BaseAddress) and
+ isinstance(last, _BaseAddress))):
+ raise TypeError('first and last must be IP addresses, not networks')
+ if first.version != last.version:
+ raise TypeError("%s and %s are not of the same version" % (
+ first, last))
+ if first > last:
+ raise ValueError('last IP address must be greater than first')
+
+ if first.version == 4:
+ ip = IPv4Network
+ elif first.version == 6:
+ ip = IPv6Network
+ else:
+ raise ValueError('unknown IP version')
+
+ ip_bits = first._max_prefixlen
+ first_int = first._ip
+ last_int = last._ip
+ while first_int <= last_int:
+ nbits = min(_count_righthand_zero_bits(first_int, ip_bits),
+ _compat_bit_length(last_int - first_int + 1) - 1)
+ net = ip((first_int, ip_bits - nbits))
+ yield net
+ first_int += 1 << nbits
+ if first_int - 1 == ip._ALL_ONES:
+ break
+
+
+def _collapse_addresses_internal(addresses):
+ """Loops through the addresses, collapsing concurrent netblocks.
+
+ Example:
+
+ ip1 = IPv4Network('192.0.2.0/26')
+ ip2 = IPv4Network('192.0.2.64/26')
+ ip3 = IPv4Network('192.0.2.128/26')
+ ip4 = IPv4Network('192.0.2.192/26')
+
+ _collapse_addresses_internal([ip1, ip2, ip3, ip4]) ->
+ [IPv4Network('192.0.2.0/24')]
+
+ This shouldn't be called directly; it is called via
+ collapse_addresses([]).
+
+ Args:
+ addresses: A list of IPv4Network's or IPv6Network's
+
+ Returns:
+ A list of IPv4Network's or IPv6Network's depending on what we were
+ passed.
+
+ """
+ # First merge
+ to_merge = list(addresses)
+ subnets = {}
+ while to_merge:
+ net = to_merge.pop()
+ supernet = net.supernet()
+ existing = subnets.get(supernet)
+ if existing is None:
+ subnets[supernet] = net
+ elif existing != net:
+ # Merge consecutive subnets
+ del subnets[supernet]
+ to_merge.append(supernet)
+ # Then iterate over resulting networks, skipping subsumed subnets
+ last = None
+ for net in sorted(subnets.values()):
+ if last is not None:
+ # Since they are sorted,
+ # last.network_address <= net.network_address is a given.
+ if last.broadcast_address >= net.broadcast_address:
+ continue
+ yield net
+ last = net
+
+
+def collapse_addresses(addresses):
+ """Collapse a list of IP objects.
+
+ Example:
+ collapse_addresses([IPv4Network('192.0.2.0/25'),
+ IPv4Network('192.0.2.128/25')]) ->
+ [IPv4Network('192.0.2.0/24')]
+
+ Args:
+ addresses: An iterator of IPv4Network or IPv6Network objects.
+
+ Returns:
+ An iterator of the collapsed IPv(4|6)Network objects.
+
+ Raises:
+ TypeError: If passed a list of mixed version objects.
+
+ """
+ addrs = []
+ ips = []
+ nets = []
+
+ # split IP addresses and networks
+ for ip in addresses:
+ if isinstance(ip, _BaseAddress):
+ if ips and ips[-1]._version != ip._version:
+ raise TypeError("%s and %s are not of the same version" % (
+ ip, ips[-1]))
+ ips.append(ip)
+ elif ip._prefixlen == ip._max_prefixlen:
+ if ips and ips[-1]._version != ip._version:
+ raise TypeError("%s and %s are not of the same version" % (
+ ip, ips[-1]))
+ try:
+ ips.append(ip.ip)
+ except AttributeError:
+ ips.append(ip.network_address)
+ else:
+ if nets and nets[-1]._version != ip._version:
+ raise TypeError("%s and %s are not of the same version" % (
+ ip, nets[-1]))
+ nets.append(ip)
+
+ # sort and dedup
+ ips = sorted(set(ips))
+
+ # find consecutive address ranges in the sorted sequence and summarize them
+ if ips:
+ for first, last in _find_address_range(ips):
+ addrs.extend(summarize_address_range(first, last))
+
+ return _collapse_addresses_internal(addrs + nets)
+
+
+def get_mixed_type_key(obj):
+ """Return a key suitable for sorting between networks and addresses.
+
+ Address and Network objects are not sortable by default; they're
+ fundamentally different so the expression
+
+ IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24')
+
+    doesn't make any sense. There are times, however, when you may wish
+ to have ipaddress sort these for you anyway. If you need to do this, you
+ can use this function as the key= argument to sorted().
+
+ Args:
+ obj: either a Network or Address object.
+ Returns:
+ appropriate key.
+
+ """
+ if isinstance(obj, _BaseNetwork):
+ return obj._get_networks_key()
+ elif isinstance(obj, _BaseAddress):
+ return obj._get_address_key()
+ return NotImplemented
+
+
+class _IPAddressBase(_TotalOrderingMixin):
+
+ """The mother class."""
+
+ __slots__ = ()
+
+ @property
+ def exploded(self):
+ """Return the longhand version of the IP address as a string."""
+ return self._explode_shorthand_ip_string()
+
+ @property
+ def compressed(self):
+ """Return the shorthand version of the IP address as a string."""
+ return _compat_str(self)
+
+ @property
+ def reverse_pointer(self):
+ """The name of the reverse DNS pointer for the IP address, e.g.:
+ >>> ipaddress.ip_address("127.0.0.1").reverse_pointer
+ '1.0.0.127.in-addr.arpa'
+ >>> ipaddress.ip_address("2001:db8::1").reverse_pointer
+ '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa'
+
+ """
+ return self._reverse_pointer()
+
+ @property
+ def version(self):
+ msg = '%200s has no version specified' % (type(self),)
+ raise NotImplementedError(msg)
+
+ def _check_int_address(self, address):
+ if address < 0:
+ msg = "%d (< 0) is not permitted as an IPv%d address"
+ raise AddressValueError(msg % (address, self._version))
+ if address > self._ALL_ONES:
+ msg = "%d (>= 2**%d) is not permitted as an IPv%d address"
+ raise AddressValueError(msg % (address, self._max_prefixlen,
+ self._version))
+
+ def _check_packed_address(self, address, expected_len):
+ address_len = len(address)
+ if address_len != expected_len:
+ msg = (
+ '%r (len %d != %d) is not permitted as an IPv%d address. '
+ 'Did you pass in a bytes (str in Python 2) instead of'
+ ' a unicode object?')
+ raise AddressValueError(msg % (address, address_len,
+ expected_len, self._version))
+
+ @classmethod
+ def _ip_int_from_prefix(cls, prefixlen):
+ """Turn the prefix length into a bitwise netmask
+
+ Args:
+ prefixlen: An integer, the prefix length.
+
+ Returns:
+ An integer.
+
+ """
+ return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen)
+
+ @classmethod
+ def _prefix_from_ip_int(cls, ip_int):
+ """Return prefix length from the bitwise netmask.
+
+ Args:
+ ip_int: An integer, the netmask in expanded bitwise format
+
+ Returns:
+ An integer, the prefix length.
+
+ Raises:
+ ValueError: If the input intermingles zeroes & ones
+ """
+ trailing_zeroes = _count_righthand_zero_bits(ip_int,
+ cls._max_prefixlen)
+ prefixlen = cls._max_prefixlen - trailing_zeroes
+ leading_ones = ip_int >> trailing_zeroes
+ all_ones = (1 << prefixlen) - 1
+ if leading_ones != all_ones:
+ byteslen = cls._max_prefixlen // 8
+ details = _compat_to_bytes(ip_int, byteslen, 'big')
+ msg = 'Netmask pattern %r mixes zeroes & ones'
+ raise ValueError(msg % details)
+ return prefixlen
+
+ @classmethod
+ def _report_invalid_netmask(cls, netmask_str):
+ msg = '%r is not a valid netmask' % netmask_str
+ raise NetmaskValueError(msg)
+
+ @classmethod
+ def _prefix_from_prefix_string(cls, prefixlen_str):
+ """Return prefix length from a numeric string
+
+ Args:
+ prefixlen_str: The string to be converted
+
+ Returns:
+ An integer, the prefix length.
+
+ Raises:
+ NetmaskValueError: If the input is not a valid netmask
+ """
+ # int allows a leading +/- as well as surrounding whitespace,
+ # so we ensure that isn't the case
+ if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str):
+ cls._report_invalid_netmask(prefixlen_str)
+ try:
+ prefixlen = int(prefixlen_str)
+ except ValueError:
+ cls._report_invalid_netmask(prefixlen_str)
+ if not (0 <= prefixlen <= cls._max_prefixlen):
+ cls._report_invalid_netmask(prefixlen_str)
+ return prefixlen
+
+ @classmethod
+ def _prefix_from_ip_string(cls, ip_str):
+ """Turn a netmask/hostmask string into a prefix length
+
+ Args:
+ ip_str: The netmask/hostmask to be converted
+
+ Returns:
+ An integer, the prefix length.
+
+ Raises:
+ NetmaskValueError: If the input is not a valid netmask/hostmask
+ """
+ # Parse the netmask/hostmask like an IP address.
+ try:
+ ip_int = cls._ip_int_from_string(ip_str)
+ except AddressValueError:
+ cls._report_invalid_netmask(ip_str)
+
+ # Try matching a netmask (this would be /1*0*/ as a bitwise regexp).
+ # Note that the two ambiguous cases (all-ones and all-zeroes) are
+ # treated as netmasks.
+ try:
+ return cls._prefix_from_ip_int(ip_int)
+ except ValueError:
+ pass
+
+ # Invert the bits, and try matching a /0+1+/ hostmask instead.
+ ip_int ^= cls._ALL_ONES
+ try:
+ return cls._prefix_from_ip_int(ip_int)
+ except ValueError:
+ cls._report_invalid_netmask(ip_str)
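+
+ # Example (illustrative): both mask spellings map to the same prefix
+ # length, the hostmask via the inverted second match:
+ #
+ #   IPv4Network._prefix_from_ip_string(u'255.255.255.0') == 24
+ #   IPv4Network._prefix_from_ip_string(u'0.0.0.255') == 24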
+
+ def __reduce__(self):
+ return self.__class__, (_compat_str(self),)
+
+
+class _BaseAddress(_IPAddressBase):
+
+ """A generic IP object.
+
+ This IP class contains the version independent methods which are
+ used by single IP addresses.
+ """
+
+ __slots__ = ()
+
+ def __int__(self):
+ return self._ip
+
+ def __eq__(self, other):
+ try:
+ return (self._ip == other._ip and
+ self._version == other._version)
+ except AttributeError:
+ return NotImplemented
+
+ def __lt__(self, other):
+ if not isinstance(other, _IPAddressBase):
+ return NotImplemented
+ if not isinstance(other, _BaseAddress):
+ raise TypeError('%s and %s are not of the same type' % (
+ self, other))
+ if self._version != other._version:
+ raise TypeError('%s and %s are not of the same version' % (
+ self, other))
+ if self._ip != other._ip:
+ return self._ip < other._ip
+ return False
+
+ # Shorthand for integer addition and subtraction. This is not
+ # meant to ever support addition/subtraction of addresses.
+ def __add__(self, other):
+ if not isinstance(other, _compat_int_types):
+ return NotImplemented
+ return self.__class__(int(self) + other)
+
+ def __sub__(self, other):
+ if not isinstance(other, _compat_int_types):
+ return NotImplemented
+ return self.__class__(int(self) - other)
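+
+ # Example (illustrative): integer offsets step through addresses,
+ # while adding two addresses raises TypeError:
+ #
+ #   IPv4Address(u'192.0.2.1') + 1 == IPv4Address(u'192.0.2.2')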
+
+ def __repr__(self):
+ return '%s(%r)' % (self.__class__.__name__, _compat_str(self))
+
+ def __str__(self):
+ return _compat_str(self._string_from_ip_int(self._ip))
+
+ def __hash__(self):
+ return hash(hex(int(self._ip)))
+
+ def _get_address_key(self):
+ return (self._version, self)
+
+ def __reduce__(self):
+ return self.__class__, (self._ip,)
+
+
+class _BaseNetwork(_IPAddressBase):
+
+ """A generic IP network object.
+
+ This IP class contains the version independent methods which are
+ used by networks.
+
+ """
+ def __init__(self, address):
+ self._cache = {}
+
+ def __repr__(self):
+ return '%s(%r)' % (self.__class__.__name__, _compat_str(self))
+
+ def __str__(self):
+ return '%s/%d' % (self.network_address, self.prefixlen)
+
+ def hosts(self):
+ """Generate Iterator over usable hosts in a network.
+
+ This is like __iter__ except it doesn't return the network
+ or broadcast addresses.
+
+ """
+ network = int(self.network_address)
+ broadcast = int(self.broadcast_address)
+ for x in _compat_range(network + 1, broadcast):
+ yield self._address_class(x)
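+
+ # Example (illustrative): for a /30, only the two "middle" addresses
+ # are yielded; the network and broadcast addresses are skipped:
+ #
+ #   list(ip_network(u'192.0.2.0/30').hosts())
+ #   == [IPv4Address(u'192.0.2.1'), IPv4Address(u'192.0.2.2')]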
+
+ def __iter__(self):
+ network = int(self.network_address)
+ broadcast = int(self.broadcast_address)
+ for x in _compat_range(network, broadcast + 1):
+ yield self._address_class(x)
+
+ def __getitem__(self, n):
+ network = int(self.network_address)
+ broadcast = int(self.broadcast_address)
+ if n >= 0:
+ if network + n > broadcast:
+ raise IndexError('address out of range')
+ return self._address_class(network + n)
+ else:
+ n += 1
+ if broadcast + n < network:
+ raise IndexError('address out of range')
+ return self._address_class(broadcast + n)
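+
+ # Example (illustrative): indexing counts up from the network address,
+ # and negative indices count back from the broadcast address:
+ #
+ #   ip_network(u'192.0.2.0/24')[1] == IPv4Address(u'192.0.2.1')
+ #   ip_network(u'192.0.2.0/24')[-1] == IPv4Address(u'192.0.2.255')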
+
+ def __lt__(self, other):
+ if not isinstance(other, _IPAddressBase):
+ return NotImplemented
+ if not isinstance(other, _BaseNetwork):
+ raise TypeError('%s and %s are not of the same type' % (
+ self, other))
+ if self._version != other._version:
+ raise TypeError('%s and %s are not of the same version' % (
+ self, other))
+ if self.network_address != other.network_address:
+ return self.network_address < other.network_address
+ if self.netmask != other.netmask:
+ return self.netmask < other.netmask
+ return False
+
+ def __eq__(self, other):
+ try:
+ return (self._version == other._version and
+ self.network_address == other.network_address and
+ int(self.netmask) == int(other.netmask))
+ except AttributeError:
+ return NotImplemented
+
+ def __hash__(self):
+ return hash(int(self.network_address) ^ int(self.netmask))
+
+ def __contains__(self, other):
+ # always false if one is v4 and the other is v6.
+ if self._version != other._version:
+ return False
+ # dealing with another network.
+ if isinstance(other, _BaseNetwork):
+ return False
+ # dealing with another address
+ else:
+ # address
+ return (int(self.network_address) <= int(other._ip) <=
+ int(self.broadcast_address))
+
+ def overlaps(self, other):
+ """Tell if self is partly contained in other."""
+ return self.network_address in other or (
+ self.broadcast_address in other or (
+ other.network_address in self or (
+ other.broadcast_address in self)))
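+
+ # Example (illustrative):
+ #
+ #   ip_network(u'192.0.2.0/28').overlaps(ip_network(u'192.0.2.8/29'))
+ #   == True
+ #   ip_network(u'192.0.2.0/28').overlaps(ip_network(u'192.0.2.16/28'))
+ #   == False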
+
+ @property
+ def broadcast_address(self):
+ x = self._cache.get('broadcast_address')
+ if x is None:
+ x = self._address_class(int(self.network_address) |
+ int(self.hostmask))
+ self._cache['broadcast_address'] = x
+ return x
+
+ @property
+ def hostmask(self):
+ x = self._cache.get('hostmask')
+ if x is None:
+ x = self._address_class(int(self.netmask) ^ self._ALL_ONES)
+ self._cache['hostmask'] = x
+ return x
+
+ @property
+ def with_prefixlen(self):
+ return '%s/%d' % (self.network_address, self._prefixlen)
+
+ @property
+ def with_netmask(self):
+ return '%s/%s' % (self.network_address, self.netmask)
+
+ @property
+ def with_hostmask(self):
+ return '%s/%s' % (self.network_address, self.hostmask)
+
+ @property
+ def num_addresses(self):
+ """Number of hosts in the current subnet."""
+ return int(self.broadcast_address) - int(self.network_address) + 1
+
+ @property
+ def _address_class(self):
+ # Returning bare address objects (rather than interfaces) allows for
+ # more consistent behaviour across the network address, broadcast
+ # address and individual host addresses.
+ msg = '%200s has no associated address class' % (type(self),)
+ raise NotImplementedError(msg)
+
+ @property
+ def prefixlen(self):
+ return self._prefixlen
+
+ def address_exclude(self, other):
+ """Remove an address from a larger block.
+
+ For example:
+
+ addr1 = ip_network('192.0.2.0/28')
+ addr2 = ip_network('192.0.2.1/32')
+ list(addr1.address_exclude(addr2)) =
+ [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
+ IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]
+
+ or IPv6:
+
+ addr1 = ip_network('2001:db8::/32')
+ addr2 = ip_network('2001:db8::1/128')
+ list(addr1.address_exclude(addr2)) =
+ [ip_network('2001:db8::/128'),
+ ip_network('2001:db8::2/127'),
+ ip_network('2001:db8::4/126'),
+ ip_network('2001:db8::8/125'),
+ ...
+ ip_network('2001:db8:8000::/33')]
+
+ Args:
+ other: An IPv4Network or IPv6Network object of the same type.
+
+ Returns:
+ An iterator of the IPv(4|6)Network objects which is self
+ minus other.
+
+ Raises:
+ TypeError: If self and other are of differing address
+ versions, or if other is not a network object.
+ ValueError: If other is not completely contained by self.
+
+ """
+ if not self._version == other._version:
+ raise TypeError("%s and %s are not of the same version" % (
+ self, other))
+
+ if not isinstance(other, _BaseNetwork):
+ raise TypeError("%s is not a network object" % other)
+
+ if not other.subnet_of(self):
+ raise ValueError('%s not contained in %s' % (other, self))
+ if other == self:
+ return
+
+ # Make sure we're comparing the network of other.
+ other = other.__class__('%s/%s' % (other.network_address,
+ other.prefixlen))
+
+ s1, s2 = self.subnets()
+ while s1 != other and s2 != other:
+ if other.subnet_of(s1):
+ yield s2
+ s1, s2 = s1.subnets()
+ elif other.subnet_of(s2):
+ yield s1
+ s1, s2 = s2.subnets()
+ else:
+ # If we got here, there's a bug somewhere.
+ raise AssertionError('Error performing exclusion: '
+ 's1: %s s2: %s other: %s' %
+ (s1, s2, other))
+ if s1 == other:
+ yield s2
+ elif s2 == other:
+ yield s1
+ else:
+ # If we got here, there's a bug somewhere.
+ raise AssertionError('Error performing exclusion: '
+ 's1: %s s2: %s other: %s' %
+ (s1, s2, other))
+
+ def compare_networks(self, other):
+ """Compare two IP objects.
+
+ This is only concerned about the comparison of the integer
+ representation of the network addresses. This means that the
+ host bits aren't considered at all in this method. If you want
+ to compare host bits, you can easily enough do a
+ 'HostA._ip < HostB._ip'
+
+ Args:
+ other: An IP object.
+
+ Returns:
+ If the IP versions of self and other are the same, returns:
+
+ -1 if self < other
+ eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25')
+ IPv6Network('2001:db8::1000/124') <
+ IPv6Network('2001:db8::2000/124')
+ 0 if self == other
+ eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24')
+ IPv6Network('2001:db8::1000/124') ==
+ IPv6Network('2001:db8::1000/124')
+ 1 if self > other
+ eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25')
+ IPv6Network('2001:db8::2000/124') >
+ IPv6Network('2001:db8::1000/124')
+
+ Raises:
+ TypeError if the IP versions are different.
+
+ """
+ # does this need to raise a ValueError?
+ if self._version != other._version:
+ raise TypeError('%s and %s are not of the same type' % (
+ self, other))
+ # self._version == other._version below here:
+ if self.network_address < other.network_address:
+ return -1
+ if self.network_address > other.network_address:
+ return 1
+ # self.network_address == other.network_address below here:
+ if self.netmask < other.netmask:
+ return -1
+ if self.netmask > other.netmask:
+ return 1
+ return 0
+
+ def _get_networks_key(self):
+ """Network-only key function.
+
+ Returns an object that identifies this address' network and
+ netmask. This function is a suitable "key" argument for sorted()
+ and list.sort().
+
+ """
+ return (self._version, self.network_address, self.netmask)
+
+ def subnets(self, prefixlen_diff=1, new_prefix=None):
+ """The subnets which join to make the current subnet.
+
+ In the case that self contains only one IP
+ (self._prefixlen == 32 for IPv4 or self._prefixlen == 128
+ for IPv6), the resulting iterator yields only self.
+
+ Args:
+ prefixlen_diff: An integer, the amount the prefix length
+ should be increased by. This should not be set if
+ new_prefix is also set.
+ new_prefix: The desired new prefix length. This must be a
+ larger number (smaller prefix) than the existing prefix.
+ This should not be set if prefixlen_diff is also set.
+
+ Returns:
+ An iterator of IPv(4|6) objects.
+
+ Raises:
+ ValueError: The prefixlen_diff is too small or too large.
+ OR
+ prefixlen_diff and new_prefix are both set or new_prefix
+ is a smaller number than the current prefix (smaller
+ number means a larger network)
+
+ """
+ if self._prefixlen == self._max_prefixlen:
+ yield self
+ return
+
+ if new_prefix is not None:
+ if new_prefix < self._prefixlen:
+ raise ValueError('new prefix must be longer')
+ if prefixlen_diff != 1:
+ raise ValueError('cannot set prefixlen_diff and new_prefix')
+ prefixlen_diff = new_prefix - self._prefixlen
+
+ if prefixlen_diff < 0:
+ raise ValueError('prefix length diff must be > 0')
+ new_prefixlen = self._prefixlen + prefixlen_diff
+
+ if new_prefixlen > self._max_prefixlen:
+ raise ValueError(
+ 'prefix length diff %d is invalid for netblock %s' % (
+ new_prefixlen, self))
+
+ start = int(self.network_address)
+ end = int(self.broadcast_address) + 1
+ step = (int(self.hostmask) + 1) >> prefixlen_diff
+ for new_addr in _compat_range(start, end, step):
+ current = self.__class__((new_addr, new_prefixlen))
+ yield current
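+
+ # Example (illustrative): both spellings split a /24 into the same
+ # four /26 subnets:
+ #
+ #   list(ip_network(u'192.0.2.0/24').subnets(prefixlen_diff=2))
+ #   == list(ip_network(u'192.0.2.0/24').subnets(new_prefix=26))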
+
+ def supernet(self, prefixlen_diff=1, new_prefix=None):
+ """The supernet containing the current network.
+
+ Args:
+ prefixlen_diff: An integer, the amount the prefix length of
+ the network should be decreased by. For example, given a
+ /24 network and a prefixlen_diff of 3, a supernet with a
+ /21 netmask is returned.
+
+ Returns:
+ An IPv(4|6) network object.
+
+ Raises:
+ ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have
+ a negative prefix length.
+ OR
+ If prefixlen_diff and new_prefix are both set or new_prefix is a
+ larger number than the current prefix (larger number means a
+ smaller network)
+
+ """
+ if self._prefixlen == 0:
+ return self
+
+ if new_prefix is not None:
+ if new_prefix > self._prefixlen:
+ raise ValueError('new prefix must be shorter')
+ if prefixlen_diff != 1:
+ raise ValueError('cannot set prefixlen_diff and new_prefix')
+ prefixlen_diff = self._prefixlen - new_prefix
+
+ new_prefixlen = self.prefixlen - prefixlen_diff
+ if new_prefixlen < 0:
+ raise ValueError(
+ 'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
+ (self.prefixlen, prefixlen_diff))
+ return self.__class__((
+ int(self.network_address) & (int(self.netmask) << prefixlen_diff),
+ new_prefixlen))
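+
+ # Example (illustrative): host bits uncovered by the shorter prefix
+ # are masked away:
+ #
+ #   ip_network(u'192.0.2.0/24').supernet(prefixlen_diff=3)
+ #   == ip_network(u'192.0.0.0/21')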
+
+ @property
+ def is_multicast(self):
+ """Test if the address is reserved for multicast use.
+
+ Returns:
+ A boolean, True if the address is a multicast address.
+ See RFC 2373 2.7 for details.
+
+ """
+ return (self.network_address.is_multicast and
+ self.broadcast_address.is_multicast)
+
+ @staticmethod
+ def _is_subnet_of(a, b):
+ try:
+ # Always false if one is v4 and the other is v6.
+ if a._version != b._version:
+ raise TypeError(
+ "%s and %s are not of the same version" % (a, b))
+ return (b.network_address <= a.network_address and
+ b.broadcast_address >= a.broadcast_address)
+ except AttributeError:
+ raise TypeError("Unable to test subnet containment "
+ "between %s and %s" % (a, b))
+
+ def subnet_of(self, other):
+ """Return True if this network is a subnet of other."""
+ return self._is_subnet_of(self, other)
+
+ def supernet_of(self, other):
+ """Return True if this network is a supernet of other."""
+ return self._is_subnet_of(other, self)
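+
+ # Example (illustrative):
+ #
+ #   a = ip_network(u'192.0.2.0/28')
+ #   b = ip_network(u'192.0.2.0/24')
+ #   a.subnet_of(b) and b.supernet_of(a)  # both True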
+
+ @property
+ def is_reserved(self):
+ """Test if the address is otherwise IETF reserved.
+
+ Returns:
+ A boolean, True if the address is within one of the
+ reserved IPv6 Network ranges.
+
+ """
+ return (self.network_address.is_reserved and
+ self.broadcast_address.is_reserved)
+
+ @property
+ def is_link_local(self):
+ """Test if the address is reserved for link-local.
+
+ Returns:
+ A boolean, True if the address is reserved per RFC 4291.
+
+ """
+ return (self.network_address.is_link_local and
+ self.broadcast_address.is_link_local)
+
+ @property
+ def is_private(self):
+ """Test if this address is allocated for private networks.
+
+ Returns:
+ A boolean, True if the address is reserved per
+ iana-ipv4-special-registry or iana-ipv6-special-registry.
+
+ """
+ return (self.network_address.is_private and
+ self.broadcast_address.is_private)
+
+ @property
+ def is_global(self):
+ """Test if this address is allocated for public networks.
+
+ Returns:
+ A boolean, True if the address is not reserved per
+ iana-ipv4-special-registry or iana-ipv6-special-registry.
+
+ """
+ return not self.is_private
+
+ @property
+ def is_unspecified(self):
+ """Test if the address is unspecified.
+
+ Returns:
+ A boolean, True if this is the unspecified address as defined in
+ RFC 2373 2.5.2.
+
+ """
+ return (self.network_address.is_unspecified and
+ self.broadcast_address.is_unspecified)
+
+ @property
+ def is_loopback(self):
+ """Test if the address is a loopback address.
+
+ Returns:
+ A boolean, True if the address is a loopback address as defined in
+ RFC 2373 2.5.3.
+
+ """
+ return (self.network_address.is_loopback and
+ self.broadcast_address.is_loopback)
+
+
+class _BaseV4(object):
+
+ """Base IPv4 object.
+
+ The following methods are used by IPv4 objects in both single IP
+ addresses and networks.
+
+ """
+
+ __slots__ = ()
+ _version = 4
+ # Equivalent to 255.255.255.255 or 32 bits of 1's.
+ _ALL_ONES = (2 ** IPV4LENGTH) - 1
+ _DECIMAL_DIGITS = frozenset('0123456789')
+
+ # The valid octets for host and netmasks; only useful for IPv4.
+ _valid_mask_octets = frozenset([255, 254, 252, 248, 240, 224, 192, 128, 0])
+
+ _max_prefixlen = IPV4LENGTH
+ # There are only a handful of valid v4 netmasks, so we cache them all
+ # when constructed (see _make_netmask()).
+ _netmask_cache = {}
+
+ def _explode_shorthand_ip_string(self):
+ return _compat_str(self)
+
+ @classmethod
+ def _make_netmask(cls, arg):
+ """Make a (netmask, prefix_len) tuple from the given argument.
+
+ Argument can be:
+ - an integer (the prefix length)
+ - a string representing the prefix length (e.g. "24")
+ - a string representing the prefix netmask (e.g. "255.255.255.0")
+ """
+ if arg not in cls._netmask_cache:
+ if isinstance(arg, _compat_int_types):
+ prefixlen = arg
+ else:
+ try:
+ # Check for a netmask in prefix length form
+ prefixlen = cls._prefix_from_prefix_string(arg)
+ except NetmaskValueError:
+ # Check for a netmask or hostmask in dotted-quad form.
+ # This may raise NetmaskValueError.
+ prefixlen = cls._prefix_from_ip_string(arg)
+ netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen))
+ cls._netmask_cache[arg] = netmask, prefixlen
+ return cls._netmask_cache[arg]
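+
+ # Example (illustrative): all three accepted spellings produce the
+ # same cached (netmask, prefixlen) tuple:
+ #
+ #   IPv4Network._make_netmask(24)
+ #   == IPv4Network._make_netmask(u'24')
+ #   == IPv4Network._make_netmask(u'255.255.255.0')
+ #   == (IPv4Address(u'255.255.255.0'), 24)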
+
+ @classmethod
+ def _ip_int_from_string(cls, ip_str):
+ """Turn the given IP string into an integer for comparison.
+
+ Args:
+ ip_str: A string, the IP ip_str.
+
+ Returns:
+ The IP ip_str as an integer.
+
+ Raises:
+ AddressValueError: if ip_str isn't a valid IPv4 Address.
+
+ """
+ if not ip_str:
+ raise AddressValueError('Address cannot be empty')
+
+ octets = ip_str.split('.')
+ if len(octets) != 4:
+ raise AddressValueError("Expected 4 octets in %r" % ip_str)
+
+ try:
+ return _compat_int_from_byte_vals(
+ map(cls._parse_octet, octets), 'big')
+ except ValueError as exc:
+ raise AddressValueError("%s in %r" % (exc, ip_str))
+
+ @classmethod
+ def _parse_octet(cls, octet_str):
+ """Convert a decimal octet into an integer.
+
+ Args:
+ octet_str: A string, the number to parse.
+
+ Returns:
+ The octet as an integer.
+
+ Raises:
+ ValueError: if the octet isn't strictly a decimal from [0..255].
+
+ """
+ if not octet_str:
+ raise ValueError("Empty octet not permitted")
+ # Whitelist the characters, since int() allows a lot of bizarre stuff.
+ if not cls._DECIMAL_DIGITS.issuperset(octet_str):
+ msg = "Only decimal digits permitted in %r"
+ raise ValueError(msg % octet_str)
+ # We do the length check second, since the invalid character error
+ # is likely to be more informative for the user
+ if len(octet_str) > 3:
+ msg = "At most 3 characters permitted in %r"
+ raise ValueError(msg % octet_str)
+ # Convert to integer (we know digits are legal)
+ octet_int = int(octet_str, 10)
+ # Any octets that look like they *might* be written in octal,
+ # and which don't look exactly the same in both octal and
+ # decimal are rejected as ambiguous
+ if octet_int > 7 and octet_str[0] == '0':
+ msg = "Ambiguous (octal/decimal) value in %r not permitted"
+ raise ValueError(msg % octet_str)
+ if octet_int > 255:
+ raise ValueError("Octet %d (> 255) not permitted" % octet_int)
+ return octet_int
+
+ @classmethod
+ def _string_from_ip_int(cls, ip_int):
+ """Turns a 32-bit integer into dotted decimal notation.
+
+ Args:
+ ip_int: An integer, the IP address.
+
+ Returns:
+ The IP address as a string in dotted decimal notation.
+
+ """
+ return '.'.join(_compat_str(struct.unpack(b'!B', b)[0]
+ if isinstance(b, bytes)
+ else b)
+ for b in _compat_to_bytes(ip_int, 4, 'big'))
+
+ def _is_hostmask(self, ip_str):
+ """Test if the IP string is a hostmask (rather than a netmask).
+
+ Args:
+ ip_str: A string, the potential hostmask.
+
+ Returns:
+ A boolean, True if the IP string is a hostmask.
+
+ """
+ bits = ip_str.split('.')
+ try:
+ parts = [x for x in map(int, bits) if x in self._valid_mask_octets]
+ except ValueError:
+ return False
+ if len(parts) != len(bits):
+ return False
+ if parts[0] < parts[-1]:
+ return True
+ return False
+
+ def _reverse_pointer(self):
+ """Return the reverse DNS pointer name for the IPv4 address.
+
+ This implements the method described in RFC1035 3.5.
+
+ """
+ reverse_octets = _compat_str(self).split('.')[::-1]
+ return '.'.join(reverse_octets) + '.in-addr.arpa'
+
+ @property
+ def max_prefixlen(self):
+ return self._max_prefixlen
+
+ @property
+ def version(self):
+ return self._version
+
+
+class IPv4Address(_BaseV4, _BaseAddress):
+
+ """Represent and manipulate single IPv4 Addresses."""
+
+ __slots__ = ('_ip', '__weakref__')
+
+ def __init__(self, address):
+
+ """
+ Args:
+ address: A string or integer representing the IP
+
+ Additionally, an integer can be passed, so
+ IPv4Address('192.0.2.1') == IPv4Address(3221225985)
+ or, more generally
+ IPv4Address(int(IPv4Address('192.0.2.1'))) ==
+ IPv4Address('192.0.2.1')
+
+ Raises:
+ AddressValueError: If ipaddress isn't a valid IPv4 address.
+
+ """
+ # Efficient constructor from integer.
+ if isinstance(address, _compat_int_types):
+ self._check_int_address(address)
+ self._ip = address
+ return
+
+ # Constructing from a packed address
+ if isinstance(address, bytes):
+ self._check_packed_address(address, 4)
+ bvs = _compat_bytes_to_byte_vals(address)
+ self._ip = _compat_int_from_byte_vals(bvs, 'big')
+ return
+
+ # Assume input argument to be string or any object representation
+ # which converts into a formatted IP string.
+ addr_str = _compat_str(address)
+ if '/' in addr_str:
+ raise AddressValueError("Unexpected '/' in %r" % address)
+ self._ip = self._ip_int_from_string(addr_str)
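+
+ # Example (illustrative): the text, integer and packed-bytes forms
+ # all construct the same address:
+ #
+ #   IPv4Address(u'192.0.2.1') == IPv4Address(3221225985)
+ #   IPv4Address(u'192.0.2.1') == IPv4Address(b'\xc0\x00\x02\x01')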
+
+ @property
+ def packed(self):
+ """The binary representation of this address."""
+ return v4_int_to_packed(self._ip)
+
+ @property
+ def is_reserved(self):
+ """Test if the address is otherwise IETF reserved.
+
+ Returns:
+ A boolean, True if the address is within the
+ reserved IPv4 Network range.
+
+ """
+ return self in self._constants._reserved_network
+
+ @property
+ def is_private(self):
+ """Test if this address is allocated for private networks.
+
+ Returns:
+ A boolean, True if the address is reserved per
+ iana-ipv4-special-registry.
+
+ """
+ return any(self in net for net in self._constants._private_networks)
+
+ @property
+ def is_global(self):
+ return (
+ self not in self._constants._public_network and
+ not self.is_private)
+
+ @property
+ def is_multicast(self):
+ """Test if the address is reserved for multicast use.
+
+ Returns:
+ A boolean, True if the address is multicast.
+ See RFC 3171 for details.
+
+ """
+ return self in self._constants._multicast_network
+
+ @property
+ def is_unspecified(self):
+ """Test if the address is unspecified.
+
+ Returns:
+ A boolean, True if this is the unspecified address as defined in
+ RFC 5735 3.
+
+ """
+ return self == self._constants._unspecified_address
+
+ @property
+ def is_loopback(self):
+ """Test if the address is a loopback address.
+
+ Returns:
+ A boolean, True if the address is a loopback per RFC 3330.
+
+ """
+ return self in self._constants._loopback_network
+
+ @property
+ def is_link_local(self):
+ """Test if the address is reserved for link-local.
+
+ Returns:
+ A boolean, True if the address is link-local per RFC 3927.
+
+ """
+ return self in self._constants._linklocal_network
+
+
+class IPv4Interface(IPv4Address):
+
+ def __init__(self, address):
+ if isinstance(address, (bytes, _compat_int_types)):
+ IPv4Address.__init__(self, address)
+ self.network = IPv4Network(self._ip)
+ self._prefixlen = self._max_prefixlen
+ return
+
+ if isinstance(address, tuple):
+ IPv4Address.__init__(self, address[0])
+ if len(address) > 1:
+ self._prefixlen = int(address[1])
+ else:
+ self._prefixlen = self._max_prefixlen
+
+ self.network = IPv4Network(address, strict=False)
+ self.netmask = self.network.netmask
+ self.hostmask = self.network.hostmask
+ return
+
+ addr = _split_optional_netmask(address)
+ IPv4Address.__init__(self, addr[0])
+
+ self.network = IPv4Network(address, strict=False)
+ self._prefixlen = self.network._prefixlen
+
+ self.netmask = self.network.netmask
+ self.hostmask = self.network.hostmask
+
+ def __str__(self):
+ return '%s/%d' % (self._string_from_ip_int(self._ip),
+ self.network.prefixlen)
+
+ def __eq__(self, other):
+ address_equal = IPv4Address.__eq__(self, other)
+ if not address_equal or address_equal is NotImplemented:
+ return address_equal
+ try:
+ return self.network == other.network
+ except AttributeError:
+ # An interface with an associated network is NOT the
+ # same as an unassociated address. That's why the hash
+ # takes the extra info into account.
+ return False
+
+ def __lt__(self, other):
+ address_less = IPv4Address.__lt__(self, other)
+ if address_less is NotImplemented:
+ return NotImplemented
+ try:
+ return (self.network < other.network or
+ self.network == other.network and address_less)
+ except AttributeError:
+ # We *do* allow addresses and interfaces to be sorted. The
+ # unassociated address is considered less than all interfaces.
+ return False
+
+ def __hash__(self):
+ return self._ip ^ self._prefixlen ^ int(self.network.network_address)
+
+ __reduce__ = _IPAddressBase.__reduce__
+
+ @property
+ def ip(self):
+ return IPv4Address(self._ip)
+
+ @property
+ def with_prefixlen(self):
+ return '%s/%s' % (self._string_from_ip_int(self._ip),
+ self._prefixlen)
+
+ @property
+ def with_netmask(self):
+ return '%s/%s' % (self._string_from_ip_int(self._ip),
+ self.netmask)
+
+ @property
+ def with_hostmask(self):
+ return '%s/%s' % (self._string_from_ip_int(self._ip),
+ self.hostmask)
+
+
+class IPv4Network(_BaseV4, _BaseNetwork):
+
+ """This class represents and manipulates 32-bit IPv4 network + addresses..
+
+ Attributes: [examples for IPv4Network('192.0.2.0/27')]
+ .network_address: IPv4Address('192.0.2.0')
+ .hostmask: IPv4Address('0.0.0.31')
+ .broadcast_address: IPv4Address('192.0.2.31')
+ .netmask: IPv4Address('255.255.255.224')
+ .prefixlen: 27
+
+ """
+ # Class to use when creating address objects
+ _address_class = IPv4Address
+
+ def __init__(self, address, strict=True):
+
+ """Instantiate a new IPv4 network object.
+
+ Args:
+ address: A string or integer representing the IP [& network].
+ '192.0.2.0/24'
+ '192.0.2.0/255.255.255.0'
+ '192.0.0.2/0.0.0.255'
+ are all functionally the same in IPv4. Similarly,
+ '192.0.2.1'
+ '192.0.2.1/255.255.255.255'
+ '192.0.2.1/32'
+ are also functionally equivalent. That is to say, failing to
+ provide a subnetmask will create an object with a mask of /32.
+
+ If the mask (portion after the / in the argument) is given in
+ dotted quad form, it is treated as a netmask if it starts with a
+ non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
+ starts with a zero field (e.g. 0.255.255.255 == /8), with the
+ single exception of an all-zero mask which is treated as a
+ netmask == /0. If no mask is given, a default of /32 is used.
+
+ Additionally, an integer can be passed, so
+ IPv4Network('192.0.2.1') == IPv4Network(3221225985)
+ or, more generally
+ IPv4Interface(int(IPv4Interface('192.0.2.1'))) ==
+ IPv4Interface('192.0.2.1')
+
+ Raises:
+ AddressValueError: If ipaddress isn't a valid IPv4 address.
+ NetmaskValueError: If the netmask isn't valid for
+ an IPv4 address.
+ ValueError: If strict is True and a network address is not
+ supplied.
+
+ """
+ _BaseNetwork.__init__(self, address)
+
+ # Constructing from a packed address or integer
+ if isinstance(address, (_compat_int_types, bytes)):
+ self.network_address = IPv4Address(address)
+ self.netmask, self._prefixlen = self._make_netmask(
+ self._max_prefixlen)
+ # fixme: address/network test here.
+ return
+
+ if isinstance(address, tuple):
+ if len(address) > 1:
+ arg = address[1]
+ else:
+ # We weren't given an address[1]
+ arg = self._max_prefixlen
+ self.network_address = IPv4Address(address[0])
+ self.netmask, self._prefixlen = self._make_netmask(arg)
+ packed = int(self.network_address)
+ if packed & int(self.netmask) != packed:
+ if strict:
+ raise ValueError('%s has host bits set' % self)
+ else:
+ self.network_address = IPv4Address(packed &
+ int(self.netmask))
+ return
+
+ # Assume input argument to be string or any object representation
+ # which converts into a formatted IP prefix string.
+ addr = _split_optional_netmask(address)
+ self.network_address = IPv4Address(self._ip_int_from_string(addr[0]))
+
+ if len(addr) == 2:
+ arg = addr[1]
+ else:
+ arg = self._max_prefixlen
+ self.netmask, self._prefixlen = self._make_netmask(arg)
+
+ if strict:
+ if (IPv4Address(int(self.network_address) & int(self.netmask)) !=
+ self.network_address):
+ raise ValueError('%s has host bits set' % self)
+ self.network_address = IPv4Address(int(self.network_address) &
+ int(self.netmask))
+
+ if self._prefixlen == (self._max_prefixlen - 1):
+ self.hosts = self.__iter__
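+
+ # Example (illustrative): strict=True (the default) rejects host bits,
+ # strict=False masks them off:
+ #
+ #   IPv4Network(u'192.0.2.1/24')  # ValueError: has host bits set
+ #   IPv4Network(u'192.0.2.1/24', strict=False) == IPv4Network(u'192.0.2.0/24')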
+
+ @property
+ def is_global(self):
+ """Test if this address is allocated for public networks.
+
+ Returns:
+ A boolean, True if the address is not reserved per
+ iana-ipv4-special-registry.
+
+ """
+ return (not (self.network_address in IPv4Network('100.64.0.0/10') and
+ self.broadcast_address in IPv4Network('100.64.0.0/10')) and
+ not self.is_private)
+
+
+class _IPv4Constants(object):
+
+ _linklocal_network = IPv4Network('169.254.0.0/16')
+
+ _loopback_network = IPv4Network('127.0.0.0/8')
+
+ _multicast_network = IPv4Network('224.0.0.0/4')
+
+ _public_network = IPv4Network('100.64.0.0/10')
+
+ _private_networks = [
+ IPv4Network('0.0.0.0/8'),
+ IPv4Network('10.0.0.0/8'),
+ IPv4Network('127.0.0.0/8'),
+ IPv4Network('169.254.0.0/16'),
+ IPv4Network('172.16.0.0/12'),
+ IPv4Network('192.0.0.0/29'),
+ IPv4Network('192.0.0.170/31'),
+ IPv4Network('192.0.2.0/24'),
+ IPv4Network('192.168.0.0/16'),
+ IPv4Network('198.18.0.0/15'),
+ IPv4Network('198.51.100.0/24'),
+ IPv4Network('203.0.113.0/24'),
+ IPv4Network('240.0.0.0/4'),
+ IPv4Network('255.255.255.255/32'),
+ ]
+
+ _reserved_network = IPv4Network('240.0.0.0/4')
+
+ _unspecified_address = IPv4Address('0.0.0.0')
+
+
+IPv4Address._constants = _IPv4Constants
+
+
+class _BaseV6(object):
+
+ """Base IPv6 object.
+
+ The following methods are used by IPv6 objects in both single IP
+ addresses and networks.
+
+ """
+
+ __slots__ = ()
+ _version = 6
+ _ALL_ONES = (2 ** IPV6LENGTH) - 1
+ _HEXTET_COUNT = 8
+ _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef')
+ _max_prefixlen = IPV6LENGTH
+
+ # There are only a bunch of valid v6 netmasks, so we cache them all
+ # when constructed (see _make_netmask()).
+ _netmask_cache = {}
+
+ @classmethod
+ def _make_netmask(cls, arg):
+ """Make a (netmask, prefix_len) tuple from the given argument.
+
+ Argument can be:
+ - an integer (the prefix length)
+ - a string representing the prefix length (e.g. "24")
+ - a string representing the prefix netmask (e.g. "255.255.255.0")
+ """
+ if arg not in cls._netmask_cache:
+ if isinstance(arg, _compat_int_types):
+ prefixlen = arg
+ else:
+ prefixlen = cls._prefix_from_prefix_string(arg)
+ netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen))
+ cls._netmask_cache[arg] = netmask, prefixlen
+ return cls._netmask_cache[arg]
+
+ @classmethod
+ def _ip_int_from_string(cls, ip_str):
+ """Turn an IPv6 ip_str into an integer.
+
+ Args:
+ ip_str: A string, the IPv6 ip_str.
+
+ Returns:
+ An int, the IPv6 address
+
+ Raises:
+ AddressValueError: if ip_str isn't a valid IPv6 Address.
+
+ """
+ if not ip_str:
+ raise AddressValueError('Address cannot be empty')
+
+ parts = ip_str.split(':')
+
+ # An IPv6 address needs at least 2 colons (3 parts).
+ _min_parts = 3
+ if len(parts) < _min_parts:
+ msg = "At least %d parts expected in %r" % (_min_parts, ip_str)
+ raise AddressValueError(msg)
+
+ # If the address has an IPv4-style suffix, convert it to hexadecimal.
+ if '.' in parts[-1]:
+ try:
+ ipv4_int = IPv4Address(parts.pop())._ip
+ except AddressValueError as exc:
+ raise AddressValueError("%s in %r" % (exc, ip_str))
+ parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF))
+ parts.append('%x' % (ipv4_int & 0xFFFF))
+
+ # An IPv6 address can't have more than 8 colons (9 parts).
+ # The extra colon comes from using the "::" notation for a single
+ # leading or trailing zero part.
+ _max_parts = cls._HEXTET_COUNT + 1
+ if len(parts) > _max_parts:
+ msg = "At most %d colons permitted in %r" % (
+ _max_parts - 1, ip_str)
+ raise AddressValueError(msg)
+
+ # Disregarding the endpoints, find '::' with nothing in between.
+ # This indicates that a run of zeroes has been skipped.
+ skip_index = None
+ for i in _compat_range(1, len(parts) - 1):
+ if not parts[i]:
+ if skip_index is not None:
+ # Can't have more than one '::'
+ msg = "At most one '::' permitted in %r" % ip_str
+ raise AddressValueError(msg)
+ skip_index = i
+
+ # parts_hi is the number of parts to copy from above/before the '::'
+ # parts_lo is the number of parts to copy from below/after the '::'
+ if skip_index is not None:
+ # If we found a '::', then check if it also covers the endpoints.
+ parts_hi = skip_index
+ parts_lo = len(parts) - skip_index - 1
+ if not parts[0]:
+ parts_hi -= 1
+ if parts_hi:
+ msg = "Leading ':' only permitted as part of '::' in %r"
+ raise AddressValueError(msg % ip_str) # ^: requires ^::
+ if not parts[-1]:
+ parts_lo -= 1
+ if parts_lo:
+ msg = "Trailing ':' only permitted as part of '::' in %r"
+ raise AddressValueError(msg % ip_str) # :$ requires ::$
+ parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo)
+ if parts_skipped < 1:
+ msg = "Expected at most %d other parts with '::' in %r"
+ raise AddressValueError(msg % (cls._HEXTET_COUNT - 1, ip_str))
+ else:
+ # Otherwise, allocate the entire address to parts_hi. The
+ # endpoints could still be empty, but _parse_hextet() will check
+ # for that.
+ if len(parts) != cls._HEXTET_COUNT:
+ msg = "Exactly %d parts expected without '::' in %r"
+ raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str))
+ if not parts[0]:
+ msg = "Leading ':' only permitted as part of '::' in %r"
+ raise AddressValueError(msg % ip_str) # ^: requires ^::
+ if not parts[-1]:
+ msg = "Trailing ':' only permitted as part of '::' in %r"
+ raise AddressValueError(msg % ip_str) # :$ requires ::$
+ parts_hi = len(parts)
+ parts_lo = 0
+ parts_skipped = 0
+
+ try:
+ # Now, parse the hextets into a 128-bit integer.
+ ip_int = 0
+ for i in range(parts_hi):
+ ip_int <<= 16
+ ip_int |= cls._parse_hextet(parts[i])
+ ip_int <<= 16 * parts_skipped
+ for i in range(-parts_lo, 0):
+ ip_int <<= 16
+ ip_int |= cls._parse_hextet(parts[i])
+ return ip_int
+ except ValueError as exc:
+ raise AddressValueError("%s in %r" % (exc, ip_str))
+
+ @classmethod
+ def _parse_hextet(cls, hextet_str):
+ """Convert an IPv6 hextet string into an integer.
+
+ Args:
+ hextet_str: A string, the number to parse.
+
+ Returns:
+ The hextet as an integer.
+
+ Raises:
+ ValueError: if the input isn't strictly a hex number from
+ [0..FFFF].
+
+ """
+ # Whitelist the characters, since int() allows a lot of bizarre stuff.
+ if not cls._HEX_DIGITS.issuperset(hextet_str):
+ raise ValueError("Only hex digits permitted in %r" % hextet_str)
+ # We do the length check second, since the invalid character error
+ # is likely to be more informative for the user
+ if len(hextet_str) > 4:
+ msg = "At most 4 characters permitted in %r"
+ raise ValueError(msg % hextet_str)
+ # Length check means we can skip checking the integer value
+ return int(hextet_str, 16)
+
+ @classmethod
+ def _compress_hextets(cls, hextets):
+ """Compresses a list of hextets.
+
+ Compresses a list of strings, replacing the longest continuous
+ sequence of "0" in the list with "" and adding empty strings at
+ the beginning or at the end of the string such that subsequently
+ calling ":".join(hextets) will produce the compressed version of
+ the IPv6 address.
+
+ Args:
+ hextets: A list of strings, the hextets to compress.
+
+ Returns:
+ A list of strings.
+
+ """
+ best_doublecolon_start = -1
+ best_doublecolon_len = 0
+ doublecolon_start = -1
+ doublecolon_len = 0
+ for index, hextet in enumerate(hextets):
+ if hextet == '0':
+ doublecolon_len += 1
+ if doublecolon_start == -1:
+ # Start of a sequence of zeros.
+ doublecolon_start = index
+ if doublecolon_len > best_doublecolon_len:
+ # This is the longest sequence of zeros so far.
+ best_doublecolon_len = doublecolon_len
+ best_doublecolon_start = doublecolon_start
+ else:
+ doublecolon_len = 0
+ doublecolon_start = -1
+
+ if best_doublecolon_len > 1:
+ best_doublecolon_end = (best_doublecolon_start +
+ best_doublecolon_len)
+ # For zeros at the end of the address.
+ if best_doublecolon_end == len(hextets):
+ hextets += ['']
+ hextets[best_doublecolon_start:best_doublecolon_end] = ['']
+ # For zeros at the beginning of the address.
+ if best_doublecolon_start == 0:
+ hextets = [''] + hextets
+
+ return hextets
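+
+ # Example (illustrative): the longest run of zero hextets collapses so
+ # that ':'.join() produces the '::' shorthand:
+ #
+ #   _BaseV6._compress_hextets(['2001', 'db8', '0', '0', '0', '0', '0', '1'])
+ #   == ['2001', 'db8', '', '1']    # ':'.join(...) -> '2001:db8::1'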
+
+ @classmethod
+ def _string_from_ip_int(cls, ip_int=None):
+ """Turns a 128-bit integer into hexadecimal notation.
+
+ Args:
+ ip_int: An integer, the IP address.
+
+ Returns:
+ A string, the hexadecimal representation of the address.
+
+ Raises:
+ ValueError: The address is bigger than 128 bits of all ones.
+
+ """
+ if ip_int is None:
+ ip_int = int(cls._ip)
+
+ if ip_int > cls._ALL_ONES:
+ raise ValueError('IPv6 address is too large')
+
+ hex_str = '%032x' % ip_int
+ hextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)]
+
+ hextets = cls._compress_hextets(hextets)
+ return ':'.join(hextets)
+
+ def _explode_shorthand_ip_string(self):
+ """Expand a shortened IPv6 address.
+
+ Args:
+ ip_str: A string, the IPv6 address.
+
+ Returns:
+ A string, the expanded IPv6 address.
+
+ """
+ if isinstance(self, IPv6Network):
+ ip_str = _compat_str(self.network_address)
+ elif isinstance(self, IPv6Interface):
+ ip_str = _compat_str(self.ip)
+ else:
+ ip_str = _compat_str(self)
+
+ ip_int = self._ip_int_from_string(ip_str)
+ hex_str = '%032x' % ip_int
+ parts = [hex_str[x:x + 4] for x in range(0, 32, 4)]
+ if isinstance(self, (_BaseNetwork, IPv6Interface)):
+ return '%s/%d' % (':'.join(parts), self._prefixlen)
+ return ':'.join(parts)
+
+ def _reverse_pointer(self):
+ """Return the reverse DNS pointer name for the IPv6 address.
+
+ This implements the method described in RFC3596 2.5.
+
+ """
+ reverse_chars = self.exploded[::-1].replace(':', '')
+ return '.'.join(reverse_chars) + '.ip6.arpa'
+
+ @property
+ def max_prefixlen(self):
+ return self._max_prefixlen
+
+ @property
+ def version(self):
+ return self._version
+
+
+class IPv6Address(_BaseV6, _BaseAddress):
+
+ """Represent and manipulate single IPv6 Addresses."""
+
+ __slots__ = ('_ip', '__weakref__')
+
+ def __init__(self, address):
+ """Instantiate a new IPv6 address object.
+
+ Args:
+ address: A string or integer representing the IP
+
+ Additionally, an integer can be passed, so
+ IPv6Address('2001:db8::') ==
+ IPv6Address(42540766411282592856903984951653826560)
+ or, more generally
+ IPv6Address(int(IPv6Address('2001:db8::'))) ==
+ IPv6Address('2001:db8::')
+
+ Raises:
+ AddressValueError: If address isn't a valid IPv6 address.
+
+ """
+ # Efficient constructor from integer.
+ if isinstance(address, _compat_int_types):
+ self._check_int_address(address)
+ self._ip = address
+ return
+
+ # Constructing from a packed address
+ if isinstance(address, bytes):
+ self._check_packed_address(address, 16)
+ bvs = _compat_bytes_to_byte_vals(address)
+ self._ip = _compat_int_from_byte_vals(bvs, 'big')
+ return
+
+ # Assume input argument to be string or any object representation
+ # which converts into a formatted IP string.
+ addr_str = _compat_str(address)
+ if '/' in addr_str:
+ raise AddressValueError("Unexpected '/' in %r" % address)
+ self._ip = self._ip_int_from_string(addr_str)
+
+ @property
+ def packed(self):
+ """The binary representation of this address."""
+ return v6_int_to_packed(self._ip)
+
+ @property
+ def is_multicast(self):
+ """Test if the address is reserved for multicast use.
+
+ Returns:
+ A boolean, True if the address is a multicast address.
+ See RFC 2373 2.7 for details.
+
+ """
+ return self in self._constants._multicast_network
+
+ @property
+ def is_reserved(self):
+ """Test if the address is otherwise IETF reserved.
+
+ Returns:
+ A boolean, True if the address is within one of the
+ reserved IPv6 Network ranges.
+
+ """
+ return any(self in x for x in self._constants._reserved_networks)
+
+ @property
+ def is_link_local(self):
+ """Test if the address is reserved for link-local.
+
+ Returns:
+ A boolean, True if the address is reserved per RFC 4291.
+
+ """
+ return self in self._constants._linklocal_network
+
+ @property
+ def is_site_local(self):
+ """Test if the address is reserved for site-local.
+
+ Note that the site-local address space has been deprecated by RFC 3879.
+ Use is_private to test if this address is in the space of unique local
+ addresses as defined by RFC 4193.
+
+ Returns:
+ A boolean, True if the address is reserved per RFC 3513 2.5.6.
+
+ """
+ return self in self._constants._sitelocal_network
+
+ @property
+ def is_private(self):
+ """Test if this address is allocated for private networks.
+
+ Returns:
+ A boolean, True if the address is reserved per
+ iana-ipv6-special-registry.
+
+ """
+ return any(self in net for net in self._constants._private_networks)
+
+ @property
+ def is_global(self):
+ """Test if this address is allocated for public networks.
+
+ Returns:
+ A boolean, true if the address is not reserved per
+ iana-ipv6-special-registry.
+
+ """
+ return not self.is_private
+
+ @property
+ def is_unspecified(self):
+ """Test if the address is unspecified.
+
+ Returns:
+ A boolean, True if this is the unspecified address as defined in
+ RFC 2373 2.5.2.
+
+ """
+ return self._ip == 0
+
+ @property
+ def is_loopback(self):
+ """Test if the address is a loopback address.
+
+ Returns:
+ A boolean, True if the address is a loopback address as defined in
+ RFC 2373 2.5.3.
+
+ """
+ return self._ip == 1
+
+ @property
+ def ipv4_mapped(self):
+ """Return the IPv4 mapped address.
+
+ Returns:
+ If the IPv6 address is a v4 mapped address, return the
+ IPv4 mapped address. Return None otherwise.
+
+ """
+ if (self._ip >> 32) != 0xFFFF:
+ return None
+ return IPv4Address(self._ip & 0xFFFFFFFF)
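+
+ # Example (illustrative):
+ #
+ #   IPv6Address(u'::ffff:192.0.2.1').ipv4_mapped == IPv4Address(u'192.0.2.1')
+ #   IPv6Address(u'2001:db8::1').ipv4_mapped is None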
+
+ @property
+ def teredo(self):
+ """Tuple of embedded teredo IPs.
+
+ Returns:
+ Tuple of the (server, client) IPs or None if the address
+ doesn't appear to be a teredo address (doesn't start with
+ 2001::/32)
+
+ """
+ if (self._ip >> 96) != 0x20010000:
+ return None
+ return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF),
+ IPv4Address(~self._ip & 0xFFFFFFFF))
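+
+ # Example (illustrative, matching the stdlib documentation's example):
+ #
+ #   IPv6Address(u'2001:0:4136:e378:8000:63bf:3fff:fdd2').teredo
+ #   == (IPv4Address(u'65.54.227.120'), IPv4Address(u'192.0.2.45'))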
+
+ @property
+ def sixtofour(self):
+ """Return the IPv4 6to4 embedded address.
+
+ Returns:
+ The IPv4 6to4-embedded address if present or None if the
+ address doesn't appear to contain a 6to4 embedded address.
+
+ """
+ if (self._ip >> 112) != 0x2002:
+ return None
+ return IPv4Address((self._ip >> 80) & 0xFFFFFFFF)
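+
+ # Example (illustrative):
+ #
+ #   IPv6Address(u'2002:c000:204::').sixtofour == IPv4Address(u'192.0.2.4')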
+
+
+class IPv6Interface(IPv6Address):
+
+ def __init__(self, address):
+ if isinstance(address, (bytes, _compat_int_types)):
+ IPv6Address.__init__(self, address)
+ self.network = IPv6Network(self._ip)
+ self._prefixlen = self._max_prefixlen
+ return
+ if isinstance(address, tuple):
+ IPv6Address.__init__(self, address[0])
+ if len(address) > 1:
+ self._prefixlen = int(address[1])
+ else:
+ self._prefixlen = self._max_prefixlen
+ self.network = IPv6Network(address, strict=False)
+ self.netmask = self.network.netmask
+ self.hostmask = self.network.hostmask
+ return
+
+ addr = _split_optional_netmask(address)
+ IPv6Address.__init__(self, addr[0])
+ self.network = IPv6Network(address, strict=False)
+ self.netmask = self.network.netmask
+ self._prefixlen = self.network._prefixlen
+ self.hostmask = self.network.hostmask
+
+ def __str__(self):
+ return '%s/%d' % (self._string_from_ip_int(self._ip),
+ self.network.prefixlen)
+
+ def __eq__(self, other):
+ address_equal = IPv6Address.__eq__(self, other)
+ if not address_equal or address_equal is NotImplemented:
+ return address_equal
+ try:
+ return self.network == other.network
+ except AttributeError:
+ # An interface with an associated network is NOT the
+ # same as an unassociated address. That's why the hash
+ # takes the extra info into account.
+ return False
+
+ def __lt__(self, other):
+ address_less = IPv6Address.__lt__(self, other)
+ if address_less is NotImplemented:
+ return NotImplemented
+ try:
+ return (self.network < other.network or
+ self.network == other.network and address_less)
+ except AttributeError:
+ # We *do* allow addresses and interfaces to be sorted. The
+ # unassociated address is considered less than all interfaces.
+ return False
+
+ def __hash__(self):
+ return self._ip ^ self._prefixlen ^ int(self.network.network_address)
+
+ __reduce__ = _IPAddressBase.__reduce__
+
+ @property
+ def ip(self):
+ return IPv6Address(self._ip)
+
+ @property
+ def with_prefixlen(self):
+ return '%s/%s' % (self._string_from_ip_int(self._ip),
+ self._prefixlen)
+
+ @property
+ def with_netmask(self):
+ return '%s/%s' % (self._string_from_ip_int(self._ip),
+ self.netmask)
+
+ @property
+ def with_hostmask(self):
+ return '%s/%s' % (self._string_from_ip_int(self._ip),
+ self.hostmask)
+
+ @property
+ def is_unspecified(self):
+ return self._ip == 0 and self.network.is_unspecified
+
+ @property
+ def is_loopback(self):
+ return self._ip == 1 and self.network.is_loopback
+
+
+class IPv6Network(_BaseV6, _BaseNetwork):
+
+ """This class represents and manipulates 128-bit IPv6 networks.
+
+ Attributes: [examples for IPv6Network('2001:db8::1000/124')]
+ .network_address: IPv6Address('2001:db8::1000')
+ .hostmask: IPv6Address('::f')
+ .broadcast_address: IPv6Address('2001:db8::100f')
+ .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0')
+ .prefixlen: 124
+
+ """
+
+ # Class to use when creating address objects
+ _address_class = IPv6Address
+
+ def __init__(self, address, strict=True):
+ """Instantiate a new IPv6 Network object.
+
+ Args:
+ address: A string or integer representing the IPv6 network or the
+ IP and prefix/netmask.
+ '2001:db8::/128'
+ '2001:db8:0000:0000:0000:0000:0000:0000/128'
+ '2001:db8::'
+ are all functionally the same in IPv6. That is to say,
+ failing to provide a subnetmask will create an object with
+ a mask of /128.
+
+ Additionally, an integer can be passed, so
+ IPv6Network('2001:db8::') ==
+ IPv6Network(42540766411282592856903984951653826560)
+ or, more generally
+ IPv6Network(int(IPv6Network('2001:db8::'))) ==
+ IPv6Network('2001:db8::')
+
+ strict: A boolean. If true, ensure that we have been passed
+ a true network address, e.g., 2001:db8::1000/124, and not an
+ IP address on a network, e.g., 2001:db8::1/124.
+
+ Raises:
+ AddressValueError: If address isn't a valid IPv6 address.
+ NetmaskValueError: If the netmask isn't valid for
+ an IPv6 address.
+ ValueError: If strict was True and a network address was not
+ supplied.
+
+ """
+ _BaseNetwork.__init__(self, address)
+
+ # Efficient constructor from integer or packed address
+ if isinstance(address, (bytes, _compat_int_types)):
+ self.network_address = IPv6Address(address)
+ self.netmask, self._prefixlen = self._make_netmask(
+ self._max_prefixlen)
+ return
+
+ if isinstance(address, tuple):
+ if len(address) > 1:
+ arg = address[1]
+ else:
+ arg = self._max_prefixlen
+ self.netmask, self._prefixlen = self._make_netmask(arg)
+ self.network_address = IPv6Address(address[0])
+ packed = int(self.network_address)
+ if packed & int(self.netmask) != packed:
+ if strict:
+ raise ValueError('%s has host bits set' % self)
+ else:
+ self.network_address = IPv6Address(packed &
+ int(self.netmask))
+ return
+
+ # Assume input argument to be string or any object representation
+ # which converts into a formatted IP prefix string.
+ addr = _split_optional_netmask(address)
+
+ self.network_address = IPv6Address(self._ip_int_from_string(addr[0]))
+
+ if len(addr) == 2:
+ arg = addr[1]
+ else:
+ arg = self._max_prefixlen
+ self.netmask, self._prefixlen = self._make_netmask(arg)
+
+ if strict:
+ if (IPv6Address(int(self.network_address) & int(self.netmask)) !=
+ self.network_address):
+ raise ValueError('%s has host bits set' % self)
+ self.network_address = IPv6Address(int(self.network_address) &
+ int(self.netmask))
+
+ if self._prefixlen == (self._max_prefixlen - 1):
+ self.hosts = self.__iter__
+
+ def hosts(self):
+ """Generate Iterator over usable hosts in a network.
+
+ This is like __iter__ except it doesn't return the
+ Subnet-Router anycast address.
+
+ """
+ network = int(self.network_address)
+ broadcast = int(self.broadcast_address)
+ for x in _compat_range(network + 1, broadcast + 1):
+ yield self._address_class(x)
+
+ @property
+ def is_site_local(self):
+ """Test if the address is reserved for site-local.
+
+ Note that the site-local address space has been deprecated by RFC 3879.
+ Use is_private to test if this address is in the space of unique local
+ addresses as defined by RFC 4193.
+
+ Returns:
+ A boolean, True if the address is reserved per RFC 3513 2.5.6.
+
+ """
+ return (self.network_address.is_site_local and
+ self.broadcast_address.is_site_local)
+
+
+class _IPv6Constants(object):
+
+ _linklocal_network = IPv6Network('fe80::/10')
+
+ _multicast_network = IPv6Network('ff00::/8')
+
+ _private_networks = [
+ IPv6Network('::1/128'),
+ IPv6Network('::/128'),
+ IPv6Network('::ffff:0:0/96'),
+ IPv6Network('100::/64'),
+ IPv6Network('2001::/23'),
+ IPv6Network('2001:2::/48'),
+ IPv6Network('2001:db8::/32'),
+ IPv6Network('2001:10::/28'),
+ IPv6Network('fc00::/7'),
+ IPv6Network('fe80::/10'),
+ ]
+
+ _reserved_networks = [
+ IPv6Network('::/8'), IPv6Network('100::/8'),
+ IPv6Network('200::/7'), IPv6Network('400::/6'),
+ IPv6Network('800::/5'), IPv6Network('1000::/4'),
+ IPv6Network('4000::/3'), IPv6Network('6000::/3'),
+ IPv6Network('8000::/3'), IPv6Network('A000::/3'),
+ IPv6Network('C000::/3'), IPv6Network('E000::/4'),
+ IPv6Network('F000::/5'), IPv6Network('F800::/6'),
+ IPv6Network('FE00::/9'),
+ ]
+
+ _sitelocal_network = IPv6Network('fec0::/10')
+
+
+IPv6Address._constants = _IPv6Constants
diff --git a/contrib/deprecated/python/ipaddress/ya.make b/contrib/deprecated/python/ipaddress/ya.make
new file mode 100644
index 0000000000..83f3742b1c
--- /dev/null
+++ b/contrib/deprecated/python/ipaddress/ya.make
@@ -0,0 +1,22 @@
+# Generated by devtools/yamaker (pypi).
+
+PY2_LIBRARY()
+
+VERSION(1.0.23)
+
+LICENSE(PSF-2.0)
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ ipaddress.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/deprecated/python/ipaddress/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+)
+
+END()
diff --git a/contrib/deprecated/python/scandir/.dist-info/METADATA b/contrib/deprecated/python/scandir/.dist-info/METADATA
new file mode 100644
index 0000000000..ee4b11a523
--- /dev/null
+++ b/contrib/deprecated/python/scandir/.dist-info/METADATA
@@ -0,0 +1,238 @@
+Metadata-Version: 2.1
+Name: scandir
+Version: 1.10.0
+Summary: scandir, a better directory iterator and faster os.walk()
+Home-page: https://github.com/benhoyt/scandir
+Author: Ben Hoyt
+Author-email: benhoyt@gmail.com
+License: New BSD License
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: OS Independent
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Topic :: System :: Filesystems
+Classifier: Topic :: System :: Operating System
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: Implementation :: CPython
+
+
+scandir, a better directory iterator and faster os.walk()
+=========================================================
+
+.. image:: https://img.shields.io/pypi/v/scandir.svg
+ :target: https://pypi.python.org/pypi/scandir
+ :alt: scandir on PyPI (Python Package Index)
+
+.. image:: https://travis-ci.org/benhoyt/scandir.svg?branch=master
+ :target: https://travis-ci.org/benhoyt/scandir
+ :alt: Travis CI tests (Linux)
+
+.. image:: https://ci.appveyor.com/api/projects/status/github/benhoyt/scandir?branch=master&svg=true
+ :target: https://ci.appveyor.com/project/benhoyt/scandir
+ :alt: Appveyor tests (Windows)
+
+
+``scandir()`` is a directory iteration function like ``os.listdir()``,
+except that instead of returning a list of bare filenames, it yields
+``DirEntry`` objects that include file type and stat information along
+with the name. Using ``scandir()`` increases the speed of ``os.walk()``
+by 2-20 times (depending on the platform and file system) by avoiding
+unnecessary calls to ``os.stat()`` in most cases.
+
+
+Now included in a Python near you!
+----------------------------------
+
+``scandir`` has been included in the Python 3.5 standard library as
+``os.scandir()``, and the related performance improvements to
+``os.walk()`` have also been included. So if you're lucky enough to be
+using Python 3.5 (release date September 13, 2015) you get the benefit
+immediately, otherwise just
+`download this module from PyPI <https://pypi.python.org/pypi/scandir>`_,
+install it with ``pip install scandir``, and then do something like
+this in your code:
+
+.. code-block:: python
+
+ # Use the built-in version of scandir/walk if possible, otherwise
+ # use the scandir module version
+ try:
+ from os import scandir, walk
+ except ImportError:
+ from scandir import scandir, walk
+
+`PEP 471 <https://www.python.org/dev/peps/pep-0471/>`_, which is the
+PEP that proposes including ``scandir`` in the Python standard library,
+was `accepted <https://mail.python.org/pipermail/python-dev/2014-July/135561.html>`_
+in July 2014 by Victor Stinner, the BDFL-delegate for the PEP.
+
+This ``scandir`` module is intended to work on Python 2.7+ and Python
+3.4+ (and it has been tested on those versions).
+
+
+Background
+----------
+
+Python's built-in ``os.walk()`` is significantly slower than it needs to be,
+because -- in addition to calling ``listdir()`` on each directory -- it calls
+``stat()`` on each file to determine whether the filename is a directory or not.
+But both ``FindFirstFile`` / ``FindNextFile`` on Windows and ``readdir`` on Linux/OS
+X already tell you whether the files returned are directories or not, so
+no further ``stat`` system calls are needed. In short, you can reduce the number
+of system calls from about 2N to N, where N is the total number of files and
+directories in the tree.
+
+In practice, removing all those extra system calls makes ``os.walk()`` about
+**7-50 times as fast on Windows, and about 3-10 times as fast on Linux and Mac OS
+X.** So we're not talking about micro-optimizations. See more benchmarks
+in the "Benchmarks" section below.
+
+Somewhat relatedly, many people have also asked for a version of
+``os.listdir()`` that yields filenames as it iterates instead of returning them
+as one big list. This improves memory efficiency for iterating very large
+directories.
+
+So as well as a faster ``walk()``, scandir adds a new ``scandir()`` function.
+They're pretty easy to use, but see "The API" below for the full docs.
+
+
+Benchmarks
+----------
+
+Below are results showing how many times faster ``scandir.walk()`` is than
+``os.walk()`` on various systems, found by running ``benchmark.py`` with no
+arguments:
+
+==================== ============== =============
+System version Python version Times as fast
+==================== ============== =============
+Windows 7 64-bit 2.7.7 64-bit 10.4
+Windows 7 64-bit SSD 2.7.7 64-bit 10.3
+Windows 7 64-bit NFS 2.7.6 64-bit 36.8
+Windows 7 64-bit SSD 3.4.1 64-bit 9.9
+Windows 7 64-bit SSD 3.5.0 64-bit 9.5
+Ubuntu 14.04 64-bit 2.7.6 64-bit 5.8
+Mac OS X 10.9.3 2.7.5 64-bit 3.8
+==================== ============== =============
+
+All of the above tests were done using the fast C version of scandir
+(source code in ``_scandir.c``).
+
+Note that the gains are less than the above on smaller directories and greater
+on larger directories. This is why ``benchmark.py`` creates a test directory
+tree with a standardized size.
+
+
+The API
+-------
+
+walk()
+~~~~~~
+
+The API for ``scandir.walk()`` is exactly the same as ``os.walk()``, so just
+`read the Python docs <https://docs.python.org/3.5/library/os.html#os.walk>`_.
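+
+For example, counting the files in a tree reads exactly as it would with
+``os.walk()``. This small sketch imports ``scandir`` as well so that the
+fallback fires on Pythons without the built-in version:
+
+.. code-block:: python
+
+    try:
+        from os import scandir, walk
+    except ImportError:
+        from scandir import scandir, walk
+
+    file_count = 0
+    for root, dirs, files in walk('.'):
+        file_count += len(files)
+    print(file_count)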
+
+scandir()
+~~~~~~~~~
+
+The full docs for ``scandir()`` and the ``DirEntry`` objects it yields are
+available in the `Python documentation here <https://docs.python.org/3.5/library/os.html#os.scandir>`_.
+But below is a brief summary as well::
+
+ scandir(path='.') -> iterator of DirEntry objects for given path
+
+Like ``listdir``, ``scandir`` calls the operating system's directory
+iteration system calls to get the names of the files in the given
+``path``, but it's different from ``listdir`` in two ways:
+
+* Instead of returning bare filename strings, it returns lightweight
+ ``DirEntry`` objects that hold the filename string and provide
+ simple methods that allow access to the additional data the
+ operating system may have returned.
+
+* It returns a generator instead of a list, so that ``scandir`` acts
+ as a true iterator instead of returning the full list immediately.
+
+``scandir()`` yields a ``DirEntry`` object for each file and
+sub-directory in ``path``. Just like ``listdir``, the ``'.'``
+and ``'..'`` pseudo-directories are skipped, and the entries are
+yielded in system-dependent order. Each ``DirEntry`` object has the
+following attributes and methods:
+
+* ``name``: the entry's filename, relative to the scandir ``path``
+ argument (corresponds to the return values of ``os.listdir``)
+
+* ``path``: the entry's full path name (not necessarily an absolute
+ path) -- the equivalent of ``os.path.join(scandir_path, entry.name)``
+
+* ``is_dir(*, follow_symlinks=True)``: similar to
+ ``pathlib.Path.is_dir()``, but the return value is cached on the
+ ``DirEntry`` object; doesn't require a system call in most cases;
+ don't follow symbolic links if ``follow_symlinks`` is False
+
+* ``is_file(*, follow_symlinks=True)``: similar to
+ ``pathlib.Path.is_file()``, but the return value is cached on the
+ ``DirEntry`` object; doesn't require a system call in most cases;
+ don't follow symbolic links if ``follow_symlinks`` is False
+
+* ``is_symlink()``: similar to ``pathlib.Path.is_symlink()``, but the
+ return value is cached on the ``DirEntry`` object; doesn't require a
+ system call in most cases
+
+* ``stat(*, follow_symlinks=True)``: like ``os.stat()``, but the
+ return value is cached on the ``DirEntry`` object; does not require a
+ system call on Windows (except for symlinks); don't follow symbolic links
+ (like ``os.lstat()``) if ``follow_symlinks`` is False
+
+* ``inode()``: return the inode number of the entry; the return value
+ is cached on the ``DirEntry`` object
+
+Here's a very simple example of ``scandir()`` showing use of the
+``DirEntry.name`` attribute and the ``DirEntry.is_dir()`` method:
+
+.. code-block:: python
+
+ def subdirs(path):
+ """Yield directory names not starting with '.' under given path."""
+ for entry in os.scandir(path):
+ if not entry.name.startswith('.') and entry.is_dir():
+ yield entry.name
+
+This ``subdirs()`` function will be significantly faster with scandir
+than with ``os.listdir()`` and ``os.path.isdir()`` on both Windows and POSIX
+systems, especially on medium-sized or large directories.
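+
+For a slightly bigger sketch in the same spirit, here's a variant of the
+``get_tree_size()`` example from PEP 471, which benefits from the cached
+file type and stat information on every entry:
+
+.. code-block:: python
+
+    try:
+        from os import scandir
+    except ImportError:
+        from scandir import scandir
+
+    def get_tree_size(path):
+        """Return total size in bytes of all regular files under path."""
+        total = 0
+        for entry in scandir(path):
+            if entry.is_dir(follow_symlinks=False):
+                total += get_tree_size(entry.path)
+            elif entry.is_file(follow_symlinks=False):
+                total += entry.stat(follow_symlinks=False).st_size
+        return total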
+
+
+Further reading
+---------------
+
+* `The Python docs for scandir <https://docs.python.org/3.5/library/os.html#os.scandir>`_
+* `PEP 471 <https://www.python.org/dev/peps/pep-0471/>`_, the
+ (now-accepted) Python Enhancement Proposal that proposed adding
+ ``scandir`` to the standard library -- a lot of details here,
+ including rejected ideas and previous discussion
+
+
+Flames, comments, bug reports
+-----------------------------
+
+Please send flames, comments, and questions about scandir to Ben Hoyt:
+
+http://benhoyt.com/
+
+File bug reports for the version in the Python 3.5 standard library
+`here <https://docs.python.org/3.5/bugs.html>`_, or file bug reports
+or feature requests for this module at the GitHub project page:
+
+https://github.com/benhoyt/scandir
+
+
diff --git a/contrib/deprecated/python/scandir/.dist-info/top_level.txt b/contrib/deprecated/python/scandir/.dist-info/top_level.txt
new file mode 100644
index 0000000000..b13832ba1d
--- /dev/null
+++ b/contrib/deprecated/python/scandir/.dist-info/top_level.txt
@@ -0,0 +1,2 @@
+_scandir
+scandir
diff --git a/contrib/deprecated/python/scandir/LICENSE.txt b/contrib/deprecated/python/scandir/LICENSE.txt
new file mode 100644
index 0000000000..0759f503f2
--- /dev/null
+++ b/contrib/deprecated/python/scandir/LICENSE.txt
@@ -0,0 +1,27 @@
+Copyright (c) 2012, Ben Hoyt
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation
+and/or other materials provided with the distribution.
+
+* Neither the name of Ben Hoyt nor the names of its contributors may be used
+to endorse or promote products derived from this software without specific
+prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/contrib/deprecated/python/scandir/README.rst b/contrib/deprecated/python/scandir/README.rst
new file mode 100644
index 0000000000..5059db138a
--- /dev/null
+++ b/contrib/deprecated/python/scandir/README.rst
@@ -0,0 +1,210 @@
+
+scandir, a better directory iterator and faster os.walk()
+=========================================================
+
+.. image:: https://img.shields.io/pypi/v/scandir.svg
+ :target: https://pypi.python.org/pypi/scandir
+ :alt: scandir on PyPI (Python Package Index)
+
+.. image:: https://travis-ci.org/benhoyt/scandir.svg?branch=master
+ :target: https://travis-ci.org/benhoyt/scandir
+ :alt: Travis CI tests (Linux)
+
+.. image:: https://ci.appveyor.com/api/projects/status/github/benhoyt/scandir?branch=master&svg=true
+ :target: https://ci.appveyor.com/project/benhoyt/scandir
+ :alt: Appveyor tests (Windows)
+
+
+``scandir()`` is a directory iteration function like ``os.listdir()``,
+except that instead of returning a list of bare filenames, it yields
+``DirEntry`` objects that include file type and stat information along
+with the name. Using ``scandir()`` increases the speed of ``os.walk()``
+by 2-20 times (depending on the platform and file system) by avoiding
+unnecessary calls to ``os.stat()`` in most cases.
+
+
+Now included in a Python near you!
+----------------------------------
+
+``scandir`` has been included in the Python 3.5 standard library as
+``os.scandir()``, and the related performance improvements to
+``os.walk()`` have also been included. So if you're lucky enough to be
+using Python 3.5 (released September 13, 2015), you get the benefit
+immediately; otherwise, just
+`download this module from PyPI <https://pypi.python.org/pypi/scandir>`_,
+install it with ``pip install scandir``, and then do something like
+this in your code:
+
+.. code-block:: python
+
+ # Use the built-in version of scandir/walk if possible, otherwise
+ # use the scandir module version
+ try:
+ from os import scandir, walk
+ except ImportError:
+ from scandir import scandir, walk
+
+`PEP 471 <https://www.python.org/dev/peps/pep-0471/>`_, the PEP that
+proposed including ``scandir`` in the Python standard library,
+was `accepted <https://mail.python.org/pipermail/python-dev/2014-July/135561.html>`_
+in July 2014 by Victor Stinner, the BDFL-delegate for the PEP.
+
+This ``scandir`` module is intended to work on Python 2.7+ and Python
+3.4+ (and it has been tested on those versions).
+
+
+Background
+----------
+
+Python's built-in ``os.walk()`` is significantly slower than it needs to be,
+because -- in addition to calling ``listdir()`` on each directory -- it calls
+``stat()`` on each file to determine whether the filename is a directory or not.
+But both ``FindFirstFile`` / ``FindNextFile`` on Windows and ``readdir`` on Linux/OS
+X already tell you whether the files returned are directories or not, so
+no further ``stat`` system calls are needed. In short, you can reduce the number
+of system calls from about 2N to N, where N is the total number of files and
+directories in the tree.
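+
+To make the saving concrete, here's a simplified sketch of the same "find
+the subdirectories" task done both ways (the path ``'.'`` is only an
+example):
+
+.. code-block:: python
+
+    import os
+
+    # listdir-based: one extra stat() call per entry to classify it
+    names = os.listdir('.')
+    dirs = [n for n in names if os.path.isdir(os.path.join('.', n))]
+
+    # scandir-based: the directory scan itself usually reports each entry's
+    # type, so is_dir() needs no extra system call in most cases
+    try:
+        from os import scandir
+    except ImportError:
+        from scandir import scandir
+    dirs = [entry.name for entry in scandir('.') if entry.is_dir()]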
+
+In practice, removing all those extra system calls makes ``os.walk()`` about
+**7-50 times as fast on Windows, and about 3-10 times as fast on Linux and Mac OS
+X.** So we're not talking about micro-optimizations. See more benchmarks
+in the "Benchmarks" section below.
+
+Somewhat relatedly, many people have also asked for a version of
+``os.listdir()`` that yields filenames as it iterates instead of returning them
+as one big list. This improves memory efficiency for iterating very large
+directories.
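+
+Because ``scandir()`` returns a generator, entries are fetched on demand
+rather than all at once. Here's a minimal sketch of that behaviour, reusing
+the ``scandir`` import fallback shown earlier:
+
+.. code-block:: python
+
+    it = scandir('.')    # no full list of names is built up front
+    entry = next(it)     # entries are produced one at a time
+    print(entry.name)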
+
+So as well as a faster ``walk()``, scandir adds a new ``scandir()`` function.
+They're pretty easy to use, but see "The API" below for the full docs.
+
+
+Benchmarks
+----------
+
+Below are results showing how many times faster ``scandir.walk()`` is than
+``os.walk()`` on various systems, found by running ``benchmark.py`` with no
+arguments:
+
+==================== ============== =============
+System version Python version Times as fast
+==================== ============== =============
+Windows 7 64-bit 2.7.7 64-bit 10.4
+Windows 7 64-bit SSD 2.7.7 64-bit 10.3
+Windows 7 64-bit NFS 2.7.6 64-bit 36.8
+Windows 7 64-bit SSD 3.4.1 64-bit 9.9
+Windows 7 64-bit SSD 3.5.0 64-bit 9.5
+Ubuntu 14.04 64-bit 2.7.6 64-bit 5.8
+Mac OS X 10.9.3 2.7.5 64-bit 3.8
+==================== ============== =============
+
+All of the above tests were done using the fast C version of scandir
+(source code in ``_scandir.c``).
+
+Note that the gains are less than the above on smaller directories and greater
+on larger directories. This is why ``benchmark.py`` creates a test directory
+tree with a standardized size.
+
+
+The API
+-------
+
+walk()
+~~~~~~
+
+The API for ``scandir.walk()`` is exactly the same as ``os.walk()``, so just
+`read the Python docs <https://docs.python.org/3.5/library/os.html#os.walk>`_.
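+
+For example, counting the files in a tree reads exactly as it would with
+``os.walk()``. This small sketch imports ``scandir`` as well so that the
+fallback fires on Pythons without the built-in version:
+
+.. code-block:: python
+
+    try:
+        from os import scandir, walk
+    except ImportError:
+        from scandir import scandir, walk
+
+    file_count = 0
+    for root, dirs, files in walk('.'):
+        file_count += len(files)
+    print(file_count)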
+
+scandir()
+~~~~~~~~~
+
+The full docs for ``scandir()`` and the ``DirEntry`` objects it yields are
+available in the `Python documentation here <https://docs.python.org/3.5/library/os.html#os.scandir>`_.
+But below is a brief summary as well::
+
+ scandir(path='.') -> iterator of DirEntry objects for given path
+
+Like ``listdir``, ``scandir`` calls the operating system's directory
+iteration system calls to get the names of the files in the given
+``path``, but it's different from ``listdir`` in two ways:
+
+* Instead of returning bare filename strings, it returns lightweight
+ ``DirEntry`` objects that hold the filename string and provide
+ simple methods that allow access to the additional data the
+ operating system may have returned.
+
+* It returns a generator instead of a list, so that ``scandir`` acts
+ as a true iterator instead of returning the full list immediately.
+
+``scandir()`` yields a ``DirEntry`` object for each file and
+sub-directory in ``path``. Just like ``listdir``, the ``'.'``
+and ``'..'`` pseudo-directories are skipped, and the entries are
+yielded in system-dependent order. Each ``DirEntry`` object has the
+following attributes and methods:
+
+* ``name``: the entry's filename, relative to the scandir ``path``
+ argument (corresponds to the return values of ``os.listdir``)
+
+* ``path``: the entry's full path name (not necessarily an absolute
+ path) -- the equivalent of ``os.path.join(scandir_path, entry.name)``
+
+* ``is_dir(*, follow_symlinks=True)``: similar to
+ ``pathlib.Path.is_dir()``, but the return value is cached on the
+ ``DirEntry`` object; doesn't require a system call in most cases;
+ don't follow symbolic links if ``follow_symlinks`` is False
+
+* ``is_file(*, follow_symlinks=True)``: similar to
+ ``pathlib.Path.is_file()``, but the return value is cached on the
+ ``DirEntry`` object; doesn't require a system call in most cases;
+ don't follow symbolic links if ``follow_symlinks`` is False
+
+* ``is_symlink()``: similar to ``pathlib.Path.is_symlink()``, but the
+ return value is cached on the ``DirEntry`` object; doesn't require a
+ system call in most cases
+
+* ``stat(*, follow_symlinks=True)``: like ``os.stat()``, but the
+ return value is cached on the ``DirEntry`` object; does not require a
+ system call on Windows (except for symlinks); don't follow symbolic links
+ (like ``os.lstat()``) if ``follow_symlinks`` is False
+
+* ``inode()``: return the inode number of the entry; the return value
+ is cached on the ``DirEntry`` object
+
+Here's a very simple example of ``scandir()`` showing use of the
+``DirEntry.name`` attribute and the ``DirEntry.is_dir()`` method:
+
+.. code-block:: python
+
+ def subdirs(path):
+ """Yield directory names not starting with '.' under given path."""
+ for entry in os.scandir(path):
+ if not entry.name.startswith('.') and entry.is_dir():
+ yield entry.name
+
+This ``subdirs()`` function will be significantly faster with scandir
+than with ``os.listdir()`` and ``os.path.isdir()`` on both Windows and POSIX
+systems, especially on medium-sized or large directories.
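+
+For a slightly bigger sketch in the same spirit, here's a variant of the
+``get_tree_size()`` example from PEP 471, which benefits from the cached
+file type and stat information on every entry:
+
+.. code-block:: python
+
+    try:
+        from os import scandir
+    except ImportError:
+        from scandir import scandir
+
+    def get_tree_size(path):
+        """Return total size in bytes of all regular files under path."""
+        total = 0
+        for entry in scandir(path):
+            if entry.is_dir(follow_symlinks=False):
+                total += get_tree_size(entry.path)
+            elif entry.is_file(follow_symlinks=False):
+                total += entry.stat(follow_symlinks=False).st_size
+        return total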
+
+
+Further reading
+---------------
+
+* `The Python docs for scandir <https://docs.python.org/3.5/library/os.html#os.scandir>`_
+* `PEP 471 <https://www.python.org/dev/peps/pep-0471/>`_, the
+ (now-accepted) Python Enhancement Proposal that proposed adding
+ ``scandir`` to the standard library -- a lot of details here,
+ including rejected ideas and previous discussion
+
+
+Flames, comments, bug reports
+-----------------------------
+
+Please send flames, comments, and questions about scandir to Ben Hoyt:
+
+http://benhoyt.com/
+
+File bug reports for the version in the Python 3.5 standard library
+`here <https://docs.python.org/3.5/bugs.html>`_, or file bug reports
+or feature requests for this module at the GitHub project page:
+
+https://github.com/benhoyt/scandir
diff --git a/contrib/deprecated/python/scandir/_scandir.c b/contrib/deprecated/python/scandir/_scandir.c
new file mode 100644
index 0000000000..1e1ca4b27a
--- /dev/null
+++ b/contrib/deprecated/python/scandir/_scandir.c
@@ -0,0 +1,1834 @@
+/* C speedups for scandir module
+
+This is divided into four sections (each prefixed with a "SECTION:"
+comment):
+
+1) Python 2/3 compatibility
+2) Helper utilities from posixmodule.c, fileutils.h, etc
+3) Main DirEntry and scandir implementation, taken from
+   Python 3.5's posixmodule.c
+4) Module and method definitions and initialization code
+
+*/
+
+#include <Python.h>
+#include <structseq.h>
+#include <structmember.h>
+#include "osdefs.h"
+
+#ifdef MS_WINDOWS
+#include <windows.h>
+#include <winioctl.h>
+#include "winreparse.h"
+#else
+#include <dirent.h>
+#ifndef HAVE_DIRENT_H
+#define HAVE_DIRENT_H 1
+#endif
+#endif
+
+#define MODNAME "scandir"
+
+
+/* SECTION: Python 2/3 compatibility */
+
+#if PY_MAJOR_VERSION >= 3
+#define INIT_ERROR return NULL
+#else
+#define INIT_ERROR return
+// On PyPy, Py_FileSystemDefaultEncoding is (or at least was) defined to be
+// NULL (see PyPy Bitbucket issue #2669)
+#define FS_ENCODING (Py_FileSystemDefaultEncoding ? Py_FileSystemDefaultEncoding : "UTF-8")
+#endif
+
+#if PY_MAJOR_VERSION < 3 || PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION <= 2
+#define _Py_IDENTIFIER(name) static char * PyId_##name = #name;
+#define _PyObject_GetAttrId(obj, pyid_name) PyObject_GetAttrString((obj), *(pyid_name))
+#define PyExc_FileNotFoundError PyExc_OSError
+#define PyUnicode_AsUnicodeAndSize(unicode, addr_length) \
+ PyUnicode_AsUnicode(unicode); *(addr_length) = PyUnicode_GetSize(unicode)
+#endif
+
+// PyPy also needs these compatibility defines, even on Python 3.3+
+#if PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION > 2 && defined(PYPY_VERSION_NUM)
+#define _Py_IDENTIFIER(name) static char * PyId_##name = #name;
+#define _PyObject_GetAttrId(obj, pyid_name) PyObject_GetAttrString((obj), *(pyid_name))
+#endif
+
+/* SECTION: Helper utilities from posixmodule.c, fileutils.h, etc */
+
+#if !defined(MS_WINDOWS) && defined(DT_UNKNOWN)
+#define HAVE_DIRENT_D_TYPE 1
+#endif
+
+#ifdef HAVE_DIRENT_H
+#include <dirent.h>
+#define NAMLEN(dirent) strlen((dirent)->d_name)
+#else
+#if defined(__WATCOMC__) && !defined(__QNX__)
+#include <direct.h>
+#define NAMLEN(dirent) strlen((dirent)->d_name)
+#else
+#define dirent direct
+#define NAMLEN(dirent) (dirent)->d_namlen
+#endif
+#ifdef HAVE_SYS_NDIR_H
+#include <sys/ndir.h>
+#endif
+#ifdef HAVE_SYS_DIR_H
+#include <sys/dir.h>
+#endif
+#ifdef HAVE_NDIR_H
+#include <ndir.h>
+#endif
+#endif
+
+#ifndef Py_CLEANUP_SUPPORTED
+#define Py_CLEANUP_SUPPORTED 0x20000
+#endif
+
+#ifndef S_IFLNK
+/* Windows doesn't define S_IFLNK but posixmodule.c maps
+ * IO_REPARSE_TAG_SYMLINK to S_IFLNK */
+# define S_IFLNK 0120000
+#endif
+
+// _Py_stat_struct is already defined in fileutils.h on Python 3.5+
+// But not in PyPy
+#if PY_MAJOR_VERSION < 3 || (PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION < 5) || defined(PYPY_VERSION_NUM)
+#ifdef MS_WINDOWS
+struct _Py_stat_struct {
+ unsigned long st_dev;
+ unsigned __int64 st_ino;
+ unsigned short st_mode;
+ int st_nlink;
+ int st_uid;
+ int st_gid;
+ unsigned long st_rdev;
+ __int64 st_size;
+ time_t st_atime;
+ int st_atime_nsec;
+ time_t st_mtime;
+ int st_mtime_nsec;
+ time_t st_ctime;
+ int st_ctime_nsec;
+ unsigned long st_file_attributes;
+};
+#else
+# define _Py_stat_struct stat
+#endif
+#endif
+
+/* choose the appropriate stat and fstat functions and return structs */
+#undef STAT
+#undef FSTAT
+#undef STRUCT_STAT
+#ifdef MS_WINDOWS
+# define STAT win32_stat
+# define LSTAT win32_lstat
+# define FSTAT _Py_fstat_noraise
+# define STRUCT_STAT struct _Py_stat_struct
+#else
+# define STAT stat
+# define LSTAT lstat
+# define FSTAT fstat
+# define STRUCT_STAT struct stat
+#endif
+
+#ifdef MS_WINDOWS
+
+static __int64 secs_between_epochs = 11644473600; /* Seconds between 1.1.1601 and 1.1.1970 */
+
+static void
+FILE_TIME_to_time_t_nsec(FILETIME *in_ptr, time_t *time_out, int* nsec_out)
+{
+ /* XXX endianness. Shouldn't matter, as all Windows implementations are little-endian */
+ /* Cannot simply cast and dereference in_ptr,
+ since it might not be aligned properly */
+ __int64 in;
+ memcpy(&in, in_ptr, sizeof(in));
+ *nsec_out = (int)(in % 10000000) * 100; /* FILETIME is in units of 100 nsec. */
+ *time_out = Py_SAFE_DOWNCAST((in / 10000000) - secs_between_epochs, __int64, time_t);
+}
+
+/* Below, we *know* that ugo+r is 0444 */
+#if _S_IREAD != 0400
+#error Unsupported C library
+#endif
+static int
+attributes_to_mode(DWORD attr)
+{
+ int m = 0;
+ if (attr & FILE_ATTRIBUTE_DIRECTORY)
+ m |= _S_IFDIR | 0111; /* IFEXEC for user,group,other */
+ else
+ m |= _S_IFREG;
+ if (attr & FILE_ATTRIBUTE_READONLY)
+ m |= 0444;
+ else
+ m |= 0666;
+ return m;
+}
+
+void
+_Py_attribute_data_to_stat(BY_HANDLE_FILE_INFORMATION *info, ULONG reparse_tag,
+ struct _Py_stat_struct *result)
+{
+ memset(result, 0, sizeof(*result));
+ result->st_mode = attributes_to_mode(info->dwFileAttributes);
+ result->st_size = (((__int64)info->nFileSizeHigh)<<32) + info->nFileSizeLow;
+ result->st_dev = info->dwVolumeSerialNumber;
+ result->st_rdev = result->st_dev;
+ FILE_TIME_to_time_t_nsec(&info->ftCreationTime, &result->st_ctime, &result->st_ctime_nsec);
+ FILE_TIME_to_time_t_nsec(&info->ftLastWriteTime, &result->st_mtime, &result->st_mtime_nsec);
+ FILE_TIME_to_time_t_nsec(&info->ftLastAccessTime, &result->st_atime, &result->st_atime_nsec);
+ result->st_nlink = info->nNumberOfLinks;
+ result->st_ino = (((unsigned __int64)info->nFileIndexHigh)<<32) + info->nFileIndexLow;
+ if (reparse_tag == IO_REPARSE_TAG_SYMLINK) {
+ /* first clear the S_IFMT bits */
+ result->st_mode ^= (result->st_mode & S_IFMT);
+ /* now set the bits that make this a symlink */
+ result->st_mode |= S_IFLNK;
+ }
+ result->st_file_attributes = info->dwFileAttributes;
+}
+
+static BOOL
+get_target_path(HANDLE hdl, wchar_t **target_path)
+{
+ int buf_size, result_length;
+ wchar_t *buf;
+
+ /* We have a good handle to the target, use it to determine
+ the target path name (then we'll call lstat on it). */
+ buf_size = GetFinalPathNameByHandleW(hdl, 0, 0,
+ VOLUME_NAME_DOS);
+ if(!buf_size)
+ return FALSE;
+
+ buf = PyMem_New(wchar_t, buf_size+1);
+ if (!buf) {
+ SetLastError(ERROR_OUTOFMEMORY);
+ return FALSE;
+ }
+
+ result_length = GetFinalPathNameByHandleW(hdl,
+ buf, buf_size, VOLUME_NAME_DOS);
+
+ if(!result_length) {
+ PyMem_Free(buf);
+ return FALSE;
+ }
+
+ if(!CloseHandle(hdl)) {
+ PyMem_Free(buf);
+ return FALSE;
+ }
+
+ buf[result_length] = 0;
+
+ *target_path = buf;
+ return TRUE;
+}
+
+static int
+win32_get_reparse_tag(HANDLE reparse_point_handle, ULONG *reparse_tag)
+{
+ char target_buffer[MAXIMUM_REPARSE_DATA_BUFFER_SIZE];
+ REPARSE_DATA_BUFFER *rdb = (REPARSE_DATA_BUFFER *)target_buffer;
+ DWORD n_bytes_returned;
+
+ if (0 == DeviceIoControl(
+ reparse_point_handle,
+ FSCTL_GET_REPARSE_POINT,
+ NULL, 0, /* in buffer */
+ target_buffer, sizeof(target_buffer),
+ &n_bytes_returned,
+ NULL)) /* we're not using OVERLAPPED_IO */
+ return FALSE;
+
+ if (reparse_tag)
+ *reparse_tag = rdb->ReparseTag;
+
+ return TRUE;
+}
+
+static void
+find_data_to_file_info_w(WIN32_FIND_DATAW *pFileData,
+ BY_HANDLE_FILE_INFORMATION *info,
+ ULONG *reparse_tag)
+{
+ memset(info, 0, sizeof(*info));
+ info->dwFileAttributes = pFileData->dwFileAttributes;
+ info->ftCreationTime = pFileData->ftCreationTime;
+ info->ftLastAccessTime = pFileData->ftLastAccessTime;
+ info->ftLastWriteTime = pFileData->ftLastWriteTime;
+ info->nFileSizeHigh = pFileData->nFileSizeHigh;
+ info->nFileSizeLow = pFileData->nFileSizeLow;
+/* info->nNumberOfLinks = 1; */
+ if (pFileData->dwFileAttributes & FILE_ATTRIBUTE_REPARSE_POINT)
+ *reparse_tag = pFileData->dwReserved0;
+ else
+ *reparse_tag = 0;
+}
+
+static BOOL
+attributes_from_dir_w(LPCWSTR pszFile, BY_HANDLE_FILE_INFORMATION *info, ULONG *reparse_tag)
+{
+ HANDLE hFindFile;
+ WIN32_FIND_DATAW FileData;
+ hFindFile = FindFirstFileW(pszFile, &FileData);
+ if (hFindFile == INVALID_HANDLE_VALUE)
+ return FALSE;
+ FindClose(hFindFile);
+ find_data_to_file_info_w(&FileData, info, reparse_tag);
+ return TRUE;
+}
+
+static int
+win32_xstat_impl_w(const wchar_t *path, struct _Py_stat_struct *result,
+ BOOL traverse)
+{
+ int code;
+ HANDLE hFile, hFile2;
+ BY_HANDLE_FILE_INFORMATION info;
+ ULONG reparse_tag = 0;
+ wchar_t *target_path;
+ const wchar_t *dot;
+
+ hFile = CreateFileW(
+ path,
+ FILE_READ_ATTRIBUTES, /* desired access */
+ 0, /* share mode */
+ NULL, /* security attributes */
+ OPEN_EXISTING,
+ /* FILE_FLAG_BACKUP_SEMANTICS is required to open a directory */
+ /* FILE_FLAG_OPEN_REPARSE_POINT does not follow the symlink.
+ Because of this, calls like GetFinalPathNameByHandle will return
+ the symlink path again and not the actual final path. */
+ FILE_ATTRIBUTE_NORMAL|FILE_FLAG_BACKUP_SEMANTICS|
+ FILE_FLAG_OPEN_REPARSE_POINT,
+ NULL);
+
+ if (hFile == INVALID_HANDLE_VALUE) {
+ /* Either the target doesn't exist, or we don't have access to
+ get a handle to it. If the former, we need to return an error.
+ If the latter, we can use attributes_from_dir. */
+ if (GetLastError() != ERROR_SHARING_VIOLATION)
+ return -1;
+ /* Could not get attributes on open file. Fall back to
+ reading the directory. */
+ if (!attributes_from_dir_w(path, &info, &reparse_tag))
+ /* Very strange. This should not fail now */
+ return -1;
+ if (info.dwFileAttributes & FILE_ATTRIBUTE_REPARSE_POINT) {
+ if (traverse) {
+ /* Should traverse, but could not open reparse point handle */
+ SetLastError(ERROR_SHARING_VIOLATION);
+ return -1;
+ }
+ }
+ } else {
+ if (!GetFileInformationByHandle(hFile, &info)) {
+ CloseHandle(hFile);
+ return -1;
+ }
+ if (info.dwFileAttributes & FILE_ATTRIBUTE_REPARSE_POINT) {
+ if (!win32_get_reparse_tag(hFile, &reparse_tag))
+ return -1;
+
+ /* Close the outer open file handle now that we're about to
+ reopen it with different flags. */
+ if (!CloseHandle(hFile))
+ return -1;
+
+ if (traverse) {
+ /* In order to call GetFinalPathNameByHandle we need to open
+ the file without the reparse handling flag set. */
+ hFile2 = CreateFileW(
+ path, FILE_READ_ATTRIBUTES, FILE_SHARE_READ,
+ NULL, OPEN_EXISTING,
+ FILE_ATTRIBUTE_NORMAL|FILE_FLAG_BACKUP_SEMANTICS,
+ NULL);
+ if (hFile2 == INVALID_HANDLE_VALUE)
+ return -1;
+
+ if (!get_target_path(hFile2, &target_path))
+ return -1;
+
+ code = win32_xstat_impl_w(target_path, result, FALSE);
+ PyMem_Free(target_path);
+ return code;
+ }
+ } else
+ CloseHandle(hFile);
+ }
+ _Py_attribute_data_to_stat(&info, reparse_tag, result);
+
+ /* Set S_IEXEC if it is an .exe, .bat, ... */
+ dot = wcsrchr(path, '.');
+ if (dot) {
+ if (_wcsicmp(dot, L".bat") == 0 || _wcsicmp(dot, L".cmd") == 0 ||
+ _wcsicmp(dot, L".exe") == 0 || _wcsicmp(dot, L".com") == 0)
+ result->st_mode |= 0111;
+ }
+ return 0;
+}
+
+static int
+win32_xstat_w(const wchar_t *path, struct _Py_stat_struct *result, BOOL traverse)
+{
+ /* Protocol violation: we explicitly clear errno, instead of
+ setting it to a POSIX error. Callers should use GetLastError. */
+ int code = win32_xstat_impl_w(path, result, traverse);
+ errno = 0;
+ return code;
+}
+
+static int
+win32_lstat_w(const wchar_t* path, struct _Py_stat_struct *result)
+{
+ return win32_xstat_w(path, result, FALSE);
+}
+
+static int
+win32_stat_w(const wchar_t* path, struct _Py_stat_struct *result)
+{
+ return win32_xstat_w(path, result, TRUE);
+}
+
+#endif /* MS_WINDOWS */
+
+static PyTypeObject StatResultType;
+
+static PyObject *billion = NULL;
+
+static newfunc structseq_new;
+
+static PyObject *
+statresult_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+ PyStructSequence *result;
+ int i;
+
+ result = (PyStructSequence*)structseq_new(type, args, kwds);
+ if (!result)
+ return NULL;
+ /* If we have been initialized from a tuple,
+ st_?time might be set to None. Initialize it
+ from the int slots. */
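+    /* Indices 7..9 hold the integer st_atime/st_mtime/st_ctime values;
+       their float counterparts live three slots later (indices 10..12),
+       which is what the i+3 below fills in. */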
+ for (i = 7; i <= 9; i++) {
+ if (result->ob_item[i+3] == Py_None) {
+ Py_DECREF(Py_None);
+ Py_INCREF(result->ob_item[i]);
+ result->ob_item[i+3] = result->ob_item[i];
+ }
+ }
+ return (PyObject*)result;
+}
+
+/* If true, st_?time is float. */
+static int _stat_float_times = 1;
+
+static void
+fill_time(PyObject *v, int index, time_t sec, unsigned long nsec)
+{
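+    /* Each timestamp is stored in three forms: integer seconds at 'index',
+       float seconds at 'index'+3 and total nanoseconds at 'index'+6. */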
+#if SIZEOF_TIME_T > SIZEOF_LONG
+ PyObject *s = PyLong_FromLongLong((PY_LONG_LONG)sec);
+#else
+#if PY_MAJOR_VERSION >= 3
+ PyObject *s = PyLong_FromLong((long)sec);
+#else
+ PyObject *s = PyInt_FromLong((long)sec);
+#endif
+#endif
+ PyObject *ns_fractional = PyLong_FromUnsignedLong(nsec);
+ PyObject *s_in_ns = NULL;
+ PyObject *ns_total = NULL;
+ PyObject *float_s = NULL;
+
+ if (!(s && ns_fractional))
+ goto exit;
+
+ s_in_ns = PyNumber_Multiply(s, billion);
+ if (!s_in_ns)
+ goto exit;
+
+ ns_total = PyNumber_Add(s_in_ns, ns_fractional);
+ if (!ns_total)
+ goto exit;
+
+ if (_stat_float_times) {
+ float_s = PyFloat_FromDouble(sec + 1e-9*nsec);
+ if (!float_s)
+ goto exit;
+ }
+ else {
+ float_s = s;
+ Py_INCREF(float_s);
+ }
+
+ PyStructSequence_SET_ITEM(v, index, s);
+ PyStructSequence_SET_ITEM(v, index+3, float_s);
+ PyStructSequence_SET_ITEM(v, index+6, ns_total);
+ s = NULL;
+ float_s = NULL;
+ ns_total = NULL;
+exit:
+ Py_XDECREF(s);
+ Py_XDECREF(ns_fractional);
+ Py_XDECREF(s_in_ns);
+ Py_XDECREF(ns_total);
+ Py_XDECREF(float_s);
+}
+
+#ifdef MS_WINDOWS
+#define HAVE_STAT_NSEC 1
+#define HAVE_STRUCT_STAT_ST_FILE_ATTRIBUTES 1
+#endif
+
+#ifdef HAVE_STRUCT_STAT_ST_BLKSIZE
+#define ST_BLKSIZE_IDX 16
+#else
+#define ST_BLKSIZE_IDX 15
+#endif
+
+#ifdef HAVE_STRUCT_STAT_ST_BLOCKS
+#define ST_BLOCKS_IDX (ST_BLKSIZE_IDX+1)
+#else
+#define ST_BLOCKS_IDX ST_BLKSIZE_IDX
+#endif
+
+#ifdef HAVE_STRUCT_STAT_ST_RDEV
+#define ST_RDEV_IDX (ST_BLOCKS_IDX+1)
+#else
+#define ST_RDEV_IDX ST_BLOCKS_IDX
+#endif
+
+#ifdef HAVE_STRUCT_STAT_ST_FLAGS
+#define ST_FLAGS_IDX (ST_RDEV_IDX+1)
+#else
+#define ST_FLAGS_IDX ST_RDEV_IDX
+#endif
+
+#ifdef HAVE_STRUCT_STAT_ST_GEN
+#define ST_GEN_IDX (ST_FLAGS_IDX+1)
+#else
+#define ST_GEN_IDX ST_FLAGS_IDX
+#endif
+
+#ifdef HAVE_STRUCT_STAT_ST_BIRTHTIME
+#define ST_BIRTHTIME_IDX (ST_GEN_IDX+1)
+#else
+#define ST_BIRTHTIME_IDX ST_GEN_IDX
+#endif
+
+#ifdef HAVE_STRUCT_STAT_ST_FILE_ATTRIBUTES
+#define ST_FILE_ATTRIBUTES_IDX (ST_BIRTHTIME_IDX+1)
+#else
+#define ST_FILE_ATTRIBUTES_IDX ST_BIRTHTIME_IDX
+#endif
+
+#ifdef HAVE_LONG_LONG
+# define _PyLong_FromDev PyLong_FromLongLong
+#else
+# define _PyLong_FromDev PyLong_FromLong
+#endif
+
+#ifndef MS_WINDOWS
+PyObject *
+_PyLong_FromUid(uid_t uid)
+{
+ if (uid == (uid_t)-1)
+ return PyLong_FromLong(-1);
+ return PyLong_FromUnsignedLong(uid);
+}
+
+PyObject *
+_PyLong_FromGid(gid_t gid)
+{
+ if (gid == (gid_t)-1)
+ return PyLong_FromLong(-1);
+ return PyLong_FromUnsignedLong(gid);
+}
+#endif
+
+/* pack a system stat C structure into the Python stat tuple
+ (used by posix_stat() and posix_fstat()) */
+static PyObject*
+_pystat_fromstructstat(STRUCT_STAT *st)
+{
+ unsigned long ansec, mnsec, cnsec;
+ PyObject *v = PyStructSequence_New(&StatResultType);
+ if (v == NULL)
+ return NULL;
+
+ PyStructSequence_SET_ITEM(v, 0, PyLong_FromLong((long)st->st_mode));
+#ifdef HAVE_LARGEFILE_SUPPORT
+ PyStructSequence_SET_ITEM(v, 1,
+ PyLong_FromUnsignedLongLong(st->st_ino));
+#else
+ PyStructSequence_SET_ITEM(v, 1, PyLong_FromUnsignedLong((unsigned long)st->st_ino));
+#endif
+#ifdef MS_WINDOWS
+ PyStructSequence_SET_ITEM(v, 2, PyLong_FromUnsignedLong(st->st_dev));
+#else
+ PyStructSequence_SET_ITEM(v, 2, _PyLong_FromDev(st->st_dev));
+#endif
+ PyStructSequence_SET_ITEM(v, 3, PyLong_FromLong((long)st->st_nlink));
+#if defined(MS_WINDOWS)
+ PyStructSequence_SET_ITEM(v, 4, PyLong_FromLong(0));
+ PyStructSequence_SET_ITEM(v, 5, PyLong_FromLong(0));
+#else
+ PyStructSequence_SET_ITEM(v, 4, _PyLong_FromUid(st->st_uid));
+ PyStructSequence_SET_ITEM(v, 5, _PyLong_FromGid(st->st_gid));
+#endif
+#ifdef HAVE_LARGEFILE_SUPPORT
+ PyStructSequence_SET_ITEM(v, 6,
+ PyLong_FromLongLong((PY_LONG_LONG)st->st_size));
+#else
+ PyStructSequence_SET_ITEM(v, 6, PyLong_FromLong(st->st_size));
+#endif
+
+#if defined(HAVE_STAT_TV_NSEC)
+ ansec = st->st_atim.tv_nsec;
+ mnsec = st->st_mtim.tv_nsec;
+ cnsec = st->st_ctim.tv_nsec;
+#elif defined(HAVE_STAT_TV_NSEC2)
+ ansec = st->st_atimespec.tv_nsec;
+ mnsec = st->st_mtimespec.tv_nsec;
+ cnsec = st->st_ctimespec.tv_nsec;
+#elif defined(HAVE_STAT_NSEC)
+ ansec = st->st_atime_nsec;
+ mnsec = st->st_mtime_nsec;
+ cnsec = st->st_ctime_nsec;
+#else
+ ansec = mnsec = cnsec = 0;
+#endif
+ fill_time(v, 7, st->st_atime, ansec);
+ fill_time(v, 8, st->st_mtime, mnsec);
+ fill_time(v, 9, st->st_ctime, cnsec);
+
+#ifdef HAVE_STRUCT_STAT_ST_BLKSIZE
+ PyStructSequence_SET_ITEM(v, ST_BLKSIZE_IDX,
+ PyLong_FromLong((long)st->st_blksize));
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_BLOCKS
+ PyStructSequence_SET_ITEM(v, ST_BLOCKS_IDX,
+ PyLong_FromLong((long)st->st_blocks));
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_RDEV
+ PyStructSequence_SET_ITEM(v, ST_RDEV_IDX,
+ PyLong_FromLong((long)st->st_rdev));
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_GEN
+ PyStructSequence_SET_ITEM(v, ST_GEN_IDX,
+ PyLong_FromLong((long)st->st_gen));
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_BIRTHTIME
+ {
+ PyObject *val;
+ unsigned long bsec,bnsec;
+ bsec = (long)st->st_birthtime;
+#ifdef HAVE_STAT_TV_NSEC2
+ bnsec = st->st_birthtimespec.tv_nsec;
+#else
+ bnsec = 0;
+#endif
+ if (_stat_float_times) {
+ val = PyFloat_FromDouble(bsec + 1e-9*bnsec);
+ } else {
+ val = PyLong_FromLong((long)bsec);
+ }
+ PyStructSequence_SET_ITEM(v, ST_BIRTHTIME_IDX,
+ val);
+ }
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_FLAGS
+ PyStructSequence_SET_ITEM(v, ST_FLAGS_IDX,
+ PyLong_FromLong((long)st->st_flags));
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_FILE_ATTRIBUTES
+ PyStructSequence_SET_ITEM(v, ST_FILE_ATTRIBUTES_IDX,
+ PyLong_FromUnsignedLong(st->st_file_attributes));
+#endif
+
+ if (PyErr_Occurred()) {
+ Py_DECREF(v);
+ return NULL;
+ }
+
+ return v;
+}
+
+//char *PyStructSequence_UnnamedField = "unnamed field";
+
+PyDoc_STRVAR(stat_result__doc__,
+"stat_result: Result from stat, fstat, or lstat.\n\n\
+This object may be accessed either as a tuple of\n\
+ (mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime)\n\
+or via the attributes st_mode, st_ino, st_dev, st_nlink, st_uid, and so on.\n\
+\n\
+Posix/windows: If your platform supports st_blksize, st_blocks, st_rdev,\n\
+or st_flags, they are available as attributes only.\n\
+\n\
+See os.stat for more information.");
+
+static PyStructSequence_Field stat_result_fields[] = {
+ {"st_mode", "protection bits"},
+ {"st_ino", "inode"},
+ {"st_dev", "device"},
+ {"st_nlink", "number of hard links"},
+ {"st_uid", "user ID of owner"},
+ {"st_gid", "group ID of owner"},
+ {"st_size", "total size, in bytes"},
+ /* The NULL is replaced with PyStructSequence_UnnamedField later. */
+ {NULL, "integer time of last access"},
+ {NULL, "integer time of last modification"},
+ {NULL, "integer time of last change"},
+ {"st_atime", "time of last access"},
+ {"st_mtime", "time of last modification"},
+ {"st_ctime", "time of last change"},
+ {"st_atime_ns", "time of last access in nanoseconds"},
+ {"st_mtime_ns", "time of last modification in nanoseconds"},
+ {"st_ctime_ns", "time of last change in nanoseconds"},
+#ifdef HAVE_STRUCT_STAT_ST_BLKSIZE
+ {"st_blksize", "blocksize for filesystem I/O"},
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_BLOCKS
+ {"st_blocks", "number of blocks allocated"},
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_RDEV
+ {"st_rdev", "device type (if inode device)"},
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_FLAGS
+ {"st_flags", "user defined flags for file"},
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_GEN
+ {"st_gen", "generation number"},
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_BIRTHTIME
+ {"st_birthtime", "time of creation"},
+#endif
+#ifdef HAVE_STRUCT_STAT_ST_FILE_ATTRIBUTES
+ {"st_file_attributes", "Windows file attribute bits"},
+#endif
+ {0}
+};
+
+static PyStructSequence_Desc stat_result_desc = {
+ "scandir.stat_result", /* name */
+ stat_result__doc__, /* doc */
+ stat_result_fields,
+ 10
+};
+
+
+#ifdef MS_WINDOWS
+static int
+win32_warn_bytes_api()
+{
+ return PyErr_WarnEx(PyExc_DeprecationWarning,
+ "The Windows bytes API has been deprecated, "
+ "use Unicode filenames instead",
+ 1);
+}
+#endif
+
+typedef struct {
+ const char *function_name;
+ const char *argument_name;
+ int nullable;
+ wchar_t *wide;
+ char *narrow;
+ int fd;
+ Py_ssize_t length;
+ PyObject *object;
+ PyObject *cleanup;
+} path_t;
+
+static void
+path_cleanup(path_t *path) {
+ if (path->cleanup) {
+ Py_CLEAR(path->cleanup);
+ }
+}
+
+static int
+path_converter(PyObject *o, void *p) {
+ path_t *path = (path_t *)p;
+ PyObject *unicode, *bytes;
+ Py_ssize_t length;
+ char *narrow;
+
+#define FORMAT_EXCEPTION(exc, fmt) \
+ PyErr_Format(exc, "%s%s" fmt, \
+ path->function_name ? path->function_name : "", \
+ path->function_name ? ": " : "", \
+ path->argument_name ? path->argument_name : "path")
+
+ /* Py_CLEANUP_SUPPORTED support */
+ if (o == NULL) {
+ path_cleanup(path);
+ return 1;
+ }
+
+ /* ensure it's always safe to call path_cleanup() */
+ path->cleanup = NULL;
+
+ if (o == Py_None) {
+ if (!path->nullable) {
+ FORMAT_EXCEPTION(PyExc_TypeError,
+ "can't specify None for %s argument");
+ return 0;
+ }
+ path->wide = NULL;
+ path->narrow = NULL;
+ path->length = 0;
+ path->object = o;
+ path->fd = -1;
+ return 1;
+ }
+
+ unicode = PyUnicode_FromObject(o);
+ if (unicode) {
+#ifdef MS_WINDOWS
+ wchar_t *wide;
+
+ wide = PyUnicode_AsUnicodeAndSize(unicode, &length);
+ if (!wide) {
+ Py_DECREF(unicode);
+ return 0;
+ }
+ if (length > 32767) {
+ FORMAT_EXCEPTION(PyExc_ValueError, "%s too long for Windows");
+ Py_DECREF(unicode);
+ return 0;
+ }
+ if (wcslen(wide) != length) {
+ FORMAT_EXCEPTION(PyExc_ValueError, "embedded null character");
+ Py_DECREF(unicode);
+ return 0;
+ }
+
+ path->wide = wide;
+ path->narrow = NULL;
+ path->length = length;
+ path->object = o;
+ path->fd = -1;
+ path->cleanup = unicode;
+ return Py_CLEANUP_SUPPORTED;
+#else
+#if PY_MAJOR_VERSION >= 3
+ if (!PyUnicode_FSConverter(unicode, &bytes))
+ bytes = NULL;
+#else
+ bytes = PyUnicode_AsEncodedString(unicode, FS_ENCODING, "strict");
+#endif
+ Py_DECREF(unicode);
+#endif
+ }
+ else {
+ PyErr_Clear();
+#if PY_MAJOR_VERSION >= 3
+ if (PyObject_CheckBuffer(o)) {
+ bytes = PyBytes_FromObject(o);
+ }
+#else
+ if (PyString_Check(o)) {
+ bytes = o;
+ Py_INCREF(bytes);
+ }
+#endif
+ else
+ bytes = NULL;
+ if (!bytes) {
+ PyErr_Clear();
+ }
+ }
+
+ if (!bytes) {
+ if (!PyErr_Occurred())
+ FORMAT_EXCEPTION(PyExc_TypeError, "illegal type for %s parameter");
+ return 0;
+ }
+
+#ifdef MS_WINDOWS
+ if (win32_warn_bytes_api()) {
+ Py_DECREF(bytes);
+ return 0;
+ }
+#endif
+
+ length = PyBytes_GET_SIZE(bytes);
+#ifdef MS_WINDOWS
+ if (length > MAX_PATH-1) {
+ FORMAT_EXCEPTION(PyExc_ValueError, "%s too long for Windows");
+ Py_DECREF(bytes);
+ return 0;
+ }
+#endif
+
+ narrow = PyBytes_AS_STRING(bytes);
+ if ((size_t)length != strlen(narrow)) {
+ FORMAT_EXCEPTION(PyExc_ValueError, "embedded null character in %s");
+ Py_DECREF(bytes);
+ return 0;
+ }
+
+ path->wide = NULL;
+ path->narrow = narrow;
+ path->length = length;
+ path->object = o;
+ path->fd = -1;
+ path->cleanup = bytes;
+ return Py_CLEANUP_SUPPORTED;
+}
+
+static PyObject *
+path_error(path_t *path)
+{
+#ifdef MS_WINDOWS
+ return PyErr_SetExcFromWindowsErrWithFilenameObject(PyExc_OSError,
+ 0, path->object);
+#else
+ return PyErr_SetFromErrnoWithFilenameObject(PyExc_OSError, path->object);
+#endif
+}
+
+
+/* SECTION: Main DirEntry and scandir implementation, taken from
+ Python 3.5's posixmodule.c */
+
+PyDoc_STRVAR(posix_scandir__doc__,
+"scandir(path='.') -> iterator of DirEntry objects for given path");
+
+static char *follow_symlinks_keywords[] = {"follow_symlinks", NULL};
+#if PY_MAJOR_VERSION >= 3 && PY_MINOR_VERSION >= 3
+static char *follow_symlinks_format = "|$p:DirEntry.stat";
+#else
+static char *follow_symlinks_format = "|i:DirEntry.stat";
+#endif
+
+typedef struct {
+ PyObject_HEAD
+ PyObject *name;
+ PyObject *path;
+ PyObject *stat;
+ PyObject *lstat;
+#ifdef MS_WINDOWS
+ struct _Py_stat_struct win32_lstat;
+ unsigned __int64 win32_file_index;
+ int got_file_index;
+#if PY_MAJOR_VERSION < 3
+ int name_path_bytes;
+#endif
+#else /* POSIX */
+#ifdef HAVE_DIRENT_D_TYPE
+ unsigned char d_type;
+#endif
+ ino_t d_ino;
+#endif
+} DirEntry;
+
+static void
+DirEntry_dealloc(DirEntry *entry)
+{
+ Py_XDECREF(entry->name);
+ Py_XDECREF(entry->path);
+ Py_XDECREF(entry->stat);
+ Py_XDECREF(entry->lstat);
+ Py_TYPE(entry)->tp_free((PyObject *)entry);
+}
+
+/* Forward reference */
+static int
+DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits);
+
+/* Set exception and return -1 on error, 0 for False, 1 for True */
+static int
+DirEntry_is_symlink(DirEntry *self)
+{
+#ifdef MS_WINDOWS
+ return (self->win32_lstat.st_mode & S_IFMT) == S_IFLNK;
+#elif defined(HAVE_DIRENT_D_TYPE)
+ /* POSIX */
+ if (self->d_type != DT_UNKNOWN)
+ return self->d_type == DT_LNK;
+ else
+ return DirEntry_test_mode(self, 0, S_IFLNK);
+#else
+ /* POSIX without d_type */
+ return DirEntry_test_mode(self, 0, S_IFLNK);
+#endif
+}
+
+static PyObject *
+DirEntry_py_is_symlink(DirEntry *self)
+{
+ int result;
+
+ result = DirEntry_is_symlink(self);
+ if (result == -1)
+ return NULL;
+ return PyBool_FromLong(result);
+}
+
+static PyObject *
+DirEntry_fetch_stat(DirEntry *self, int follow_symlinks)
+{
+ int result;
+ struct _Py_stat_struct st;
+
+#ifdef MS_WINDOWS
+ wchar_t *path;
+
+ path = PyUnicode_AsUnicode(self->path);
+ if (!path)
+ return NULL;
+
+ if (follow_symlinks)
+ result = win32_stat_w(path, &st);
+ else
+ result = win32_lstat_w(path, &st);
+
+ if (result != 0) {
+ return PyErr_SetExcFromWindowsErrWithFilenameObject(PyExc_OSError,
+ 0, self->path);
+ }
+#else /* POSIX */
+ PyObject *bytes;
+ char *path;
+
+#if PY_MAJOR_VERSION >= 3
+ if (!PyUnicode_FSConverter(self->path, &bytes))
+ return NULL;
+#else
+ if (PyString_Check(self->path)) {
+ bytes = self->path;
+ Py_INCREF(bytes);
+ } else {
+ bytes = PyUnicode_AsEncodedString(self->path, FS_ENCODING, "strict");
+ if (!bytes)
+ return NULL;
+ }
+#endif
+ path = PyBytes_AS_STRING(bytes);
+
+ if (follow_symlinks)
+ result = STAT(path, &st);
+ else
+ result = LSTAT(path, &st);
+ Py_DECREF(bytes);
+
+ if (result != 0)
+ return PyErr_SetFromErrnoWithFilenameObject(PyExc_OSError, self->path);
+#endif
+
+ return _pystat_fromstructstat(&st);
+}
+
+static PyObject *
+DirEntry_get_lstat(DirEntry *self)
+{
+ if (!self->lstat) {
+#ifdef MS_WINDOWS
+ self->lstat = _pystat_fromstructstat(&self->win32_lstat);
+#else /* POSIX */
+ self->lstat = DirEntry_fetch_stat(self, 0);
+#endif
+ }
+ Py_XINCREF(self->lstat);
+ return self->lstat;
+}
+
+static PyObject *
+DirEntry_get_stat(DirEntry *self, int follow_symlinks)
+{
+ if (!follow_symlinks)
+ return DirEntry_get_lstat(self);
+
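+    /* Only an actual symlink needs a separate stat() call here; for any
+       other entry the stat() and lstat() results are identical, so the
+       cached lstat result is reused. */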
+ if (!self->stat) {
+ int result = DirEntry_is_symlink(self);
+ if (result == -1)
+ return NULL;
+ else if (result)
+ self->stat = DirEntry_fetch_stat(self, 1);
+ else
+ self->stat = DirEntry_get_lstat(self);
+ }
+
+ Py_XINCREF(self->stat);
+ return self->stat;
+}
+
+static PyObject *
+DirEntry_stat(DirEntry *self, PyObject *args, PyObject *kwargs)
+{
+ int follow_symlinks = 1;
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, follow_symlinks_format,
+ follow_symlinks_keywords, &follow_symlinks))
+ return NULL;
+
+ return DirEntry_get_stat(self, follow_symlinks);
+}
+
+/* Set exception and return -1 on error, 0 for False, 1 for True */
+static int
+DirEntry_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits)
+{
+ PyObject *stat = NULL;
+ PyObject *st_mode = NULL;
+ long mode;
+ int result;
+#if defined(MS_WINDOWS) || defined(HAVE_DIRENT_D_TYPE)
+ int is_symlink;
+ int need_stat;
+#endif
+#ifdef MS_WINDOWS
+ unsigned long dir_bits;
+#endif
+ _Py_IDENTIFIER(st_mode);
+
+#ifdef MS_WINDOWS
+ is_symlink = (self->win32_lstat.st_mode & S_IFMT) == S_IFLNK;
+ need_stat = follow_symlinks && is_symlink;
+#elif defined(HAVE_DIRENT_D_TYPE)
+ is_symlink = self->d_type == DT_LNK;
+ need_stat = self->d_type == DT_UNKNOWN || (follow_symlinks && is_symlink);
+#endif
+
+#if defined(MS_WINDOWS) || defined(HAVE_DIRENT_D_TYPE)
+ if (need_stat) {
+#endif
+ stat = DirEntry_get_stat(self, follow_symlinks);
+ if (!stat) {
+ if (PyErr_ExceptionMatches(PyExc_FileNotFoundError)) {
+ /* If file doesn't exist (anymore), then return False
+ (i.e., say it's not a file/directory) */
+ PyErr_Clear();
+ return 0;
+ }
+ goto error;
+ }
+ st_mode = _PyObject_GetAttrId(stat, &PyId_st_mode);
+ if (!st_mode)
+ goto error;
+
+ mode = PyLong_AsLong(st_mode);
+ if (mode == -1 && PyErr_Occurred())
+ goto error;
+ Py_CLEAR(st_mode);
+ Py_CLEAR(stat);
+ result = (mode & S_IFMT) == mode_bits;
+#if defined(MS_WINDOWS) || defined(HAVE_DIRENT_D_TYPE)
+ }
+ else if (is_symlink) {
+ assert(mode_bits != S_IFLNK);
+ result = 0;
+ }
+ else {
+ assert(mode_bits == S_IFDIR || mode_bits == S_IFREG);
+#ifdef MS_WINDOWS
+ dir_bits = self->win32_lstat.st_file_attributes & FILE_ATTRIBUTE_DIRECTORY;
+ if (mode_bits == S_IFDIR)
+ result = dir_bits != 0;
+ else
+ result = dir_bits == 0;
+#else /* POSIX */
+ if (mode_bits == S_IFDIR)
+ result = self->d_type == DT_DIR;
+ else
+ result = self->d_type == DT_REG;
+#endif
+ }
+#endif
+
+ return result;
+
+error:
+ Py_XDECREF(st_mode);
+ Py_XDECREF(stat);
+ return -1;
+}
+
+static PyObject *
+DirEntry_py_test_mode(DirEntry *self, int follow_symlinks, unsigned short mode_bits)
+{
+ int result;
+
+ result = DirEntry_test_mode(self, follow_symlinks, mode_bits);
+ if (result == -1)
+ return NULL;
+ return PyBool_FromLong(result);
+}
+
+static PyObject *
+DirEntry_is_dir(DirEntry *self, PyObject *args, PyObject *kwargs)
+{
+ int follow_symlinks = 1;
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, follow_symlinks_format,
+ follow_symlinks_keywords, &follow_symlinks))
+ return NULL;
+
+ return DirEntry_py_test_mode(self, follow_symlinks, S_IFDIR);
+}
+
+static PyObject *
+DirEntry_is_file(DirEntry *self, PyObject *args, PyObject *kwargs)
+{
+ int follow_symlinks = 1;
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, follow_symlinks_format,
+ follow_symlinks_keywords, &follow_symlinks))
+ return NULL;
+
+ return DirEntry_py_test_mode(self, follow_symlinks, S_IFREG);
+}
+
+static PyObject *
+DirEntry_inode(DirEntry *self)
+{
+#ifdef MS_WINDOWS
+ if (!self->got_file_index) {
+ wchar_t *path;
+ struct _Py_stat_struct stat;
+
+ path = PyUnicode_AsUnicode(self->path);
+ if (!path)
+ return NULL;
+
+ if (win32_lstat_w(path, &stat) != 0) {
+ return PyErr_SetExcFromWindowsErrWithFilenameObject(PyExc_OSError,
+ 0, self->path);
+ }
+
+ self->win32_file_index = stat.st_ino;
+ self->got_file_index = 1;
+ }
+ return PyLong_FromUnsignedLongLong(self->win32_file_index);
+#else /* POSIX */
+#ifdef HAVE_LARGEFILE_SUPPORT
+ return PyLong_FromUnsignedLongLong(self->d_ino);
+#else
+ return PyLong_FromUnsignedLong((unsigned long)self->d_ino);
+#endif
+#endif
+}
+
+#if PY_MAJOR_VERSION < 3 && defined(MS_WINDOWS)
+
+PyObject *DirEntry_name_getter(DirEntry *self, void *closure) {
+ if (self->name_path_bytes) {
+ return PyUnicode_EncodeMBCS(PyUnicode_AS_UNICODE(self->name),
+ PyUnicode_GetSize(self->name), "strict");
+ } else {
+ Py_INCREF(self->name);
+ return self->name;
+ }
+}
+
+PyObject *DirEntry_path_getter(DirEntry *self, void *closure) {
+ if (self->name_path_bytes) {
+ return PyUnicode_EncodeMBCS(PyUnicode_AS_UNICODE(self->path),
+ PyUnicode_GetSize(self->path), "strict");
+ } else {
+ Py_INCREF(self->path);
+ return self->path;
+ }
+}
+
+static PyGetSetDef DirEntry_getset[] = {
+ {"name", (getter)DirEntry_name_getter, NULL,
+ "the entry's base filename, relative to scandir() \"path\" argument", NULL},
+ {"path", (getter)DirEntry_path_getter, NULL,
+ "the entry's full path name; equivalent to os.path.join(scandir_path, entry.name)", NULL},
+ {NULL}
+};
+
+#else
+
+static PyMemberDef DirEntry_members[] = {
+ {"name", T_OBJECT_EX, offsetof(DirEntry, name), READONLY,
+ "the entry's base filename, relative to scandir() \"path\" argument"},
+ {"path", T_OBJECT_EX, offsetof(DirEntry, path), READONLY,
+ "the entry's full path name; equivalent to os.path.join(scandir_path, entry.name)"},
+ {NULL}
+};
+
+#endif
+
+static PyObject *
+DirEntry_repr(DirEntry *self)
+{
+#if PY_MAJOR_VERSION >= 3
+ return PyUnicode_FromFormat("<DirEntry %R>", self->name);
+#elif defined(MS_WINDOWS)
+ PyObject *name;
+ PyObject *name_repr;
+ PyObject *entry_repr;
+
+ name = DirEntry_name_getter(self, NULL);
+ if (!name)
+ return NULL;
+ name_repr = PyObject_Repr(name);
+ Py_DECREF(name);
+ if (!name_repr)
+ return NULL;
+ entry_repr = PyString_FromFormat("<DirEntry %s>", PyString_AsString(name_repr));
+ Py_DECREF(name_repr);
+ return entry_repr;
+#else
+ PyObject *name_repr;
+ PyObject *entry_repr;
+
+ name_repr = PyObject_Repr(self->name);
+ if (!name_repr)
+ return NULL;
+ entry_repr = PyString_FromFormat("<DirEntry %s>", PyString_AsString(name_repr));
+ Py_DECREF(name_repr);
+ return entry_repr;
+#endif
+}
+
+static PyMethodDef DirEntry_methods[] = {
+ {"is_dir", (PyCFunction)DirEntry_is_dir, METH_VARARGS | METH_KEYWORDS,
+ "return True if the entry is a directory; cached per entry"
+ },
+ {"is_file", (PyCFunction)DirEntry_is_file, METH_VARARGS | METH_KEYWORDS,
+ "return True if the entry is a file; cached per entry"
+ },
+ {"is_symlink", (PyCFunction)DirEntry_py_is_symlink, METH_NOARGS,
+ "return True if the entry is a symbolic link; cached per entry"
+ },
+ {"stat", (PyCFunction)DirEntry_stat, METH_VARARGS | METH_KEYWORDS,
+ "return stat_result object for the entry; cached per entry"
+ },
+ {"inode", (PyCFunction)DirEntry_inode, METH_NOARGS,
+ "return inode of the entry; cached per entry",
+ },
+ {NULL}
+};
+
+static PyTypeObject DirEntryType = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ MODNAME ".DirEntry", /* tp_name */
+ sizeof(DirEntry), /* tp_basicsize */
+ 0, /* tp_itemsize */
+ /* methods */
+ (destructor)DirEntry_dealloc, /* tp_dealloc */
+ 0, /* tp_print */
+ 0, /* tp_getattr */
+ 0, /* tp_setattr */
+ 0, /* tp_compare */
+ (reprfunc)DirEntry_repr, /* tp_repr */
+ 0, /* tp_as_number */
+ 0, /* tp_as_sequence */
+ 0, /* tp_as_mapping */
+ 0, /* tp_hash */
+ 0, /* tp_call */
+ 0, /* tp_str */
+ 0, /* tp_getattro */
+ 0, /* tp_setattro */
+ 0, /* tp_as_buffer */
+ Py_TPFLAGS_DEFAULT, /* tp_flags */
+ 0, /* tp_doc */
+ 0, /* tp_traverse */
+ 0, /* tp_clear */
+ 0, /* tp_richcompare */
+ 0, /* tp_weaklistoffset */
+ 0, /* tp_iter */
+ 0, /* tp_iternext */
+ DirEntry_methods, /* tp_methods */
+#if PY_MAJOR_VERSION < 3 && defined(MS_WINDOWS)
+ NULL, /* tp_members */
+ DirEntry_getset, /* tp_getset */
+#else
+ DirEntry_members, /* tp_members */
+ NULL, /* tp_getset */
+#endif
+};
+
+#ifdef MS_WINDOWS
+
+static wchar_t *
+join_path_filenameW(wchar_t *path_wide, wchar_t* filename)
+{
+ Py_ssize_t path_len;
+ Py_ssize_t size;
+ wchar_t *result;
+ wchar_t ch;
+
+ if (!path_wide) { /* Default arg: "." */
+ path_wide = L".";
+ path_len = 1;
+ }
+ else {
+ path_len = wcslen(path_wide);
+ }
+
+ /* The +1's are for the path separator and the NUL */
+ size = path_len + 1 + wcslen(filename) + 1;
+ result = PyMem_New(wchar_t, size);
+ if (!result) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+ wcscpy(result, path_wide);
+ if (path_len > 0) {
+ ch = result[path_len - 1];
+ if (ch != SEP && ch != ALTSEP && ch != L':')
+ result[path_len++] = SEP;
+ wcscpy(result + path_len, filename);
+ }
+ return result;
+}
+
+static PyObject *
+DirEntry_from_find_data(path_t *path, WIN32_FIND_DATAW *dataW)
+{
+ DirEntry *entry;
+ BY_HANDLE_FILE_INFORMATION file_info;
+ ULONG reparse_tag;
+ wchar_t *joined_path;
+
+ entry = PyObject_New(DirEntry, &DirEntryType);
+ if (!entry)
+ return NULL;
+ entry->name = NULL;
+ entry->path = NULL;
+ entry->stat = NULL;
+ entry->lstat = NULL;
+ entry->got_file_index = 0;
+#if PY_MAJOR_VERSION < 3
+ entry->name_path_bytes = path->object && PyBytes_Check(path->object);
+#endif
+
+ entry->name = PyUnicode_FromWideChar(dataW->cFileName, wcslen(dataW->cFileName));
+ if (!entry->name)
+ goto error;
+
+ joined_path = join_path_filenameW(path->wide, dataW->cFileName);
+ if (!joined_path)
+ goto error;
+
+ entry->path = PyUnicode_FromWideChar(joined_path, wcslen(joined_path));
+ PyMem_Free(joined_path);
+ if (!entry->path)
+ goto error;
+
+ find_data_to_file_info_w(dataW, &file_info, &reparse_tag);
+ _Py_attribute_data_to_stat(&file_info, reparse_tag, &entry->win32_lstat);
+
+ return (PyObject *)entry;
+
+error:
+ Py_DECREF(entry);
+ return NULL;
+}
+
+#else /* POSIX */
+
+static char *
+join_path_filename(char *path_narrow, char* filename, Py_ssize_t filename_len)
+{
+ Py_ssize_t path_len;
+ Py_ssize_t size;
+ char *result;
+
+ if (!path_narrow) { /* Default arg: "." */
+ path_narrow = ".";
+ path_len = 1;
+ }
+ else {
+ path_len = strlen(path_narrow);
+ }
+
+ if (filename_len == -1)
+ filename_len = strlen(filename);
+
+ /* The +1's are for the path separator and the NUL */
+ size = path_len + 1 + filename_len + 1;
+ result = PyMem_New(char, size);
+ if (!result) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+ strcpy(result, path_narrow);
+ if (path_len > 0 && result[path_len - 1] != '/')
+ result[path_len++] = '/';
+ strcpy(result + path_len, filename);
+ return result;
+}
+
+static PyObject *
+DirEntry_from_posix_info(path_t *path, char *name, Py_ssize_t name_len,
+ ino_t d_ino
+#ifdef HAVE_DIRENT_D_TYPE
+ , unsigned char d_type
+#endif
+ )
+{
+ DirEntry *entry;
+ char *joined_path;
+
+ entry = PyObject_New(DirEntry, &DirEntryType);
+ if (!entry)
+ return NULL;
+ entry->name = NULL;
+ entry->path = NULL;
+ entry->stat = NULL;
+ entry->lstat = NULL;
+
+ joined_path = join_path_filename(path->narrow, name, name_len);
+ if (!joined_path)
+ goto error;
+
+ if (!path->narrow || !PyBytes_Check(path->object)) {
+#if PY_MAJOR_VERSION >= 3
+ entry->name = PyUnicode_DecodeFSDefaultAndSize(name, name_len);
+ entry->path = PyUnicode_DecodeFSDefault(joined_path);
+#else
+ entry->name = PyUnicode_Decode(name, name_len,
+ FS_ENCODING, "strict");
+ entry->path = PyUnicode_Decode(joined_path, strlen(joined_path),
+ FS_ENCODING, "strict");
+#endif
+ }
+ else {
+ entry->name = PyBytes_FromStringAndSize(name, name_len);
+ entry->path = PyBytes_FromString(joined_path);
+ }
+ PyMem_Free(joined_path);
+ if (!entry->name || !entry->path)
+ goto error;
+
+#ifdef HAVE_DIRENT_D_TYPE
+ entry->d_type = d_type;
+#endif
+ entry->d_ino = d_ino;
+
+ return (PyObject *)entry;
+
+error:
+ Py_XDECREF(entry);
+ return NULL;
+}
+
+#endif
+
+
+typedef struct {
+ PyObject_HEAD
+ path_t path;
+#ifdef MS_WINDOWS
+ HANDLE handle;
+ WIN32_FIND_DATAW file_data;
+ int first_time;
+#else /* POSIX */
+ DIR *dirp;
+#endif
+} ScandirIterator;
+
+#ifdef MS_WINDOWS
+
+static void
+ScandirIterator_close(ScandirIterator *iterator)
+{
+ if (iterator->handle == INVALID_HANDLE_VALUE)
+ return;
+
+ Py_BEGIN_ALLOW_THREADS
+ FindClose(iterator->handle);
+ Py_END_ALLOW_THREADS
+ iterator->handle = INVALID_HANDLE_VALUE;
+}
+
+static PyObject *
+ScandirIterator_iternext(ScandirIterator *iterator)
+{
+ WIN32_FIND_DATAW *file_data = &iterator->file_data;
+ BOOL success;
+
+ /* Happens if the iterator is iterated twice */
+ if (iterator->handle == INVALID_HANDLE_VALUE) {
+ PyErr_SetNone(PyExc_StopIteration);
+ return NULL;
+ }
+
+ while (1) {
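+        /* FindFirstFileW() already fetched the first entry when the
+           iterator was created, so skip FindNextFileW() on the first pass. */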
+ if (!iterator->first_time) {
+ Py_BEGIN_ALLOW_THREADS
+ success = FindNextFileW(iterator->handle, file_data);
+ Py_END_ALLOW_THREADS
+ if (!success) {
+ if (GetLastError() != ERROR_NO_MORE_FILES)
+ return path_error(&iterator->path);
+ /* No more files found in directory, stop iterating */
+ break;
+ }
+ }
+ iterator->first_time = 0;
+
+ /* Skip over . and .. */
+ if (wcscmp(file_data->cFileName, L".") != 0 &&
+ wcscmp(file_data->cFileName, L"..") != 0)
+ return DirEntry_from_find_data(&iterator->path, file_data);
+
+        /* Loop till we get a non-dot entry or finish iterating */
+ }
+
+ ScandirIterator_close(iterator);
+
+ PyErr_SetNone(PyExc_StopIteration);
+ return NULL;
+}
+
+#else /* POSIX */
+
+static void
+ScandirIterator_close(ScandirIterator *iterator)
+{
+ if (!iterator->dirp)
+ return;
+
+ Py_BEGIN_ALLOW_THREADS
+ closedir(iterator->dirp);
+ Py_END_ALLOW_THREADS
+ iterator->dirp = NULL;
+ return;
+}
+
+static PyObject *
+ScandirIterator_iternext(ScandirIterator *iterator)
+{
+ struct dirent *direntp;
+ Py_ssize_t name_len;
+ int is_dot;
+
+ /* Happens if the iterator is iterated twice */
+ if (!iterator->dirp) {
+ PyErr_SetNone(PyExc_StopIteration);
+ return NULL;
+ }
+
+ while (1) {
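+        /* readdir() returns NULL both at end-of-directory and on error;
+           clearing errno first lets the two cases be told apart below. */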
+ errno = 0;
+ Py_BEGIN_ALLOW_THREADS
+ direntp = readdir(iterator->dirp);
+ Py_END_ALLOW_THREADS
+
+ if (!direntp) {
+ if (errno != 0)
+ return path_error(&iterator->path);
+ /* No more files found in directory, stop iterating */
+ break;
+ }
+
+ /* Skip over . and .. */
+ name_len = NAMLEN(direntp);
+ is_dot = direntp->d_name[0] == '.' &&
+ (name_len == 1 || (direntp->d_name[1] == '.' && name_len == 2));
+ if (!is_dot) {
+ return DirEntry_from_posix_info(&iterator->path, direntp->d_name,
+ name_len, direntp->d_ino
+#ifdef HAVE_DIRENT_D_TYPE
+ , direntp->d_type
+#endif
+ );
+ }
+
+ /* Loop till we get a non-dot directory or finish iterating */
+ }
+
+ ScandirIterator_close(iterator);
+
+ PyErr_SetNone(PyExc_StopIteration);
+ return NULL;
+}
+
+#endif
+
+static void
+ScandirIterator_dealloc(ScandirIterator *iterator)
+{
+ ScandirIterator_close(iterator);
+ Py_XDECREF(iterator->path.object);
+ path_cleanup(&iterator->path);
+ Py_TYPE(iterator)->tp_free((PyObject *)iterator);
+}
+
+static PyTypeObject ScandirIteratorType = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ MODNAME ".ScandirIterator", /* tp_name */
+ sizeof(ScandirIterator), /* tp_basicsize */
+ 0, /* tp_itemsize */
+ /* methods */
+ (destructor)ScandirIterator_dealloc, /* tp_dealloc */
+ 0, /* tp_print */
+ 0, /* tp_getattr */
+ 0, /* tp_setattr */
+ 0, /* tp_compare */
+ 0, /* tp_repr */
+ 0, /* tp_as_number */
+ 0, /* tp_as_sequence */
+ 0, /* tp_as_mapping */
+ 0, /* tp_hash */
+ 0, /* tp_call */
+ 0, /* tp_str */
+ 0, /* tp_getattro */
+ 0, /* tp_setattro */
+ 0, /* tp_as_buffer */
+ Py_TPFLAGS_DEFAULT, /* tp_flags */
+ 0, /* tp_doc */
+ 0, /* tp_traverse */
+ 0, /* tp_clear */
+ 0, /* tp_richcompare */
+ 0, /* tp_weaklistoffset */
+ PyObject_SelfIter, /* tp_iter */
+ (iternextfunc)ScandirIterator_iternext, /* tp_iternext */
+};
+
+static PyObject *
+posix_scandir(PyObject *self, PyObject *args, PyObject *kwargs)
+{
+ ScandirIterator *iterator;
+ static char *keywords[] = {"path", NULL};
+#ifdef MS_WINDOWS
+ wchar_t *path_strW;
+#else
+ char *path;
+#endif
+
+ iterator = PyObject_New(ScandirIterator, &ScandirIteratorType);
+ if (!iterator)
+ return NULL;
+ memset(&iterator->path, 0, sizeof(path_t));
+ iterator->path.function_name = "scandir";
+ iterator->path.nullable = 1;
+
+#ifdef MS_WINDOWS
+ iterator->handle = INVALID_HANDLE_VALUE;
+#else
+ iterator->dirp = NULL;
+#endif
+
+ if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|O&:scandir", keywords,
+ path_converter, &iterator->path))
+ goto error;
+
+ /* path_converter doesn't keep path.object around, so do it
+ manually for the lifetime of the iterator here (the refcount
+ is decremented in ScandirIterator_dealloc)
+ */
+ Py_XINCREF(iterator->path.object);
+
+#ifdef MS_WINDOWS
+ if (iterator->path.narrow) {
+ PyErr_SetString(PyExc_TypeError,
+ "os.scandir() doesn't support bytes path on Windows, use Unicode instead");
+ goto error;
+ }
+ iterator->first_time = 1;
+
+ path_strW = join_path_filenameW(iterator->path.wide, L"*.*");
+ if (!path_strW)
+ goto error;
+
+ Py_BEGIN_ALLOW_THREADS
+ iterator->handle = FindFirstFileW(path_strW, &iterator->file_data);
+ Py_END_ALLOW_THREADS
+
+ PyMem_Free(path_strW);
+
+ if (iterator->handle == INVALID_HANDLE_VALUE) {
+ path_error(&iterator->path);
+ goto error;
+ }
+#else /* POSIX */
+ if (iterator->path.narrow)
+ path = iterator->path.narrow;
+ else
+ path = ".";
+
+ errno = 0;
+ Py_BEGIN_ALLOW_THREADS
+ iterator->dirp = opendir(path);
+ Py_END_ALLOW_THREADS
+
+ if (!iterator->dirp) {
+ path_error(&iterator->path);
+ goto error;
+ }
+#endif
+
+ return (PyObject *)iterator;
+
+error:
+ Py_DECREF(iterator);
+ return NULL;
+}
+
+
+/* SECTION: Module and method definitions and initialization code */
+
+static PyMethodDef scandir_methods[] = {
+ {"scandir", (PyCFunction)posix_scandir,
+ METH_VARARGS | METH_KEYWORDS,
+ posix_scandir__doc__},
+ {NULL, NULL},
+};
+
+#if PY_MAJOR_VERSION >= 3
+static struct PyModuleDef moduledef = {
+ PyModuleDef_HEAD_INIT,
+ "_scandir",
+ NULL,
+ 0,
+ scandir_methods,
+ NULL,
+ NULL,
+ NULL,
+ NULL,
+};
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+PyObject *
+PyInit__scandir(void)
+{
+ PyObject *module = PyModule_Create(&moduledef);
+#else
+void
+init_scandir(void)
+{
+ PyObject *module = Py_InitModule("_scandir", scandir_methods);
+#endif
+ if (module == NULL) {
+ INIT_ERROR;
+ }
+
+ billion = PyLong_FromLong(1000000000);
+ if (!billion)
+ INIT_ERROR;
+
+ stat_result_desc.fields[7].name = PyStructSequence_UnnamedField;
+ stat_result_desc.fields[8].name = PyStructSequence_UnnamedField;
+ stat_result_desc.fields[9].name = PyStructSequence_UnnamedField;
+ PyStructSequence_InitType(&StatResultType, &stat_result_desc);
+ structseq_new = StatResultType.tp_new;
+ StatResultType.tp_new = statresult_new;
+
+ if (PyType_Ready(&ScandirIteratorType) < 0)
+ INIT_ERROR;
+ if (PyType_Ready(&DirEntryType) < 0)
+ INIT_ERROR;
+
+ PyModule_AddObject(module, "DirEntry", (PyObject *)&DirEntryType);
+
+#if PY_MAJOR_VERSION >= 3
+ return module;
+#endif
+}
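The iterator above closes its FindFirstFile handle (or DIR stream) as soon as
iteration completes, so a second pass over the same iterator object stops
immediately. A minimal behavioural sketch from the Python side, assuming the
extension built here imports as `_scandir` (the pure-Python wrapper added
below normally hides it):

    # Behavioural sketch only, not part of the patch.
    import _scandir

    it = _scandir.scandir('.')
    first = [entry.name for entry in it]  # consumes and closes the handle
    second = list(it)                     # exhausted iterator yields nothing
    print(len(first), second)             # e.g. prints: 42 []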
diff --git a/contrib/deprecated/python/scandir/osdefs.h b/contrib/deprecated/python/scandir/osdefs.h
new file mode 100644
index 0000000000..d678ca3b4d
--- /dev/null
+++ b/contrib/deprecated/python/scandir/osdefs.h
@@ -0,0 +1,48 @@
+// from CPython
+#ifndef Py_OSDEFS_H
+#define Py_OSDEFS_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/* Operating system dependencies */
+
+#ifdef MS_WINDOWS
+#define SEP L'\\'
+#define ALTSEP L'/'
+#define MAXPATHLEN 256
+#define DELIM L';'
+#endif
+
+/* Filename separator */
+#ifndef SEP
+#define SEP L'/'
+#endif
+
+/* Max pathname length */
+#ifdef __hpux
+#include <sys/param.h>
+#include <limits.h>
+#ifndef PATH_MAX
+#define PATH_MAX MAXPATHLEN
+#endif
+#endif
+
+#ifndef MAXPATHLEN
+#if defined(PATH_MAX) && PATH_MAX > 1024
+#define MAXPATHLEN PATH_MAX
+#else
+#define MAXPATHLEN 1024
+#endif
+#endif
+
+/* Search path entry delimiter */
+#ifndef DELIM
+#define DELIM L':'
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_OSDEFS_H */
diff --git a/contrib/deprecated/python/scandir/scandir.py b/contrib/deprecated/python/scandir/scandir.py
new file mode 100644
index 0000000000..c565b23f89
--- /dev/null
+++ b/contrib/deprecated/python/scandir/scandir.py
@@ -0,0 +1,693 @@
+"""scandir, a better directory iterator and faster os.walk(), now in the Python 3.5 stdlib
+
+scandir() is a generator version of os.listdir() that returns an
+iterator over files in a directory, and also exposes the extra
+information most OSes provide while iterating files in a directory
+(such as type and stat information).
+
+This module also includes a version of os.walk() that uses scandir()
+to speed it up significantly.
+
+See README.md or https://github.com/benhoyt/scandir for rationale and
+docs, or read PEP 471 (https://www.python.org/dev/peps/pep-0471/) for
+more details on its inclusion into Python 3.5.
+
+scandir is released under the new BSD 3-clause license. See
+LICENSE.txt for the full license text.
+"""
+
+from __future__ import division
+
+from errno import ENOENT
+from os import listdir, lstat, stat, strerror
+from os.path import join, islink
+from stat import S_IFDIR, S_IFLNK, S_IFREG
+import collections
+import sys
+
+try:
+ import _scandir
+except ImportError:
+ _scandir = None
+
+try:
+ import ctypes
+except ImportError:
+ ctypes = None
+
+if _scandir is None and ctypes is None:
+ import warnings
+ warnings.warn("scandir can't find the compiled _scandir C module "
+ "or ctypes, using slow generic fallback")
+
+__version__ = '1.10.0'
+__all__ = ['scandir', 'walk']
+
+# Windows FILE_ATTRIBUTE constants for interpreting the
+# FIND_DATA.dwFileAttributes member
+FILE_ATTRIBUTE_ARCHIVE = 32
+FILE_ATTRIBUTE_COMPRESSED = 2048
+FILE_ATTRIBUTE_DEVICE = 64
+FILE_ATTRIBUTE_DIRECTORY = 16
+FILE_ATTRIBUTE_ENCRYPTED = 16384
+FILE_ATTRIBUTE_HIDDEN = 2
+FILE_ATTRIBUTE_INTEGRITY_STREAM = 32768
+FILE_ATTRIBUTE_NORMAL = 128
+FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 8192
+FILE_ATTRIBUTE_NO_SCRUB_DATA = 131072
+FILE_ATTRIBUTE_OFFLINE = 4096
+FILE_ATTRIBUTE_READONLY = 1
+FILE_ATTRIBUTE_REPARSE_POINT = 1024
+FILE_ATTRIBUTE_SPARSE_FILE = 512
+FILE_ATTRIBUTE_SYSTEM = 4
+FILE_ATTRIBUTE_TEMPORARY = 256
+FILE_ATTRIBUTE_VIRTUAL = 65536
+
+IS_PY3 = sys.version_info >= (3, 0)
+
+if IS_PY3:
+ unicode = str # Because Python <= 3.2 doesn't have u'unicode' syntax
+
+
+class GenericDirEntry(object):
+ __slots__ = ('name', '_stat', '_lstat', '_scandir_path', '_path')
+
+ def __init__(self, scandir_path, name):
+ self._scandir_path = scandir_path
+ self.name = name
+ self._stat = None
+ self._lstat = None
+ self._path = None
+
+ @property
+ def path(self):
+ if self._path is None:
+ self._path = join(self._scandir_path, self.name)
+ return self._path
+
+ def stat(self, follow_symlinks=True):
+ if follow_symlinks:
+ if self._stat is None:
+ self._stat = stat(self.path)
+ return self._stat
+ else:
+ if self._lstat is None:
+ self._lstat = lstat(self.path)
+ return self._lstat
+
+ # The code duplication below is intentional: this is for slightly
+ # better performance on systems that fall back to GenericDirEntry.
+ # It avoids an additional attribute lookup and method call, which
+ # are relatively slow on CPython.
+ def is_dir(self, follow_symlinks=True):
+ try:
+ st = self.stat(follow_symlinks=follow_symlinks)
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False # Path doesn't exist or is a broken symlink
+ return st.st_mode & 0o170000 == S_IFDIR
+
+ def is_file(self, follow_symlinks=True):
+ try:
+ st = self.stat(follow_symlinks=follow_symlinks)
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False # Path doesn't exist or is a broken symlink
+ return st.st_mode & 0o170000 == S_IFREG
+
+ def is_symlink(self):
+ try:
+ st = self.stat(follow_symlinks=False)
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False # Path doesn't exist or is a broken symlink
+ return st.st_mode & 0o170000 == S_IFLNK
+
+ def inode(self):
+ st = self.stat(follow_symlinks=False)
+ return st.st_ino
+
+ def __str__(self):
+ return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name)
+
+ __repr__ = __str__
+
+
+def _scandir_generic(path=unicode('.')):
+ """Like os.listdir(), but yield DirEntry objects instead of returning
+ a list of names.
+ """
+ for name in listdir(path):
+ yield GenericDirEntry(path, name)
+
+
+if IS_PY3 and sys.platform == 'win32':
+ def scandir_generic(path=unicode('.')):
+ if isinstance(path, bytes):
+ raise TypeError("os.scandir() doesn't support bytes path on Windows, use Unicode instead")
+ return _scandir_generic(path)
+ scandir_generic.__doc__ = _scandir_generic.__doc__
+else:
+ scandir_generic = _scandir_generic
+
+
+scandir_c = None
+scandir_python = None
+
+
+if sys.platform == 'win32':
+ if ctypes is not None:
+ from ctypes import wintypes
+
+ # Various constants from windows.h
+ INVALID_HANDLE_VALUE = ctypes.c_void_p(-1).value
+ ERROR_FILE_NOT_FOUND = 2
+ ERROR_NO_MORE_FILES = 18
+ IO_REPARSE_TAG_SYMLINK = 0xA000000C
+
+        # Number of seconds between 1601-01-01 and 1970-01-01
+ SECONDS_BETWEEN_EPOCHS = 11644473600
+
+ kernel32 = ctypes.windll.kernel32
+
+ # ctypes wrappers for (wide string versions of) FindFirstFile,
+ # FindNextFile, and FindClose
+ FindFirstFile = kernel32.FindFirstFileW
+ FindFirstFile.argtypes = [
+ wintypes.LPCWSTR,
+ ctypes.POINTER(wintypes.WIN32_FIND_DATAW),
+ ]
+ FindFirstFile.restype = wintypes.HANDLE
+
+ FindNextFile = kernel32.FindNextFileW
+ FindNextFile.argtypes = [
+ wintypes.HANDLE,
+ ctypes.POINTER(wintypes.WIN32_FIND_DATAW),
+ ]
+ FindNextFile.restype = wintypes.BOOL
+
+ FindClose = kernel32.FindClose
+ FindClose.argtypes = [wintypes.HANDLE]
+ FindClose.restype = wintypes.BOOL
+
+ Win32StatResult = collections.namedtuple('Win32StatResult', [
+ 'st_mode',
+ 'st_ino',
+ 'st_dev',
+ 'st_nlink',
+ 'st_uid',
+ 'st_gid',
+ 'st_size',
+ 'st_atime',
+ 'st_mtime',
+ 'st_ctime',
+ 'st_atime_ns',
+ 'st_mtime_ns',
+ 'st_ctime_ns',
+ 'st_file_attributes',
+ ])
+
+ def filetime_to_time(filetime):
+ """Convert Win32 FILETIME to time since Unix epoch in seconds."""
+ total = filetime.dwHighDateTime << 32 | filetime.dwLowDateTime
+ return total / 10000000 - SECONDS_BETWEEN_EPOCHS
+
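    # Aside: a worked example of filetime_to_time() above (illustrative
    # only, not part of the vendored file). FILETIME counts 100-nanosecond
    # intervals since 1601-01-01, so the Unix epoch corresponds to
    # 11644473600 * 10**7 = 116444736000000000 intervals:
    #
    #     total = 116444736000000000
    #     total / 10000000 - SECONDS_BETWEEN_EPOCHS  # -> 0.0 (1970-01-01)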
+ def find_data_to_stat(data):
+ """Convert Win32 FIND_DATA struct to stat_result."""
+ # First convert Win32 dwFileAttributes to st_mode
+ attributes = data.dwFileAttributes
+ st_mode = 0
+ if attributes & FILE_ATTRIBUTE_DIRECTORY:
+ st_mode |= S_IFDIR | 0o111
+ else:
+ st_mode |= S_IFREG
+ if attributes & FILE_ATTRIBUTE_READONLY:
+ st_mode |= 0o444
+ else:
+ st_mode |= 0o666
+ if (attributes & FILE_ATTRIBUTE_REPARSE_POINT and
+ data.dwReserved0 == IO_REPARSE_TAG_SYMLINK):
+ st_mode ^= st_mode & 0o170000
+ st_mode |= S_IFLNK
+
+ st_size = data.nFileSizeHigh << 32 | data.nFileSizeLow
+ st_atime = filetime_to_time(data.ftLastAccessTime)
+ st_mtime = filetime_to_time(data.ftLastWriteTime)
+ st_ctime = filetime_to_time(data.ftCreationTime)
+
+ # Some fields set to zero per CPython's posixmodule.c: st_ino, st_dev,
+ # st_nlink, st_uid, st_gid
+ return Win32StatResult(st_mode, 0, 0, 0, 0, 0, st_size,
+ st_atime, st_mtime, st_ctime,
+ int(st_atime * 1000000000),
+ int(st_mtime * 1000000000),
+ int(st_ctime * 1000000000),
+ attributes)
+
+ class Win32DirEntryPython(object):
+ __slots__ = ('name', '_stat', '_lstat', '_find_data', '_scandir_path', '_path', '_inode')
+
+ def __init__(self, scandir_path, name, find_data):
+ self._scandir_path = scandir_path
+ self.name = name
+ self._stat = None
+ self._lstat = None
+ self._find_data = find_data
+ self._path = None
+ self._inode = None
+
+ @property
+ def path(self):
+ if self._path is None:
+ self._path = join(self._scandir_path, self.name)
+ return self._path
+
+ def stat(self, follow_symlinks=True):
+ if follow_symlinks:
+ if self._stat is None:
+ if self.is_symlink():
+ # It's a symlink, call link-following stat()
+ self._stat = stat(self.path)
+ else:
+ # Not a symlink, stat is same as lstat value
+ if self._lstat is None:
+ self._lstat = find_data_to_stat(self._find_data)
+ self._stat = self._lstat
+ return self._stat
+ else:
+ if self._lstat is None:
+ # Lazily convert to stat object, because it's slow
+ # in Python, and often we only need is_dir() etc
+ self._lstat = find_data_to_stat(self._find_data)
+ return self._lstat
+
+ def is_dir(self, follow_symlinks=True):
+ is_symlink = self.is_symlink()
+ if follow_symlinks and is_symlink:
+ try:
+ return self.stat().st_mode & 0o170000 == S_IFDIR
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False
+ elif is_symlink:
+ return False
+ else:
+ return (self._find_data.dwFileAttributes &
+ FILE_ATTRIBUTE_DIRECTORY != 0)
+
+ def is_file(self, follow_symlinks=True):
+ is_symlink = self.is_symlink()
+ if follow_symlinks and is_symlink:
+ try:
+ return self.stat().st_mode & 0o170000 == S_IFREG
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False
+ elif is_symlink:
+ return False
+ else:
+ return (self._find_data.dwFileAttributes &
+ FILE_ATTRIBUTE_DIRECTORY == 0)
+
+ def is_symlink(self):
+ return (self._find_data.dwFileAttributes &
+ FILE_ATTRIBUTE_REPARSE_POINT != 0 and
+ self._find_data.dwReserved0 == IO_REPARSE_TAG_SYMLINK)
+
+ def inode(self):
+ if self._inode is None:
+ self._inode = lstat(self.path).st_ino
+ return self._inode
+
+ def __str__(self):
+ return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name)
+
+ __repr__ = __str__
+
+ def win_error(error, filename):
+ exc = WindowsError(error, ctypes.FormatError(error))
+ exc.filename = filename
+ return exc
+
+ def _scandir_python(path=unicode('.')):
+ """Like os.listdir(), but yield DirEntry objects instead of returning
+ a list of names.
+ """
+ # Call FindFirstFile and handle errors
+ if isinstance(path, bytes):
+ is_bytes = True
+ filename = join(path.decode('mbcs', 'strict'), '*.*')
+ else:
+ is_bytes = False
+ filename = join(path, '*.*')
+ data = wintypes.WIN32_FIND_DATAW()
+ data_p = ctypes.byref(data)
+ handle = FindFirstFile(filename, data_p)
+ if handle == INVALID_HANDLE_VALUE:
+ error = ctypes.GetLastError()
+ if error == ERROR_FILE_NOT_FOUND:
+ # No files, don't yield anything
+ return
+ raise win_error(error, path)
+
+ # Call FindNextFile in a loop, stopping when no more files
+ try:
+ while True:
+ # Skip '.' and '..' (current and parent directory), but
+ # otherwise yield (filename, stat_result) tuple
+ name = data.cFileName
+ if name not in ('.', '..'):
+ if is_bytes:
+ name = name.encode('mbcs', 'replace')
+ yield Win32DirEntryPython(path, name, data)
+
+ data = wintypes.WIN32_FIND_DATAW()
+ data_p = ctypes.byref(data)
+ success = FindNextFile(handle, data_p)
+ if not success:
+ error = ctypes.GetLastError()
+ if error == ERROR_NO_MORE_FILES:
+ break
+ raise win_error(error, path)
+ finally:
+ if not FindClose(handle):
+ raise win_error(ctypes.GetLastError(), path)
+
+ if IS_PY3:
+ def scandir_python(path=unicode('.')):
+ if isinstance(path, bytes):
+ raise TypeError("os.scandir() doesn't support bytes path on Windows, use Unicode instead")
+ return _scandir_python(path)
+ scandir_python.__doc__ = _scandir_python.__doc__
+ else:
+ scandir_python = _scandir_python
+
+ if _scandir is not None:
+ scandir_c = _scandir.scandir
+ DirEntry_c = _scandir.DirEntry
+
+ if _scandir is not None:
+ scandir = scandir_c
+ DirEntry = DirEntry_c
+ elif ctypes is not None:
+ scandir = scandir_python
+ DirEntry = Win32DirEntryPython
+ else:
+ scandir = scandir_generic
+ DirEntry = GenericDirEntry
+
+
+# Linux, OS X, Solaris, and BSD implementation
+elif sys.platform.startswith(('linux', 'darwin', 'sunos5')) or 'bsd' in sys.platform:
+ have_dirent_d_type = (sys.platform != 'sunos5')
+
+ if ctypes is not None and have_dirent_d_type:
+ import ctypes.util
+
+ DIR_p = ctypes.c_void_p
+
+ # Rather annoying how the dirent struct is slightly different on each
+ # platform. The only fields we care about are d_name and d_type.
+ class Dirent(ctypes.Structure):
+ if sys.platform.startswith('linux'):
+ _fields_ = (
+ ('d_ino', ctypes.c_ulong),
+ ('d_off', ctypes.c_long),
+ ('d_reclen', ctypes.c_ushort),
+ ('d_type', ctypes.c_byte),
+ ('d_name', ctypes.c_char * 256),
+ )
+ elif 'openbsd' in sys.platform:
+ _fields_ = (
+ ('d_ino', ctypes.c_uint64),
+ ('d_off', ctypes.c_uint64),
+ ('d_reclen', ctypes.c_uint16),
+ ('d_type', ctypes.c_uint8),
+ ('d_namlen', ctypes.c_uint8),
+ ('__d_padding', ctypes.c_uint8 * 4),
+ ('d_name', ctypes.c_char * 256),
+ )
+ else:
+ _fields_ = (
+ ('d_ino', ctypes.c_uint32), # must be uint32, not ulong
+ ('d_reclen', ctypes.c_ushort),
+ ('d_type', ctypes.c_byte),
+ ('d_namlen', ctypes.c_byte),
+ ('d_name', ctypes.c_char * 256),
+ )
+
+ DT_UNKNOWN = 0
+ DT_DIR = 4
+ DT_REG = 8
+ DT_LNK = 10
+
+ Dirent_p = ctypes.POINTER(Dirent)
+ Dirent_pp = ctypes.POINTER(Dirent_p)
+
+ libc = ctypes.CDLL(ctypes.util.find_library('c'), use_errno=True)
+ opendir = libc.opendir
+ opendir.argtypes = [ctypes.c_char_p]
+ opendir.restype = DIR_p
+
+ readdir_r = libc.readdir_r
+ readdir_r.argtypes = [DIR_p, Dirent_p, Dirent_pp]
+ readdir_r.restype = ctypes.c_int
+
+ closedir = libc.closedir
+ closedir.argtypes = [DIR_p]
+ closedir.restype = ctypes.c_int
+
+ file_system_encoding = sys.getfilesystemencoding()
+
+ class PosixDirEntry(object):
+ __slots__ = ('name', '_d_type', '_stat', '_lstat', '_scandir_path', '_path', '_inode')
+
+ def __init__(self, scandir_path, name, d_type, inode):
+ self._scandir_path = scandir_path
+ self.name = name
+ self._d_type = d_type
+ self._inode = inode
+ self._stat = None
+ self._lstat = None
+ self._path = None
+
+ @property
+ def path(self):
+ if self._path is None:
+ self._path = join(self._scandir_path, self.name)
+ return self._path
+
+ def stat(self, follow_symlinks=True):
+ if follow_symlinks:
+ if self._stat is None:
+ if self.is_symlink():
+ self._stat = stat(self.path)
+ else:
+ if self._lstat is None:
+ self._lstat = lstat(self.path)
+ self._stat = self._lstat
+ return self._stat
+ else:
+ if self._lstat is None:
+ self._lstat = lstat(self.path)
+ return self._lstat
+
+ def is_dir(self, follow_symlinks=True):
+ if (self._d_type == DT_UNKNOWN or
+ (follow_symlinks and self.is_symlink())):
+ try:
+ st = self.stat(follow_symlinks=follow_symlinks)
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False
+ return st.st_mode & 0o170000 == S_IFDIR
+ else:
+ return self._d_type == DT_DIR
+
+ def is_file(self, follow_symlinks=True):
+ if (self._d_type == DT_UNKNOWN or
+ (follow_symlinks and self.is_symlink())):
+ try:
+ st = self.stat(follow_symlinks=follow_symlinks)
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False
+ return st.st_mode & 0o170000 == S_IFREG
+ else:
+ return self._d_type == DT_REG
+
+ def is_symlink(self):
+ if self._d_type == DT_UNKNOWN:
+ try:
+ st = self.stat(follow_symlinks=False)
+ except OSError as e:
+ if e.errno != ENOENT:
+ raise
+ return False
+ return st.st_mode & 0o170000 == S_IFLNK
+ else:
+ return self._d_type == DT_LNK
+
+ def inode(self):
+ return self._inode
+
+ def __str__(self):
+ return '<{0}: {1!r}>'.format(self.__class__.__name__, self.name)
+
+ __repr__ = __str__
+
+ def posix_error(filename):
+ errno = ctypes.get_errno()
+ exc = OSError(errno, strerror(errno))
+ exc.filename = filename
+ return exc
+
+ def scandir_python(path=unicode('.')):
+ """Like os.listdir(), but yield DirEntry objects instead of returning
+ a list of names.
+ """
+ if isinstance(path, bytes):
+ opendir_path = path
+ is_bytes = True
+ else:
+ opendir_path = path.encode(file_system_encoding)
+ is_bytes = False
+ dir_p = opendir(opendir_path)
+ if not dir_p:
+ raise posix_error(path)
+ try:
+ result = Dirent_p()
+ while True:
+ entry = Dirent()
+ if readdir_r(dir_p, entry, result):
+ raise posix_error(path)
+ if not result:
+ break
+ name = entry.d_name
+ if name not in (b'.', b'..'):
+ if not is_bytes:
+ name = name.decode(file_system_encoding)
+ yield PosixDirEntry(path, name, entry.d_type, entry.d_ino)
+ finally:
+ if closedir(dir_p):
+ raise posix_error(path)
+
+ if _scandir is not None:
+ scandir_c = _scandir.scandir
+ DirEntry_c = _scandir.DirEntry
+
+ if _scandir is not None:
+ scandir = scandir_c
+ DirEntry = DirEntry_c
+ elif ctypes is not None and have_dirent_d_type:
+ scandir = scandir_python
+ DirEntry = PosixDirEntry
+ else:
+ scandir = scandir_generic
+ DirEntry = GenericDirEntry
+
+
+# Some other system -- no d_type or stat information
+else:
+ scandir = scandir_generic
+ DirEntry = GenericDirEntry
+
+
+def _walk(top, topdown=True, onerror=None, followlinks=False):
+ """Like Python 3.5's implementation of os.walk() -- faster than
+ the pre-Python 3.5 version as it uses scandir() internally.
+ """
+ dirs = []
+ nondirs = []
+
+ # We may not have read permission for top, in which case we can't
+ # get a list of the files the directory contains. os.walk
+ # always suppressed the exception then, rather than blow up for a
+ # minor reason when (say) a thousand readable directories are still
+ # left to visit. That logic is copied here.
+ try:
+ scandir_it = scandir(top)
+ except OSError as error:
+ if onerror is not None:
+ onerror(error)
+ return
+
+ while True:
+ try:
+ try:
+ entry = next(scandir_it)
+ except StopIteration:
+ break
+ except OSError as error:
+ if onerror is not None:
+ onerror(error)
+ return
+
+ try:
+ is_dir = entry.is_dir()
+ except OSError:
+ # If is_dir() raises an OSError, consider that the entry is not
+            # a directory, the same behaviour as os.path.isdir().
+ is_dir = False
+
+ if is_dir:
+ dirs.append(entry.name)
+ else:
+ nondirs.append(entry.name)
+
+ if not topdown and is_dir:
+ # Bottom-up: recurse into sub-directory, but exclude symlinks to
+ # directories if followlinks is False
+ if followlinks:
+ walk_into = True
+ else:
+ try:
+ is_symlink = entry.is_symlink()
+ except OSError:
+ # If is_symlink() raises an OSError, consider that the
+                    # entry is not a symbolic link, the same behaviour as
+                    # os.path.islink().
+ is_symlink = False
+ walk_into = not is_symlink
+
+ if walk_into:
+ for entry in walk(entry.path, topdown, onerror, followlinks):
+ yield entry
+
+ # Yield before recursion if going top down
+ if topdown:
+ yield top, dirs, nondirs
+
+ # Recurse into sub-directories
+ for name in dirs:
+ new_path = join(top, name)
+ # Issue #23605: os.path.islink() is used instead of caching
+ # entry.is_symlink() result during the loop on os.scandir() because
+ # the caller can replace the directory entry during the "yield"
+ # above.
+ if followlinks or not islink(new_path):
+ for entry in walk(new_path, topdown, onerror, followlinks):
+ yield entry
+ else:
+ # Yield after recursion if going bottom up
+ yield top, dirs, nondirs
+
+
+if IS_PY3 or sys.platform != 'win32':
+ walk = _walk
+else:
+ # Fix for broken unicode handling on Windows on Python 2.x, see:
+ # https://github.com/benhoyt/scandir/issues/54
+ file_system_encoding = sys.getfilesystemencoding()
+
+ def walk(top, topdown=True, onerror=None, followlinks=False):
+ if isinstance(top, bytes):
+ top = top.decode(file_system_encoding)
+ return _walk(top, topdown, onerror, followlinks)
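Taken together, the module exports a drop-in pair: scandir() yields DirEntry
objects, and walk() mirrors os.walk() on top of them. A minimal usage sketch
(illustrative only; the path arguments are arbitrary):

    # Usage sketch for the vendored module above.
    import scandir

    # DirEntry answers is_dir()/is_file() from d_type or Win32 find data
    # where the platform provides it, avoiding a per-entry stat() call.
    for entry in scandir.scandir('.'):
        kind = 'dir' if entry.is_dir() else 'file'
        print(kind, entry.path)

    # walk() has the same signature and semantics as os.walk().
    for root, dirs, files in scandir.walk('.'):
        print(root, len(dirs), len(files))

    # True only when neither the C extension nor ctypes was usable:
    print(scandir.scandir is scandir.scandir_generic)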
diff --git a/contrib/deprecated/python/scandir/tests/test_scandir.py b/contrib/deprecated/python/scandir/tests/test_scandir.py
new file mode 100644
index 0000000000..11f7e09e69
--- /dev/null
+++ b/contrib/deprecated/python/scandir/tests/test_scandir.py
@@ -0,0 +1,336 @@
+"""Tests for scandir.scandir()."""
+
+from __future__ import unicode_literals
+
+import os
+import shutil
+import sys
+import time
+import unittest
+
+import yatest.common
+
+try:
+ import scandir
+ has_scandir = True
+except ImportError:
+ has_scandir = False
+
+FILE_ATTRIBUTE_DIRECTORY = 16
+
+IS_PY3 = sys.version_info >= (3, 0)
+
+if IS_PY3:
+ int_types = int
+else:
+ int_types = (int, long)
+ str = unicode
+
+
+if hasattr(os, 'symlink'):
+ try:
+ #link_name = os.path.join(os.path.dirname(__file__), '_testlink')
+ #os.symlink(__file__, link_name)
+ #os.remove(link_name)
+ symlinks_supported = True
+ except NotImplementedError:
+ # Windows versions before Vista don't support symbolic links
+ symlinks_supported = False
+else:
+ symlinks_supported = False
+
+
+def create_file(path, contents='1234'):
+ with open(path, 'w') as f:
+ f.write(contents)
+
+
+def setup_main():
+ join = os.path.join
+
+ os.mkdir(TEST_PATH)
+ os.mkdir(join(TEST_PATH, 'subdir'))
+ create_file(join(TEST_PATH, 'file1.txt'))
+ create_file(join(TEST_PATH, 'file2.txt'), contents='12345678')
+
+ os.mkdir(join(TEST_PATH, 'subdir', 'unidir\u018F'))
+ create_file(join(TEST_PATH, 'subdir', 'file1.txt'))
+ create_file(join(TEST_PATH, 'subdir', 'unicod\u018F.txt'))
+
+ create_file(join(TEST_PATH, 'subdir', 'unidir\u018F', 'file1.txt'))
+
+ os.mkdir(join(TEST_PATH, 'linkdir'))
+
+
+def setup_symlinks():
+ join = os.path.join
+
+ os.mkdir(join(TEST_PATH, 'linkdir', 'linksubdir'))
+ create_file(join(TEST_PATH, 'linkdir', 'file1.txt'))
+
+ os.symlink(os.path.abspath(join(TEST_PATH, 'linkdir', 'file1.txt')),
+ join(TEST_PATH, 'linkdir', 'link_to_file'))
+
+ dir_name = os.path.abspath(join(TEST_PATH, 'linkdir', 'linksubdir'))
+ dir_link = join(TEST_PATH, 'linkdir', 'link_to_dir')
+ if IS_PY3:
+ os.symlink(dir_name, dir_link, target_is_directory=True)
+ else:
+ os.symlink(dir_name, dir_link)
+
+
+def teardown():
+ try:
+ shutil.rmtree(TEST_PATH)
+ except OSError:
+        # rmtree sometimes fails on the first attempt (transient locks,
+        # especially on Windows); sleep briefly and retry once.
+ time.sleep(0.1)
+ shutil.rmtree(TEST_PATH)
+
+
+class TestMixin(unittest.TestCase):
+ def setUp(self):
+ global TEST_PATH
+ TEST_PATH = yatest.common.test_output_path('../test')
+ if not os.path.exists(TEST_PATH):
+ setup_main()
+ if symlinks_supported and not os.path.exists(
+ os.path.join(TEST_PATH, 'linkdir', 'linksubdir')):
+ setup_symlinks()
+
+ if not hasattr(unittest.TestCase, 'skipTest'):
+ def skipTest(self, reason):
+ sys.stdout.write('skipped {0!r} '.format(reason))
+
+ def test_basic(self):
+ if not hasattr(self, 'scandir_func'):
+ self.skipTest('skip mixin')
+ entries = sorted(self.scandir_func(TEST_PATH), key=lambda e: e.name)
+ self.assertEqual([(e.name, e.is_dir()) for e in entries],
+ [('file1.txt', False), ('file2.txt', False),
+ ('linkdir', True), ('subdir', True)])
+ self.assertEqual([e.path for e in entries],
+ [os.path.join(TEST_PATH, e.name) for e in entries])
+
+ def test_dir_entry(self):
+ if not hasattr(self, 'scandir_func'):
+ self.skipTest('skip mixin')
+ entries = dict((e.name, e) for e in self.scandir_func(TEST_PATH))
+ e = entries['file1.txt']
+ self.assertEqual([e.is_dir(), e.is_file(), e.is_symlink()], [False, True, False])
+ e = entries['file2.txt']
+ self.assertEqual([e.is_dir(), e.is_file(), e.is_symlink()], [False, True, False])
+ e = entries['subdir']
+ self.assertEqual([e.is_dir(), e.is_file(), e.is_symlink()], [True, False, False])
+
+ self.assertEqual(entries['file1.txt'].stat().st_size, 4)
+ self.assertEqual(entries['file2.txt'].stat().st_size, 8)
+
+ def test_stat(self):
+ if not hasattr(self, 'scandir_func'):
+ self.skipTest('skip mixin')
+ entries = list(self.scandir_func(TEST_PATH))
+ for entry in entries:
+ os_stat = os.stat(os.path.join(TEST_PATH, entry.name))
+ scandir_stat = entry.stat()
+ self.assertEqual(os_stat.st_mode, scandir_stat.st_mode)
+            # TODO: it would be nice to figure out why these aren't identical on Windows and on PyPy
+ # * Windows: they seem to be a few microseconds to tens of seconds out
+ # * PyPy: for some reason os_stat's times are nanosecond, scandir's are not
+ self.assertAlmostEqual(os_stat.st_mtime, scandir_stat.st_mtime, delta=1)
+ self.assertAlmostEqual(os_stat.st_ctime, scandir_stat.st_ctime, delta=1)
+ if entry.is_file():
+ self.assertEqual(os_stat.st_size, scandir_stat.st_size)
+
+ def test_returns_iter(self):
+ if not hasattr(self, 'scandir_func'):
+ self.skipTest('skip mixin')
+ it = self.scandir_func(TEST_PATH)
+ entry = next(it)
+ assert hasattr(entry, 'name')
+
+ def check_file_attributes(self, result):
+ self.assertTrue(hasattr(result, 'st_file_attributes'))
+ self.assertTrue(isinstance(result.st_file_attributes, int_types))
+ self.assertTrue(0 <= result.st_file_attributes <= 0xFFFFFFFF)
+
+ def test_file_attributes(self):
+ if not hasattr(self, 'scandir_func'):
+ self.skipTest('skip mixin')
+ if sys.platform != 'win32' or not self.has_file_attributes:
+ # st_file_attributes is Win32 specific
+ return self.skipTest('st_file_attributes not supported')
+
+ entries = dict((e.name, e) for e in self.scandir_func(TEST_PATH))
+
+ # test st_file_attributes on a file (FILE_ATTRIBUTE_DIRECTORY not set)
+ result = entries['file1.txt'].stat()
+ self.check_file_attributes(result)
+ self.assertEqual(result.st_file_attributes & FILE_ATTRIBUTE_DIRECTORY, 0)
+
+ # test st_file_attributes on a directory (FILE_ATTRIBUTE_DIRECTORY set)
+ result = entries['subdir'].stat()
+ self.check_file_attributes(result)
+ self.assertEqual(result.st_file_attributes & FILE_ATTRIBUTE_DIRECTORY,
+ FILE_ATTRIBUTE_DIRECTORY)
+
+ def test_path(self):
+ if not hasattr(self, 'scandir_func'):
+ self.skipTest('skip mixin')
+ entries = sorted(self.scandir_func(TEST_PATH), key=lambda e: e.name)
+ self.assertEqual([os.path.basename(e.name) for e in entries],
+ ['file1.txt', 'file2.txt', 'linkdir', 'subdir'])
+ self.assertEqual([os.path.normpath(os.path.join(TEST_PATH, e.name)) for e in entries],
+ [os.path.normpath(e.path) for e in entries])
+
+ def test_symlink(self):
+ if not hasattr(self, 'scandir_func'):
+ self.skipTest('skip mixin')
+ if not symlinks_supported:
+ return self.skipTest('symbolic links not supported')
+
+ entries = sorted(self.scandir_func(os.path.join(TEST_PATH, 'linkdir')),
+ key=lambda e: e.name)
+
+ self.assertEqual([(e.name, e.is_symlink()) for e in entries],
+ [('file1.txt', False),
+ ('link_to_dir', True),
+ ('link_to_file', True),
+ ('linksubdir', False)])
+
+ self.assertEqual([(e.name, e.is_file(), e.is_file(follow_symlinks=False))
+ for e in entries],
+ [('file1.txt', True, True),
+ ('link_to_dir', False, False),
+ ('link_to_file', True, False),
+ ('linksubdir', False, False)])
+
+ self.assertEqual([(e.name, e.is_dir(), e.is_dir(follow_symlinks=False))
+ for e in entries],
+ [('file1.txt', False, False),
+ ('link_to_dir', True, False),
+ ('link_to_file', False, False),
+ ('linksubdir', True, True)])
+
+ def test_bytes(self):
+ if not hasattr(self, 'scandir_func'):
+ self.skipTest('skip mixin')
+ # Check that unicode filenames are returned correctly as bytes in output
+ path = os.path.join(TEST_PATH, 'subdir').encode(sys.getfilesystemencoding(), 'replace')
+ self.assertTrue(isinstance(path, bytes))
+
+        # Python 3.6 on Windows fixes bytes filename handling by using UTF-8
+ if IS_PY3 and sys.platform == 'win32':
+ if not (sys.version_info >= (3, 6) and self.scandir_func == os.scandir):
+ self.assertRaises(TypeError, self.scandir_func, path)
+ return
+
+ entries = [e for e in self.scandir_func(path) if e.name.startswith(b'unicod')]
+ self.assertEqual(len(entries), 1)
+ entry = entries[0]
+
+ self.assertTrue(isinstance(entry.name, bytes))
+ self.assertTrue(isinstance(entry.path, bytes))
+
+ # b'unicod?.txt' on Windows, b'unicod\xc6\x8f.txt' (UTF-8) or similar on POSIX
+ entry_name = 'unicod\u018f.txt'.encode(sys.getfilesystemencoding(), 'replace')
+ self.assertEqual(entry.name, entry_name)
+ self.assertEqual(entry.path, os.path.join(path, entry_name))
+
+ def test_unicode(self):
+ if not hasattr(self, 'scandir_func'):
+ self.skipTest('skip mixin')
+ # Check that unicode filenames are returned correctly as (unicode) str in output
+ path = os.path.join(TEST_PATH, 'subdir')
+ if not IS_PY3:
+ path = path.decode(sys.getfilesystemencoding(), 'replace')
+ self.assertTrue(isinstance(path, str))
+ entries = [e for e in self.scandir_func(path) if e.name.startswith('unicod')]
+ self.assertEqual(len(entries), 1)
+ entry = entries[0]
+
+ self.assertTrue(isinstance(entry.name, str))
+ self.assertTrue(isinstance(entry.path, str))
+
+ entry_name = 'unicod\u018f.txt'
+ self.assertEqual(entry.name, entry_name)
+ self.assertEqual(entry.path, os.path.join(path, 'unicod\u018f.txt'))
+
+ # Check that it handles unicode input properly
+ path = os.path.join(TEST_PATH, 'subdir', 'unidir\u018f')
+ self.assertTrue(isinstance(path, str))
+ entries = list(self.scandir_func(path))
+ self.assertEqual(len(entries), 1)
+ entry = entries[0]
+
+ self.assertTrue(isinstance(entry.name, str))
+ self.assertTrue(isinstance(entry.path, str))
+ self.assertEqual(entry.name, 'file1.txt')
+ self.assertEqual(entry.path, os.path.join(path, 'file1.txt'))
+
+ def test_walk_unicode_handling(self):
+ if not hasattr(self, 'scandir_func'):
+ self.skipTest('skip mixin')
+ encoding = sys.getfilesystemencoding()
+ dirname_unicode = u'test_unicode_dir'
+ dirname_bytes = dirname_unicode.encode(encoding)
+ dirpath = os.path.join(TEST_PATH.encode(encoding), dirname_bytes)
+ try:
+ os.makedirs(dirpath)
+
+ if sys.platform != 'win32':
+ # test bytes
+ self.assertTrue(isinstance(dirpath, bytes))
+ for (path, dirs, files) in scandir.walk(dirpath):
+ self.assertTrue(isinstance(path, bytes))
+
+ # test unicode
+ text_type = str if IS_PY3 else unicode
+ dirpath_unicode = text_type(dirpath, encoding)
+ self.assertTrue(isinstance(dirpath_unicode, text_type))
+ for (path, dirs, files) in scandir.walk(dirpath_unicode):
+ self.assertTrue(isinstance(path, text_type))
+ finally:
+ shutil.rmtree(dirpath)
+
+if has_scandir:
+ class TestScandirGeneric(TestMixin, unittest.TestCase):
+ def setUp(self):
+ self.scandir_func = scandir.scandir_generic
+ self.has_file_attributes = False
+ TestMixin.setUp(self)
+
+
+ if getattr(scandir, 'scandir_python', None):
+ class TestScandirPython(TestMixin, unittest.TestCase):
+ def setUp(self):
+ self.scandir_func = scandir.scandir_python
+ self.has_file_attributes = True
+ TestMixin.setUp(self)
+
+
+ if getattr(scandir, 'scandir_c', None):
+ class TestScandirC(TestMixin, unittest.TestCase):
+ def setUp(self):
+ self.scandir_func = scandir.scandir_c
+ self.has_file_attributes = True
+ TestMixin.setUp(self)
+
+
+ class TestScandirDirEntry(unittest.TestCase):
+ def setUp(self):
+ if not os.path.exists(TEST_PATH):
+ setup_main()
+
+ def test_iter_returns_dir_entry(self):
+ it = scandir.scandir(TEST_PATH)
+ entry = next(it)
+ assert isinstance(entry, scandir.DirEntry)
+
+
+if hasattr(os, 'scandir'):
+ class TestScandirOS(TestMixin, unittest.TestCase):
+ def setUp(self):
+ self.scandir_func = os.scandir
+ self.has_file_attributes = True
+ TestMixin.setUp(self)
diff --git a/contrib/deprecated/python/scandir/tests/test_walk.py b/contrib/deprecated/python/scandir/tests/test_walk.py
new file mode 100644
index 0000000000..6d49a4fa0b
--- /dev/null
+++ b/contrib/deprecated/python/scandir/tests/test_walk.py
@@ -0,0 +1,213 @@
+"""Tests for scandir.walk(), copied from CPython's tests for os.walk()."""
+
+import os
+import shutil
+import sys
+import unittest
+
+import scandir
+
+import yatest.common
+
+walk_func = scandir.walk
+
+IS_PY3 = sys.version_info >= (3, 0)
+
+
+class TestWalk(unittest.TestCase):
+ testfn = os.path.join(os.path.dirname(__file__), 'temp')
+
+ def test_traversal(self):
+ self.testfn = yatest.common.test_output_path('temp')
+ # Build:
+ # TESTFN/
+ # TEST1/ a file kid and two directory kids
+ # tmp1
+ # SUB1/ a file kid and a directory kid
+ # tmp2
+ # SUB11/ no kids
+ # SUB2/ a file kid and a dirsymlink kid
+ # tmp3
+ # link/ a symlink to TESTFN.2
+ # TEST2/
+ # tmp4 a lone file
+ walk_path = os.path.join(self.testfn, "TEST1")
+ sub1_path = os.path.join(walk_path, "SUB1")
+ sub11_path = os.path.join(sub1_path, "SUB11")
+ sub2_path = os.path.join(walk_path, "SUB2")
+ tmp1_path = os.path.join(walk_path, "tmp1")
+ tmp2_path = os.path.join(sub1_path, "tmp2")
+ tmp3_path = os.path.join(sub2_path, "tmp3")
+ link_path = os.path.join(sub2_path, "link")
+ t2_path = os.path.join(self.testfn, "TEST2")
+ tmp4_path = os.path.join(self.testfn, "TEST2", "tmp4")
+
+ # Create stuff.
+ os.makedirs(sub11_path)
+ os.makedirs(sub2_path)
+ os.makedirs(t2_path)
+ for path in tmp1_path, tmp2_path, tmp3_path, tmp4_path:
+ f = open(path, "w")
+ f.write("I'm " + path + " and proud of it. Blame test_os.\n")
+ f.close()
+ has_symlink = hasattr(os, "symlink")
+ if has_symlink:
+ try:
+ if IS_PY3:
+ os.symlink(os.path.abspath(t2_path), link_path, target_is_directory=True)
+ else:
+ os.symlink(os.path.abspath(t2_path), link_path)
+ sub2_tree = (sub2_path, ["link"], ["tmp3"])
+ except NotImplementedError:
+ sub2_tree = (sub2_path, [], ["tmp3"])
+ else:
+ sub2_tree = (sub2_path, [], ["tmp3"])
+
+ # Walk top-down.
+ all = list(walk_func(walk_path))
+ self.assertEqual(len(all), 4)
+ # We can't know which order SUB1 and SUB2 will appear in.
+ # Not flipped: TESTFN, SUB1, SUB11, SUB2
+ # flipped: TESTFN, SUB2, SUB1, SUB11
+ flipped = all[0][1][0] != "SUB1"
+ all[0][1].sort()
+ self.assertEqual(all[0], (walk_path, ["SUB1", "SUB2"], ["tmp1"]))
+ self.assertEqual(all[1 + flipped], (sub1_path, ["SUB11"], ["tmp2"]))
+ self.assertEqual(all[2 + flipped], (sub11_path, [], []))
+ self.assertEqual(all[3 - 2 * flipped], sub2_tree)
+
+ # Prune the search.
+ all = []
+ for root, dirs, files in walk_func(walk_path):
+ all.append((root, dirs, files))
+ # Don't descend into SUB1.
+ if 'SUB1' in dirs:
+ # Note that this also mutates the dirs we appended to all!
+ dirs.remove('SUB1')
+ self.assertEqual(len(all), 2)
+ self.assertEqual(all[0], (walk_path, ["SUB2"], ["tmp1"]))
+ self.assertEqual(all[1], sub2_tree)
+
+ # Walk bottom-up.
+ all = list(walk_func(walk_path, topdown=False))
+ self.assertEqual(len(all), 4)
+ # We can't know which order SUB1 and SUB2 will appear in.
+ # Not flipped: SUB11, SUB1, SUB2, TESTFN
+ # flipped: SUB2, SUB11, SUB1, TESTFN
+ flipped = all[3][1][0] != "SUB1"
+ all[3][1].sort()
+ self.assertEqual(all[3], (walk_path, ["SUB1", "SUB2"], ["tmp1"]))
+ self.assertEqual(all[flipped], (sub11_path, [], []))
+ self.assertEqual(all[flipped + 1], (sub1_path, ["SUB11"], ["tmp2"]))
+ self.assertEqual(all[2 - 2 * flipped], sub2_tree)
+
+ if has_symlink:
+ # Walk, following symlinks.
+ for root, dirs, files in walk_func(walk_path, followlinks=True):
+ if root == link_path:
+ self.assertEqual(dirs, [])
+ self.assertEqual(files, ["tmp4"])
+ break
+ else:
+ self.fail("Didn't follow symlink with followlinks=True")
+
+ # Test creating a directory and adding it to dirnames
+ sub3_path = os.path.join(walk_path, "SUB3")
+ all = []
+ for root, dirs, files in walk_func(walk_path):
+ all.append((root, dirs, files))
+ if 'SUB1' in dirs:
+ os.makedirs(sub3_path)
+ dirs.append('SUB3')
+ all.sort()
+ self.assertEqual(os.path.split(all[-1][0])[1], 'SUB3')
+
+ def tearDown(self):
+ # Tear everything down. This is a decent use for bottom-up on
+ # Windows, which doesn't have a recursive delete command. The
+ # (not so) subtlety is that rmdir will fail unless the dir's
+ # kids are removed first, so bottom up is essential.
+ for root, dirs, files in os.walk(self.testfn, topdown=False):
+ for name in files:
+ os.remove(os.path.join(root, name))
+ for name in dirs:
+ dirname = os.path.join(root, name)
+ if not os.path.islink(dirname):
+ os.rmdir(dirname)
+ else:
+ os.remove(dirname)
+ os.rmdir(self.testfn)
+
+
+class TestWalkSymlink(unittest.TestCase):
+ temp_dir = os.path.join(os.path.dirname(__file__), 'temp')
+
+ def setUp(self):
+ self.temp_dir = yatest.common.test_output_path('temp')
+ os.mkdir(self.temp_dir)
+ self.dir_name = os.path.join(self.temp_dir, 'dir')
+ os.mkdir(self.dir_name)
+ open(os.path.join(self.dir_name, 'subfile'), 'w').close()
+ self.file_name = os.path.join(self.temp_dir, 'file')
+ open(self.file_name, 'w').close()
+
+ def tearDown(self):
+ shutil.rmtree(self.temp_dir)
+
+ def test_symlink_to_file(self):
+ if not hasattr(os, 'symlink'):
+ return
+
+ try:
+ os.symlink(self.file_name, os.path.join(self.temp_dir,
+ 'link_to_file'))
+ except NotImplementedError:
+ # Windows versions before Vista don't support symbolic links
+ return
+
+ output = sorted(walk_func(self.temp_dir))
+ dirs = sorted(output[0][1])
+ files = sorted(output[0][2])
+ self.assertEqual(dirs, ['dir'])
+ self.assertEqual(files, ['file', 'link_to_file'])
+
+ self.assertEqual(len(output), 2)
+ self.assertEqual(output[1][1], [])
+ self.assertEqual(output[1][2], ['subfile'])
+
+ def test_symlink_to_directory(self):
+ if not hasattr(os, 'symlink'):
+ return
+
+ link_name = os.path.join(self.temp_dir, 'link_to_dir')
+ try:
+ if IS_PY3:
+ os.symlink(self.dir_name, link_name, target_is_directory=True)
+ else:
+ os.symlink(self.dir_name, link_name)
+ except NotImplementedError:
+ # Windows versions before Vista don't support symbolic links
+ return
+
+ output = sorted(walk_func(self.temp_dir))
+ dirs = sorted(output[0][1])
+ files = sorted(output[0][2])
+ self.assertEqual(dirs, ['dir', 'link_to_dir'])
+ self.assertEqual(files, ['file'])
+
+ self.assertEqual(len(output), 2)
+ self.assertEqual(output[1][1], [])
+ self.assertEqual(output[1][2], ['subfile'])
+
+ output = sorted(walk_func(self.temp_dir, followlinks=True))
+ dirs = sorted(output[0][1])
+ files = sorted(output[0][2])
+ self.assertEqual(dirs, ['dir', 'link_to_dir'])
+ self.assertEqual(files, ['file'])
+
+ self.assertEqual(len(output), 3)
+ self.assertEqual(output[1][1], [])
+ self.assertEqual(output[1][2], ['subfile'])
+ self.assertEqual(os.path.basename(output[2][0]), 'link_to_dir')
+ self.assertEqual(output[2][1], [])
+ self.assertEqual(output[2][2], ['subfile'])
diff --git a/contrib/deprecated/python/scandir/tests/ya.make b/contrib/deprecated/python/scandir/tests/ya.make
new file mode 100644
index 0000000000..544617cae8
--- /dev/null
+++ b/contrib/deprecated/python/scandir/tests/ya.make
@@ -0,0 +1,18 @@
+PY2TEST()
+
+PEERDIR(
+ contrib/deprecated/python/scandir
+)
+
+TEST_SRCS(
+ test_scandir.py
+ test_walk.py
+)
+
+DATA(
+ arcadia/contrib/deprecated/python/scandir/tests
+)
+
+NO_LINT()
+
+END()
diff --git a/contrib/deprecated/python/scandir/winreparse.h b/contrib/deprecated/python/scandir/winreparse.h
new file mode 100644
index 0000000000..66f7775dd2
--- /dev/null
+++ b/contrib/deprecated/python/scandir/winreparse.h
@@ -0,0 +1,53 @@
+#ifndef Py_WINREPARSE_H
+#define Py_WINREPARSE_H
+
+#ifdef MS_WINDOWS
+#include <Windows.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* The following structure was copied from
+ http://msdn.microsoft.com/en-us/library/ff552012.aspx as the required
+ include doesn't seem to be present in the Windows SDK (at least as included
+ with Visual Studio Express). */
+typedef struct _REPARSE_DATA_BUFFER {
+ ULONG ReparseTag;
+ USHORT ReparseDataLength;
+ USHORT Reserved;
+ union {
+ struct {
+ USHORT SubstituteNameOffset;
+ USHORT SubstituteNameLength;
+ USHORT PrintNameOffset;
+ USHORT PrintNameLength;
+ ULONG Flags;
+ WCHAR PathBuffer[1];
+ } SymbolicLinkReparseBuffer;
+
+ struct {
+ USHORT SubstituteNameOffset;
+ USHORT SubstituteNameLength;
+ USHORT PrintNameOffset;
+ USHORT PrintNameLength;
+ WCHAR PathBuffer[1];
+ } MountPointReparseBuffer;
+
+ struct {
+ UCHAR DataBuffer[1];
+ } GenericReparseBuffer;
+ };
+} REPARSE_DATA_BUFFER, *PREPARSE_DATA_BUFFER;
+
+#define REPARSE_DATA_BUFFER_HEADER_SIZE FIELD_OFFSET(REPARSE_DATA_BUFFER,\
+ GenericReparseBuffer)
+#define MAXIMUM_REPARSE_DATA_BUFFER_SIZE ( 16 * 1024 )
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* MS_WINDOWS */
+
+#endif /* !Py_WINREPARSE_H */
diff --git a/contrib/deprecated/python/scandir/ya.make b/contrib/deprecated/python/scandir/ya.make
new file mode 100644
index 0000000000..92b0b5cf4f
--- /dev/null
+++ b/contrib/deprecated/python/scandir/ya.make
@@ -0,0 +1,36 @@
+# Generated by devtools/yamaker (pypi).
+
+PY2_LIBRARY()
+
+VERSION(1.10.0)
+
+LICENSE(BSD-3-Clause)
+
+NO_COMPILER_WARNINGS()
+
+NO_LINT()
+
+SRCS(
+ _scandir.c
+)
+
+PY_REGISTER(
+ _scandir
+)
+
+PY_SRCS(
+ TOP_LEVEL
+ scandir.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/deprecated/python/scandir/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ tests
+)
diff --git a/contrib/deprecated/python/singledispatch/.dist-info/METADATA b/contrib/deprecated/python/singledispatch/.dist-info/METADATA
new file mode 100644
index 0000000000..280d474d35
--- /dev/null
+++ b/contrib/deprecated/python/singledispatch/.dist-info/METADATA
@@ -0,0 +1,91 @@
+Metadata-Version: 2.1
+Name: singledispatch
+Version: 3.7.0
+Summary: Backport functools.singledispatch from Python 3.4 to Python 2.6-3.3.
+Home-page: https://github.com/jaraco/singledispatch
+Author: Jason R. Coombs
+Author-email: jaraco@jaraco.com
+License: UNKNOWN
+Keywords: single,dispatch,generic,functions,singledispatch,genericfunctions,decorator,backport
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Requires-Python: >=2.6
+License-File: LICENSE
+Requires-Dist: six
+Requires-Dist: ordereddict ; python_version < "2.7"
+Provides-Extra: docs
+Requires-Dist: sphinx ; extra == 'docs'
+Requires-Dist: jaraco.packaging (>=8.2) ; extra == 'docs'
+Requires-Dist: rst.linker (>=1.9) ; extra == 'docs'
+Provides-Extra: testing
+Requires-Dist: pytest (>=4.6) ; extra == 'testing'
+Requires-Dist: pytest-flake8 ; extra == 'testing'
+Requires-Dist: pytest-cov ; extra == 'testing'
+Requires-Dist: pytest-black (>=0.3.7) ; (platform_python_implementation != "PyPy") and extra == 'testing'
+Requires-Dist: unittest2 ; (python_version < "3") and extra == 'testing'
+Requires-Dist: pytest-checkdocs (>=2.4) ; (python_version > "3.6") and extra == 'testing'
+
+.. image:: https://img.shields.io/pypi/v/singledispatch.svg
+ :target: `PyPI link`_
+
+.. image:: https://img.shields.io/pypi/pyversions/singledispatch.svg
+ :target: `PyPI link`_
+
+.. _PyPI link: https://pypi.org/project/singledispatch
+
+.. image:: https://github.com/jaraco/singledispatch/workflows/tests/badge.svg
+ :target: https://github.com/jaraco/singledispatch/actions?query=workflow%3A%22tests%22
+ :alt: tests
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. .. image:: https://readthedocs.org/projects/skeleton/badge/?version=latest
+.. :target: https://skeleton.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2021-informational
+ :target: https://blog.jaraco.com/skeleton
+
+`PEP 443 <http://www.python.org/dev/peps/pep-0443/>`_ proposed to expose
+a mechanism in the ``functools`` standard library module in Python 3.4
+that provides a simple form of generic programming known as
+single-dispatch generic functions.
+
+This library is a backport of this functionality and its evolution.
+
+Refer to the `upstream documentation
+<http://docs.python.org/3/library/functools.html#functools.singledispatch>`_
+for API guidance. To use the backport, simply use
+``from singledispatch import singledispatch, singledispatchmethod`` in place of
+``from functools import singledispatch, singledispatchmethod``.
+
+
+
+Maintenance
+-----------
+
+This backport is maintained on GitHub by Jason R. Coombs, one of the
+members of the core CPython team:
+
+* `repository <https://github.com/jaraco/singledispatch>`_
+
+* `issue tracker <https://github.com/jaraco/singledispatch/issues>`_
+
+
diff --git a/contrib/deprecated/python/singledispatch/.dist-info/top_level.txt b/contrib/deprecated/python/singledispatch/.dist-info/top_level.txt
new file mode 100644
index 0000000000..ebb5ff79be
--- /dev/null
+++ b/contrib/deprecated/python/singledispatch/.dist-info/top_level.txt
@@ -0,0 +1 @@
+singledispatch
diff --git a/contrib/deprecated/python/singledispatch/LICENSE b/contrib/deprecated/python/singledispatch/LICENSE
new file mode 100644
index 0000000000..353924be0e
--- /dev/null
+++ b/contrib/deprecated/python/singledispatch/LICENSE
@@ -0,0 +1,19 @@
+Copyright Jason R. Coombs
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to
+deal in the Software without restriction, including without limitation the
+rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+sell copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+IN THE SOFTWARE.
diff --git a/contrib/deprecated/python/singledispatch/README.rst b/contrib/deprecated/python/singledispatch/README.rst
new file mode 100644
index 0000000000..05084c85fc
--- /dev/null
+++ b/contrib/deprecated/python/singledispatch/README.rst
@@ -0,0 +1,46 @@
+.. image:: https://img.shields.io/pypi/v/singledispatch.svg
+ :target: `PyPI link`_
+
+.. image:: https://img.shields.io/pypi/pyversions/singledispatch.svg
+ :target: `PyPI link`_
+
+.. _PyPI link: https://pypi.org/project/singledispatch
+
+.. image:: https://github.com/jaraco/singledispatch/workflows/tests/badge.svg
+ :target: https://github.com/jaraco/singledispatch/actions?query=workflow%3A%22tests%22
+ :alt: tests
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :target: https://github.com/psf/black
+ :alt: Code style: Black
+
+.. .. image:: https://readthedocs.org/projects/skeleton/badge/?version=latest
+.. :target: https://skeleton.readthedocs.io/en/latest/?badge=latest
+
+.. image:: https://img.shields.io/badge/skeleton-2021-informational
+ :target: https://blog.jaraco.com/skeleton
+
+`PEP 443 <http://www.python.org/dev/peps/pep-0443/>`_ proposed to expose
+a mechanism in the ``functools`` standard library module in Python 3.4
+that provides a simple form of generic programming known as
+single-dispatch generic functions.
+
+This library is a backport of this functionality and its evolution.
+
+Refer to the `upstream documentation
+<http://docs.python.org/3/library/functools.html#functools.singledispatch>`_
+for API guidance. To use the backport, simply use
+``from singledispatch import singledispatch, singledispatchmethod`` in place of
+``from functools import singledispatch, singledispatchmethod``.
+
+
+
+Maintenance
+-----------
+
+This backport is maintained on GitHub by Jason R. Coombs, one of the
+members of the core CPython team:
+
+* `repository <https://github.com/jaraco/singledispatch>`_
+
+* `issue tracker <https://github.com/jaraco/singledispatch/issues>`_
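A concrete sketch of the interface the README describes (the function and its
registrations below are invented for illustration):

    # Usage sketch for the backport.
    from singledispatch import singledispatch

    @singledispatch
    def describe(obj):
        return 'object: {0!r}'.format(obj)

    @describe.register(int)
    def _(obj):
        return 'int: {0}'.format(obj)

    @describe.register(list)
    def _(obj):
        return 'list of {0} items'.format(len(obj))

    print(describe(3))      # int: 3 -- exact match
    print(describe(True))   # int: True -- bool dispatches via its MRO
    print(describe('x'))    # object: 'x' -- falls back to the default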
diff --git a/contrib/deprecated/python/singledispatch/singledispatch/__init__.py b/contrib/deprecated/python/singledispatch/singledispatch/__init__.py
new file mode 100644
index 0000000000..f6ec6eaebe
--- /dev/null
+++ b/contrib/deprecated/python/singledispatch/singledispatch/__init__.py
@@ -0,0 +1,300 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+__all__ = ['singledispatch', 'singledispatchmethod']
+
+from weakref import WeakKeyDictionary
+
+from .helpers import MappingProxyType, get_cache_token, get_type_hints, update_wrapper
+
+################################################################################
+### singledispatch() - single-dispatch generic function decorator
+################################################################################
+
+def _c3_merge(sequences):
+ """Merges MROs in *sequences* to a single MRO using the C3 algorithm.
+
+ Adapted from http://www.python.org/download/releases/2.3/mro/.
+
+ """
+ result = []
+ while True:
+ sequences = [s for s in sequences if s] # purge empty sequences
+ if not sequences:
+ return result
+ for s1 in sequences: # find merge candidates among seq heads
+ candidate = s1[0]
+ for s2 in sequences:
+ if candidate in s2[1:]:
+ candidate = None
+ break # reject the current head, it appears later
+ else:
+ break
+ if candidate is None:
+ raise RuntimeError("Inconsistent hierarchy")
+ result.append(candidate)
+ # remove the chosen candidate
+ for seq in sequences:
+ if seq[0] == candidate:
+ del seq[0]
+
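# Aside: a worked example of _c3_merge() above (illustrative only, not part
# of the vendored file). Linearizing class C(A, B), where A and B both
# derive from object ('O'), merges the bases' MROs with the base order:
#
#     _c3_merge([['C'], ['A', 'O'], ['B', 'O'], ['A', 'B']])
#
# 'C' is taken first; then 'A' ('O' is rejected while it still appears in a
# tail); then 'B'; finally 'O' -- giving ['C', 'A', 'B', 'O'].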
+def _c3_mro(cls, abcs=None):
+ """Computes the method resolution order using extended C3 linearization.
+
+ If no *abcs* are given, the algorithm works exactly like the built-in C3
+ linearization used for method resolution.
+
+ If given, *abcs* is a list of abstract base classes that should be inserted
+ into the resulting MRO. Unrelated ABCs are ignored and don't end up in the
+ result. The algorithm inserts ABCs where their functionality is introduced,
+ i.e. issubclass(cls, abc) returns True for the class itself but returns
+ False for all its direct base classes. Implicit ABCs for a given class
+ (either registered or inferred from the presence of a special method like
+ __len__) are inserted directly after the last ABC explicitly listed in the
+ MRO of said class. If two implicit ABCs end up next to each other in the
+ resulting MRO, their ordering depends on the order of types in *abcs*.
+
+ """
+ for i, base in enumerate(reversed(cls.__bases__)):
+ if hasattr(base, '__abstractmethods__'):
+ boundary = len(cls.__bases__) - i
+ break # Bases up to the last explicit ABC are considered first.
+ else:
+ boundary = 0
+ abcs = list(abcs) if abcs else []
+ explicit_bases = list(cls.__bases__[:boundary])
+ abstract_bases = []
+ other_bases = list(cls.__bases__[boundary:])
+ for base in abcs:
+ if issubclass(cls, base) and not any(
+ issubclass(b, base) for b in cls.__bases__
+ ):
+ # If *cls* is the class that introduces behaviour described by
+ # an ABC *base*, insert said ABC to its MRO.
+ abstract_bases.append(base)
+ for base in abstract_bases:
+ abcs.remove(base)
+ explicit_c3_mros = [_c3_mro(base, abcs=abcs) for base in explicit_bases]
+ abstract_c3_mros = [_c3_mro(base, abcs=abcs) for base in abstract_bases]
+ other_c3_mros = [_c3_mro(base, abcs=abcs) for base in other_bases]
+ return _c3_merge(
+ [[cls]] +
+ explicit_c3_mros + abstract_c3_mros + other_c3_mros +
+ [explicit_bases] + [abstract_bases] + [other_bases]
+ )
+
+def _compose_mro(cls, types):
+ """Calculates the method resolution order for a given class *cls*.
+
+ Includes relevant abstract base classes (with their respective bases) from
+ the *types* iterable. Uses a modified C3 linearization algorithm.
+
+ """
+ bases = set(cls.__mro__)
+ # Remove entries which are already present in the __mro__ or unrelated.
+ def is_related(typ):
+ return (typ not in bases and hasattr(typ, '__mro__')
+ and issubclass(cls, typ))
+ types = [n for n in types if is_related(n)]
+ # Remove entries which are strict bases of other entries (they will end up
+    # in the MRO anyway).
+ def is_strict_base(typ):
+ for other in types:
+ if typ != other and typ in other.__mro__:
+ return True
+ return False
+ types = [n for n in types if not is_strict_base(n)]
+ # Subclasses of the ABCs in *types* which are also implemented by
+ # *cls* can be used to stabilize ABC ordering.
+ type_set = set(types)
+ mro = []
+ for typ in types:
+ found = []
+ for sub in filter(_safe, typ.__subclasses__()):
+ if sub not in bases and issubclass(cls, sub):
+ found.append([s for s in sub.__mro__ if s in type_set])
+ if not found:
+ mro.append(typ)
+ continue
+ # Favor subclasses with the biggest number of useful bases
+ found.sort(key=len, reverse=True)
+ for sub in found:
+ for subcls in sub:
+ if subcls not in mro:
+ mro.append(subcls)
+ return _c3_mro(cls, abcs=mro)
+
+
+def _safe(class_):
+ """
+    Return whether the class is safe for testing as a subclass. Ref #2.
+ """
+ return not getattr(class_, '__origin__', None)
+
+
+def _find_impl(cls, registry):
+ """Returns the best matching implementation from *registry* for type *cls*.
+
+ Where there is no registered implementation for a specific type, its method
+ resolution order is used to find a more generic implementation.
+
+ Note: if *registry* does not contain an implementation for the base
+ *object* type, this function may return None.
+
+ """
+ mro = _compose_mro(cls, registry.keys())
+ match = None
+ for t in mro:
+ if match is not None:
+ # If *match* is an implicit ABC but there is another unrelated,
+ # equally matching implicit ABC, refuse the temptation to guess.
+ if (t in registry and t not in cls.__mro__
+ and match not in cls.__mro__
+ and not issubclass(match, t)):
+ raise RuntimeError("Ambiguous dispatch: {0} or {1}".format(
+ match, t))
+ break
+ if t in registry:
+ match = t
+ return registry.get(match)
+
+def _validate_annotation(annotation):
+ """Determine if an annotation is valid for registration.
+
+ An annotation is considered valid for use in registration if it is an
+ instance of ``type`` and not a generic type from ``typing``.
+ """
+ try:
+ # In Python earlier than 3.7, the classes in typing are considered
+        # instances of type, but they are invalid for registering single
+        # dispatch functions, so check against GenericMeta instead.
+ from typing import GenericMeta
+ valid = not isinstance(annotation, GenericMeta)
+ except ImportError:
+ # In Python 3.7+, classes in typing are not instances of type.
+ valid = isinstance(annotation, type)
+ return valid
+
+def singledispatch(func):
+ """Single-dispatch generic function decorator.
+
+ Transforms a function into a generic function, which can have different
+ behaviours depending upon the type of its first argument. The decorated
+ function acts as the default implementation, and additional
+ implementations can be registered using the register() attribute of the
+ generic function.
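+
+    Illustrative usage (a sketch, not part of the original docstring):
+
+        @singledispatch
+        def fun(arg):
+            return 'default'
+
+        @fun.register(int)
+        def _(arg):
+            return 'int'
+
+        fun(1)    # -> 'int'
+        fun('x')  # -> 'default'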
+ """
+ registry = {}
+ dispatch_cache = WeakKeyDictionary()
+    # A function object doubles as a mutable namespace for cache_token
+    # (Python 2 has no `nonlocal` to rebind it from dispatch()).
+    def ns(): pass
+ ns.cache_token = None
+
+ def dispatch(cls):
+ """generic_func.dispatch(cls) -> <function implementation>
+
+ Runs the dispatch algorithm to return the best available implementation
+ for the given *cls* registered on *generic_func*.
+
+ """
+ if ns.cache_token is not None:
+ current_token = get_cache_token()
+ if ns.cache_token != current_token:
+ dispatch_cache.clear()
+ ns.cache_token = current_token
+ try:
+ impl = dispatch_cache[cls]
+ except KeyError:
+ try:
+ impl = registry[cls]
+ except KeyError:
+ impl = _find_impl(cls, registry)
+ dispatch_cache[cls] = impl
+ return impl
+
+ def register(cls, func=None):
+ """generic_func.register(cls, func) -> func
+
+ Registers a new implementation for the given *cls* on a *generic_func*.
+
+ """
+ if func is None:
+ if isinstance(cls, type):
+ return lambda f: register(cls, f)
+ ann = getattr(cls, '__annotations__', {})
+ if not ann:
+ raise TypeError(
+ "Invalid first argument to `register()`: {cls!r}. "
+ "Use either `@register(some_class)` or plain `@register` "
+ "on an annotated function.".format(**locals())
+ )
+ func = cls
+
+ argname, cls = next(iter(get_type_hints(func).items()))
+ if not _validate_annotation(cls):
+ raise TypeError(
+ "Invalid annotation for {argname!r}. "
+ "{cls!r} is not a class.".format(**locals())
+ )
+ registry[cls] = func
+ if ns.cache_token is None and hasattr(cls, '__abstractmethods__'):
+ ns.cache_token = get_cache_token()
+ dispatch_cache.clear()
+ return func
+
+ def wrapper(*args, **kw):
+ if not args:
+ raise TypeError('{0} requires at least '
+ '1 positional argument'.format(funcname))
+
+ return dispatch(args[0].__class__)(*args, **kw)
+
+ funcname = getattr(func, '__name__', 'singledispatch function')
+ registry[object] = func
+ wrapper.register = register
+ wrapper.dispatch = dispatch
+ wrapper.registry = MappingProxyType(registry)
+ wrapper._clear_cache = dispatch_cache.clear
+ update_wrapper(wrapper, func)
+ return wrapper
+
+
+# Descriptor version
+class singledispatchmethod(object):
+ """Single-dispatch generic method descriptor.
+
+ Supports wrapping existing descriptors and handles non-descriptor
+ callables as instance methods.
+ """
+
+ def __init__(self, func):
+ if not callable(func) and not hasattr(func, "__get__"):
+ raise TypeError("{!r} is not callable or a descriptor".format(func))
+
+ self.dispatcher = singledispatch(func)
+ self.func = func
+
+ def register(self, cls, method=None):
+ """generic_method.register(cls, func) -> func
+
+ Registers a new implementation for the given *cls* on a *generic_method*.
+ """
+ return self.dispatcher.register(cls, func=method)
+
+ def __get__(self, obj, cls=None):
+ def _method(*args, **kwargs):
+ method = self.dispatcher.dispatch(args[0].__class__)
+ return method.__get__(obj, cls)(*args, **kwargs)
+
+ _method.__isabstractmethod__ = self.__isabstractmethod__
+ _method.register = self.register
+ update_wrapper(_method, self.func)
+ return _method
+
+ @property
+ def __isabstractmethod__(self):
+ return getattr(self.func, '__isabstractmethod__', False)
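+
+
+# Illustrative usage (a sketch, not part of the original module):
+#
+#     class Negator(object):
+#         @singledispatchmethod
+#         def neg(self, arg):
+#             raise NotImplementedError('cannot negate %r' % (arg,))
+#
+#         @neg.register(int)
+#         def _(self, arg):
+#             return -arg
+#
+#     Negator().neg(5)  # -> -5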
diff --git a/contrib/deprecated/python/singledispatch/singledispatch/helpers.py b/contrib/deprecated/python/singledispatch/singledispatch/helpers.py
new file mode 100644
index 0000000000..74e73b1799
--- /dev/null
+++ b/contrib/deprecated/python/singledispatch/singledispatch/helpers.py
@@ -0,0 +1,217 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+from __future__ import unicode_literals
+
+import sys
+from abc import ABCMeta
+
+try:
+ from collections.abc import MutableMapping
+except ImportError:
+ from collections import MutableMapping
+
+try:
+ from collections import UserDict
+except ImportError:
+ from UserDict import UserDict
+try:
+ from collections import OrderedDict
+except ImportError:
+ from ordereddict import OrderedDict
+try:
+ from thread import get_ident
+except ImportError:
+ try:
+ from _thread import get_ident
+ except ImportError:
+ from _dummy_thread import get_ident
+
+
+def recursive_repr(fillvalue='...'):
+ 'Decorator to make a repr function return fillvalue for a recursive call'
+
+ def decorating_function(user_function):
+ repr_running = set()
+
+ def wrapper(self):
+ key = id(self), get_ident()
+ if key in repr_running:
+ return fillvalue
+ repr_running.add(key)
+ try:
+ result = user_function(self)
+ finally:
+ repr_running.discard(key)
+ return result
+
+ # Can't use functools.wraps() here because of bootstrap issues
+ wrapper.__module__ = getattr(user_function, '__module__')
+ wrapper.__doc__ = getattr(user_function, '__doc__')
+ wrapper.__name__ = getattr(user_function, '__name__')
+ wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
+ return wrapper
+
+ return decorating_function
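+
+
+# Illustrative usage (a sketch, not part of the original module):
+#
+#     class Box(object):
+#         @recursive_repr()
+#         def __repr__(self):
+#             return 'Box(%r)' % (self.item,)
+#
+#     b = Box(); b.item = b
+#     repr(b)  # -> 'Box(...)' instead of infinite recursion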
+
+
+class ChainMap(MutableMapping):
+ ''' A ChainMap groups multiple dicts (or other mappings) together
+ to create a single, updateable view.
+
+ The underlying mappings are stored in a list. That list is public and can
+    be accessed or updated using the *maps* attribute. There is no other state.
+
+ Lookups search the underlying mappings successively until a key is found.
+ In contrast, writes, updates, and deletions only operate on the first
+ mapping.
+
+ '''
+
+ def __init__(self, *maps):
+ '''Initialize a ChainMap by setting *maps* to the given mappings.
+ If no mappings are provided, a single empty dictionary is used.
+
+ '''
+ self.maps = list(maps) or [{}] # always at least one map
+
+ def __missing__(self, key):
+ raise KeyError(key)
+
+ def __getitem__(self, key):
+ for mapping in self.maps:
+ try:
+ return mapping[key] # can't use 'key in mapping' with defaultdict
+ except KeyError:
+ pass
+ return self.__missing__(key) # support subclasses that define __missing__
+
+ def get(self, key, default=None):
+ return self[key] if key in self else default
+
+ def __len__(self):
+ return len(set().union(*self.maps)) # reuses stored hash values if possible
+
+ def __iter__(self):
+ return iter(set().union(*self.maps))
+
+ def __contains__(self, key):
+ return any(key in m for m in self.maps)
+
+ @recursive_repr()
+ def __repr__(self):
+ return '{0.__class__.__name__}({1})'.format(
+ self, ', '.join(map(repr, self.maps)))
+
+ @classmethod
+ def fromkeys(cls, iterable, *args):
+ 'Create a ChainMap with a single dict created from the iterable.'
+ return cls(dict.fromkeys(iterable, *args))
+
+ def copy(self):
+ 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
+ return self.__class__(self.maps[0].copy(), *self.maps[1:])
+
+ __copy__ = copy
+
+ def new_child(self): # like Django's Context.push()
+ 'New ChainMap with a new dict followed by all previous maps.'
+ return self.__class__({}, *self.maps)
+
+ @property
+ def parents(self): # like Django's Context.pop()
+ 'New ChainMap from maps[1:].'
+ return self.__class__(*self.maps[1:])
+
+ def __setitem__(self, key, value):
+ self.maps[0][key] = value
+
+ def __delitem__(self, key):
+ try:
+ del self.maps[0][key]
+ except KeyError:
+ raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+ def popitem(self):
+        'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
+ try:
+ return self.maps[0].popitem()
+ except KeyError:
+ raise KeyError('No keys found in the first mapping.')
+
+ def pop(self, key, *args):
+ 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
+ try:
+ return self.maps[0].pop(key, *args)
+ except KeyError:
+ raise KeyError('Key not found in the first mapping: {!r}'.format(key))
+
+ def clear(self):
+ 'Clear maps[0], leaving maps[1:] intact.'
+ self.maps[0].clear()
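+
+
+# Illustrative usage (a sketch, not part of the original module):
+#
+#     cm = ChainMap({'a': 1}, {'a': 2, 'b': 3})
+#     cm['a']      # -> 1, the first mapping wins on lookup
+#     cm['b']      # -> 3, found in a later mapping
+#     cm['c'] = 4  # writes always go to the first mapping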
+
+
+class MappingProxyType(UserDict):
+    """Minimal stand-in for types.MappingProxyType.
+
+    Note: unlike the Python 3 original, this wrapper is not read-only;
+    it merely exposes *data* through the UserDict interface.
+    """
+    def __init__(self, data):
+ UserDict.__init__(self)
+ self.data = data
+
+
+try:
+ from abc import get_cache_token
+except ImportError:
+ def get_cache_token():
+ return ABCMeta._abc_invalidation_counter
+
+
+class Support(object):
+ def dummy(self):
+ pass
+
+ def cpython_only(self, func):
+ if 'PyPy' in sys.version:
+ return self.dummy
+ return func
+
+
+def get_type_hints(func):
+ # only import typing if annotation parsing is necessary
+ from typing import get_type_hints
+ return get_type_hints(func) or getattr(func, '__annotations__', {})
+
+
+WRAPPER_ASSIGNMENTS = ('__module__', '__name__', '__qualname__', '__doc__',
+ '__annotations__')
+WRAPPER_UPDATES = ('__dict__',)
+def update_wrapper(wrapper,
+ wrapped,
+ assigned = WRAPPER_ASSIGNMENTS,
+ updated = WRAPPER_UPDATES):
+ """Update a wrapper function to look like the wrapped function
+
+ wrapper is the function to be updated
+ wrapped is the original function
+ assigned is a tuple naming the attributes assigned directly
+ from the wrapped function to the wrapper function (defaults to
+ functools.WRAPPER_ASSIGNMENTS)
+ updated is a tuple naming the attributes of the wrapper that
+ are updated with the corresponding attribute from the wrapped
+ function (defaults to functools.WRAPPER_UPDATES)
+ """
+ for attr in assigned:
+ try:
+ value = getattr(wrapped, attr)
+ except AttributeError:
+ pass
+ else:
+ setattr(wrapper, attr, value)
+ for attr in updated:
+ getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
+ # Issue #17482: set __wrapped__ last so we don't inadvertently copy it
+ # from the wrapped function when updating __dict__
+ wrapper.__wrapped__ = wrapped
+ # Return the wrapper so this can be used as a decorator via partial()
+ return wrapper
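+
+
+# Illustrative usage (a sketch, not part of the original module):
+#
+#     def wrapped():
+#         'original docstring'
+#     def wrapper():
+#         return wrapped()
+#     update_wrapper(wrapper, wrapped)
+#     wrapper.__doc__      # -> 'original docstring'
+#     wrapper.__wrapped__  # -> wrapped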
diff --git a/contrib/deprecated/python/singledispatch/ya.make b/contrib/deprecated/python/singledispatch/ya.make
new file mode 100644
index 0000000000..ee683e8690
--- /dev/null
+++ b/contrib/deprecated/python/singledispatch/ya.make
@@ -0,0 +1,27 @@
+# Generated by devtools/yamaker (pypi).
+
+PY2_LIBRARY()
+
+VERSION(3.7.0)
+
+LICENSE(MIT)
+
+PEERDIR(
+ contrib/python/six
+)
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ singledispatch/__init__.py
+ singledispatch/helpers.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/deprecated/python/singledispatch/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+)
+
+END()
diff --git a/contrib/deprecated/python/subprocess32/ChangeLog b/contrib/deprecated/python/subprocess32/ChangeLog
new file mode 100644
index 0000000000..7db7d8465e
--- /dev/null
+++ b/contrib/deprecated/python/subprocess32/ChangeLog
@@ -0,0 +1,185 @@
+-----------------
+2019-05-20 3.5.4
+-----------------
+ * Promoted 3.5.4rc2 to become 3.5.4.
+
+-----------------
+2019-05-13 3.5.4rc2
+-----------------
+ * GitHub [#57]: TimeoutExpired and CalledProcessError exceptions can now
+ be unpickled.
+
+-----------------
+2019-05-10 3.5.4rc1
+-----------------
+* GitHub [#61]: Backport the fixes for https://bugs.python.org/issue10963,
+ https://bugs.python.org/issue19612, and https://bugs.python.org/issue30418.
+  These cover the case where a child process is so short lived that it dies
+  or closes its pipes before you call communicate(). When supplying stdin
+  or reading its output in this scenario, an unexpected broken pipe or
+  interrupted system call exception is no longer raised.
+
+-----------------
+2018-10-09 3.5.3
+-----------------
+* Disallow ridiculously large numbers (millions) of arguments. [#54]
+
+-----------------
+2018-06-07 3.5.2
+-----------------
+* Explicitly include <signal.h> in _posixsubprocess_helpers.c; it already
+ gets configure checked and pulled in via Python's own <Python.h> in many
+ circumstances but it is better to be explicit. #IWYU
+ If you were using subprocess32 on a Python interpreter built *without*
+ the --with-fpectl configure option enabled, restore_signals is now
+ useful rather than a no-op. I do not know if such builds were common.
+* Adds a functional test for restore_signals=True behavior.
+
+-----------------
+2018-05-21 3.5.1
+-----------------
+* Fix AttributeError: 'module' object has no attribute 'Signals' when
+ constructing a CalledProcessError exception. [#49]
+
+-----------------
+2018-05-13 3.5.0 (3.5.0rc3)
+-----------------
+
+* Fixed the packaging problem where the stdlib python3_redirect shim is
+ supposed to be installed on Python 3.
+* Renamed _posixsubprocess to _posixsubprocess32 for consistency.
+* Unset CLOEXEC on file descriptors given to Popen pass_fds. (GH #4)
+* Drop support for Python 2.4 and 2.5.
+* Adds a configure script - run by setup.py - to supply the missing feature
+  #define's for the _posixsubprocess32 extension module, i.e. those that
+  Python 2's own configure-generated pyconfig.h does not already provide.
+
+-----------------
+2017-10-18 3.5.0rc1
+-----------------
+
+* Backport the new subprocess.run() API from Python 3.5.
+* Backport subprocess.DEVNULL support from 3.3.
+* Allow stderr to be redirected to stdout even when stdout is not redirected.
+ https://bugs.python.org/issue22274
+* Fix subprocess.Popen.wait() when the child process has exited to a
+  stopped instead of terminated state (ex: when under ptrace).
+ https://bugs.python.org/issue29335
+* Include the private API needed by the multiprocessing module for people who
+ want to drop subprocess32 in as a replacement for their standard library
+ subprocess module.
+* Fix a compilation issue regarding O_CLOEXEC not being defined on ancient
+ Linux distros such as RHEL 5.
+
+-----------------
+2015-11-15 3.2.7
+-----------------
+
+* Issue #6973: When we know a subprocess.Popen process has died, do
+ not allow the send_signal(), terminate(), or kill() methods to do
+ anything as they could potentially signal a different process.
+* Issue #23564: Fixed a partially broken sanity check in the _posixsubprocess
+ internals regarding how fds_to_pass were passed to the child. The bug had
+ no actual impact as subprocess32.py already avoided it.
+
+-----------------
+2015-11-14 3.2.7rc2
+-----------------
+
+* Moved the repository from code.google.com to github.
+* Added a _WAIT_TIMEOUT to satisfy the unsupported people entirely on
+ their own trying to use this on Windows.
+* Similarly: Updated setup.py to not build the extension on non-posix.
+
+-----------------
+2014-06-01 3.2.7rc1
+-----------------
+
+* Issue #21618: The subprocess module could fail to close open fds that were
+ inherited by the calling process and already higher than POSIX resource
+ limits would otherwise allow. On systems with a functioning /proc/self/fd
+ or /dev/fd interface the max is now ignored and all fds are closed.
+
+-----------------
+2014-04-23 3.2.6
+-----------------
+
+* Fixes issue #21291: Popen.wait() is now thread safe so that multiple
+ threads may be calling wait() or poll() on a Popen instance at the same time
+ without losing the Popen.returncode value.
+* Fixes issue #14396: Handle the odd rare case of waitpid returning 0 when not
+ expected in Popen.wait().
+* Fixes issue #16962: Use getdents64 instead of the obsolete getdents syscall
+ on Linux. Some architectures do not implement the latter.
+
+-----------------
+2013-12-10 3.2.5
+-----------------
+
+* Fixes issue #15798: subprocess.Popen() no longer fails if file
+ descriptor 0, 1 or 2 is closed.
+* Fixes issue #18763: close_fd file descriptors are now closed after
+ any preexec_fn call.
+
+-----------------
+2013-06-15 3.2.5rc1
+-----------------
+
+* Fixes issue #16650 - Don't reference ECHILD from outside the local scope.
+* Unittests no longer spew any test data for human verification to stdout.
+* Remove a bare print to stdout that could have happened if the child process
+ wrote garbage to its pre-exec error pipe.
+* Fixes issue #16327 - the subprocess module no longer leaks file descriptors
+ used for stdin/stdout/stderr pipes to the child when the fork() fails. It
+ also no longer potentially double closes these pipe fds.
+* Correct the Python version check around use of imp_module to specify 2.6.3
+  as the minimum version it exists in. Why is anyone using such an old 2.6?
+* Fixes Issue #16114: The subprocess module no longer provides a misleading
+ error message stating that args[0] did not exist when either the cwd or
+ executable keyword arguments specified a path that did not exist.
+* Add more Popen cwd tests.
+* Handle errno.ECHILD in poll.
+* Don't leak a reference to the gc module on capi use error.
+* Check return value to avoid a crash if the capi were misused.
+* Check result of PyObject_IsTrue().
+* Adds test_universal_newlines_communicate_input_none.
+* Most everything above consists of backports. See the hg logs for their
+ upstream hg.python.org cpython revision numbers.
+
+----------------
+2012-06-10 3.2.3
+----------------
+
+* Fixes the references to the 'surrogateescape' unicode encoding error
+ handler that does not exist in Python 2.x. 'strict' is used so that
+ a UnicodeEncodeError exception is raised in these situations. These
+  MAY occur if your sys.getfilesystemencoding() is not UTF-8 and you
+  attempt to use non-ASCII executable, args or env values. Prior to
+ this change, those would result in a hard to debug LookupError for
+ surrogateescape.
+* Issue #15000: Support the "unique" x32 architecture in _posixsubprocess.c.
+* Fix a compilation problem when O_CLOEXEC is not defined.
+
+------------------
+2012-02-18 3.2.3b1
+------------------
+
+This release brings in the last year and a half's worth of bugfixes and
+improvements to Python 3.2's subprocess module:
+
+Off the top of my head, some major bugfix highlights include:
+ * Timeout support on the APIs.
+ * close_fds=True is now the default (as it is in 3.2) and performs much faster.
+ * Fixed EINTR handling.
+ * Fixed SIGCHLD handling.
+ * Fixed several race conditions.
+ * Many more bug fixes too numerous to list.
+
+You can grep out the full list of improvements related to subprocess in:
+ http://hg.python.org/cpython/file/9ce5d456138b/Misc/NEWS
+
+-------------
+2010-06 3.2.0
+-------------
+
+This was the first release. Roughly equivalent to Python 3.2.0a1.
diff --git a/contrib/deprecated/python/subprocess32/LICENSE b/contrib/deprecated/python/subprocess32/LICENSE
new file mode 100644
index 0000000000..0d336624f4
--- /dev/null
+++ b/contrib/deprecated/python/subprocess32/LICENSE
@@ -0,0 +1,283 @@
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC. Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team. In October of the same
+year, the PythonLabs team moved to Digital Creations (now Zope
+Corporation, see http://www.zope.com). In 2001, the Python Software
+Foundation (PSF, see http://www.python.org/psf/) was formed, a
+non-profit organization created specifically to own Python-related
+Intellectual Property. Zope Corporation is a sponsoring member of
+the PSF.
+
+All Python releases are Open Source (see http://www.opensource.org for
+the Open Source Definition). Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+ Release Derived Year Owner GPL-
+ from compatible? (1)
+
+ 0.9.0 thru 1.2 1991-1995 CWI yes
+ 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
+ 1.6 1.5.2 2000 CNRI no
+ 2.0 1.6 2000 BeOpen.com no
+ 1.6.1 1.6 2001 CNRI yes (2)
+ 2.1 2.0+1.6.1 2001 PSF no
+ 2.0.1 2.0+1.6.1 2001 PSF yes
+ 2.1.1 2.1+2.0.1 2001 PSF yes
+ 2.2 2.1.1 2001 PSF yes
+ 2.1.2 2.1.1 2002 PSF yes
+ 2.1.3 2.1.2 2002 PSF yes
+ 2.2.1 2.2 2002 PSF yes
+ 2.2.2 2.2.1 2002 PSF yes
+ 2.2.3 2.2.2 2003 PSF yes
+ 2.3 2.2.2 2002-2003 PSF yes
+ 2.3.1 2.3 2002-2003 PSF yes
+ 2.3.2 2.3.1 2002-2003 PSF yes
+ 2.3.3 2.3.2 2002-2003 PSF yes
+ 2.3.4 2.3.3 2004 PSF yes
+ 2.3.5 2.3.4 2005 PSF yes
+ 2.4 2.3 2004 PSF yes
+ 2.4.1 2.4 2005 PSF yes
+ 2.4.2 2.4.1 2005 PSF yes
+ 2.4.3 2.4.2 2006 PSF yes
+ 2.4.4 2.4.3 2006 PSF yes
+ 2.5 2.4 2006 PSF yes
+ 2.5.1 2.5 2007 PSF yes
+ 2.5.2 2.5.1 2008 PSF yes
+ 2.5.3 2.5.2 2008 PSF yes
+ 2.6 2.5 2008 PSF yes
+ 2.6.1 2.6 2008 PSF yes
+ 2.6.2 2.6.1 2009 PSF yes
+ 2.6.3 2.6.2 2009 PSF yes
+ 2.6.4 2.6.3 2009 PSF yes
+ 2.6.5 2.6.4 2010 PSF yes
+ 3.0 2.6 2008 PSF yes
+ 3.0.1 3.0 2009 PSF yes
+ 3.1 3.0.1 2009 PSF yes
+ 3.1.1 3.1 2009 PSF yes
+ 3.1.2 3.1 2010 PSF yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+ the GPL. All Python licenses, unlike the GPL, let you distribute
+ a modified version without making your changes open source. The
+ GPL-compatible licenses make it possible to combine Python with
+ other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+ because its license has a choice of law clause. According to
+ CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+ is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+Python Software Foundation; All Rights Reserved" are retained in Python alone or
+in any derivative version prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions. Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee. This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party. As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee. Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement. This Agreement together with
+Python 1.6.1 may be located on the Internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013. This
+Agreement may also be obtained from a proxy server on the Internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee. This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+ ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands. All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/contrib/deprecated/python/subprocess32/README.md b/contrib/deprecated/python/subprocess32/README.md
new file mode 100644
index 0000000000..919e0929ed
--- /dev/null
+++ b/contrib/deprecated/python/subprocess32/README.md
@@ -0,0 +1,50 @@
+subprocess32
+------------
+[![PyPI version](https://badge.fury.io/py/subprocess32.svg)](https://badge.fury.io/py/subprocess32)
+[![POSIX Build Status](https://travis-ci.org/google/python-subprocess32.svg?branch=master)](https://travis-ci.org/google/python-subprocess32)
+[![Windows Build Status](https://ci.appveyor.com/api/projects/status/53apbb2jk1uslj0m?svg=true
+)](https://ci.appveyor.com/project/gpshead/python-subprocess32)
+
+This is a backport of the Python 3 subprocess module for use on Python 2.
+This code has not been tested on Windows or other non-POSIX platforms.
+
+subprocess32 includes many important reliability bug fixes relevant on
+POSIX platforms. The most important of these is a C extension module
+used internally to handle the code path between fork() and exec();
+it remains reliable even when the application uses threads.
+
+Refer to the
+[Python 3.5 subprocess documentation](https://docs.python.org/3.5/library/subprocess.html)
+for usage information.
+
+* Timeout support backported from Python 3.3 is included.
+* The run() API from Python 3.5 was backported in subprocess32 3.5.0.
+* Otherwise features are frozen at the 3.2 level.
+
+Usage
+-----
+
+The recommended pattern for cross-platform code is to use the following:
+
+```python
+import os
+import sys
+
+if os.name == 'posix' and sys.version_info[0] < 3:
+ import subprocess32 as subprocess
+else:
+ import subprocess
+```
+
+Or if you fully control your POSIX Python 2.7 installation, this can serve
+as a replacement for its subprocess module. Users will thank you by not
+filing concurrency bugs.
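+
+As an illustrative sketch of the backported run() API (the command and
+options here are arbitrary examples):
+
+```python
+import subprocess32 as subprocess
+
+# Capture stdout and enforce a timeout, per the Python 3.5 subprocess API.
+result = subprocess.run(['echo', 'hello'], stdout=subprocess.PIPE, timeout=5)
+print(result.returncode, result.stdout)
+```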
+
+Got Bugs?
+---------
+
+Try to reproduce them on the latest Python 3.x itself and file bug
+reports on [bugs.python.org](https://bugs.python.org/).
+Add gregory.p.smith to the Nosy list.
+
+If you have reason to believe the issue is specifically with this backport
+and not a problem in Python 3 itself, use the github issue tracker.
+
+-- Gregory P. Smith _greg@krypto.org_
diff --git a/contrib/deprecated/python/subprocess32/_posixsubprocess.c b/contrib/deprecated/python/subprocess32/_posixsubprocess.c
new file mode 100644
index 0000000000..b6cb77ca23
--- /dev/null
+++ b/contrib/deprecated/python/subprocess32/_posixsubprocess.c
@@ -0,0 +1,927 @@
+/* Authors: Gregory P. Smith & Jeffrey Yasskin */
+
+/* We use our own small autoconf to fill in for things that were not checked
+ * for in Python 2's configure and thus pyconfig.h.
+ *
+ * This comes before Python.h on purpose. 2.7's Python.h redefines critical
+ * defines such as _POSIX_C_SOURCE with undesirable old values, impacting
+ * which system header defines are available.
+ */
+#include "_posixsubprocess_config.h"
+#ifdef HAVE_SYS_CDEFS_H
+#include <sys/cdefs.h>
+#endif
+
+#define PY_SSIZE_T_CLEAN
+#include "Python.h"
+
+#include <unistd.h>
+#include <fcntl.h>
+#ifdef HAVE_SYS_TYPES_H
+#include <sys/types.h>
+#endif
+#if defined(HAVE_SYS_STAT_H) && defined(__FreeBSD__)
+#include <sys/stat.h>
+#endif
+#ifdef HAVE_SYS_SYSCALL_H
+#include <sys/syscall.h>
+#endif
+#ifdef HAVE_DIRENT_H
+#include <dirent.h>
+#endif
+
+/* TODO: Some platform conditions below could move into configure.ac. */
+
+#if defined(__ANDROID__) && !defined(SYS_getdents64)
+/* Android doesn't expose syscalls, add the definition manually. */
+# include <sys/linux-syscalls.h>
+# define SYS_getdents64 __NR_getdents64
+#endif
+
+#include "_posixsubprocess_helpers.c"
+
+#if (PY_VERSION_HEX < 0x02060300)
+/* These are not public API functions until 2.6.3. */
+static void _PyImport_AcquireLock(void);
+static int _PyImport_ReleaseLock(void);
+#endif
+
+#if defined(sun)
+/* readdir64 is used to work around Solaris 9 bug 6395699. */
+# define readdir readdir64
+# define dirent dirent64
+# if !defined(HAVE_DIRFD)
+/* Some versions of Solaris lack dirfd(). */
+# define dirfd(dirp) ((dirp)->dd_fd)
+# define HAVE_DIRFD
+# endif
+#endif
+
+#if defined(__FreeBSD__) || (defined(__APPLE__) && defined(__MACH__))
+# define FD_DIR "/dev/fd"
+#else
+# define FD_DIR "/proc/self/fd"
+#endif
+
+#define POSIX_CALL(call) if ((call) == -1) goto error
+
+
+/* Given the gc module call gc.enable() and return 0 on success. */
+static int
+_enable_gc(PyObject *gc_module)
+{
+ PyObject *result;
+ result = PyObject_CallMethod(gc_module, "enable", NULL);
+ if (result == NULL)
+ return 1;
+ Py_DECREF(result);
+ return 0;
+}
+
+
+/* Convert ASCII to a positive int, no libc call. no overflow. -1 on error. */
+static int
+_pos_int_from_ascii(char *name)
+{
+ int num = 0;
+ while (*name >= '0' && *name <= '9') {
+ num = num * 10 + (*name - '0');
+ ++name;
+ }
+ if (*name)
+ return -1; /* Non digit found, not a number. */
+ return num;
+}
+
+
+#if defined(__FreeBSD__)
+/* When /dev/fd isn't mounted it is often a static directory populated
+ * with 0 1 2 or entries for 0 .. 63 on FreeBSD, NetBSD and OpenBSD.
+ * NetBSD and OpenBSD have a /proc fs available (though not necessarily
+ * mounted) and do not have fdescfs for /dev/fd. MacOS X has a devfs
+ * that properly supports /dev/fd.
+ */
+static int
+_is_fdescfs_mounted_on_dev_fd()
+{
+ struct stat dev_stat;
+ struct stat dev_fd_stat;
+ if (stat("/dev", &dev_stat) != 0)
+ return 0;
+ if (stat(FD_DIR, &dev_fd_stat) != 0)
+ return 0;
+ if (dev_stat.st_dev == dev_fd_stat.st_dev)
+ return 0; /* / == /dev == /dev/fd means it is static. #fail */
+ return 1;
+}
+#endif
+
+
+/* Returns 1 if there is a problem with fd_sequence, 0 otherwise. */
+static int
+_sanity_check_python_fd_sequence(PyObject *fd_sequence)
+{
+ Py_ssize_t seq_idx, seq_len = PySequence_Length(fd_sequence);
+ long prev_fd = -1;
+ for (seq_idx = 0; seq_idx < seq_len; ++seq_idx) {
+ PyObject* py_fd = PySequence_Fast_GET_ITEM(fd_sequence, seq_idx);
+ long iter_fd = PyLong_AsLong(py_fd);
+ if (iter_fd < 0 || iter_fd <= prev_fd || iter_fd > INT_MAX) {
+ /* Negative, overflow, not a Long, unsorted, too big for a fd. */
+ return 1;
+ }
+ prev_fd = iter_fd;
+ }
+ return 0;
+}
+
+
+/* Is fd found in the sorted Python Sequence? */
+static int
+_is_fd_in_sorted_fd_sequence(int fd, PyObject *fd_sequence)
+{
+ /* Binary search. */
+ Py_ssize_t search_min = 0;
+ Py_ssize_t search_max = PySequence_Length(fd_sequence) - 1;
+ if (search_max < 0)
+ return 0;
+ do {
+ long middle = (search_min + search_max) / 2;
+ long middle_fd = PyLong_AsLong(
+ PySequence_Fast_GET_ITEM(fd_sequence, middle));
+ if (fd == middle_fd)
+ return 1;
+ if (fd > middle_fd)
+ search_min = middle + 1;
+ else
+ search_max = middle - 1;
+ } while (search_min <= search_max);
+ return 0;
+}
+
+
+/* Get the maximum file descriptor that could be opened by this process.
+ * This function is async signal safe for use between fork() and exec().
+ */
+static long
+safe_get_max_fd(void)
+{
+ long local_max_fd;
+#if defined(__NetBSD__)
+ local_max_fd = fcntl(0, F_MAXFD);
+ if (local_max_fd >= 0)
+ return local_max_fd;
+#endif
+#ifdef _SC_OPEN_MAX
+ local_max_fd = sysconf(_SC_OPEN_MAX);
+ if (local_max_fd == -1)
+#endif
+ local_max_fd = 256; /* Matches legacy Lib/subprocess.py behavior. */
+ return local_max_fd;
+}
+
+/* While uncommon in Python 2 applications, this makes sure the
+ * close on exec flag is unset on the subprocess32.Popen pass_fds.
+ * https://github.com/google/python-subprocess32/issues/4.
+ */
+static void
+_unset_cloexec_on_fds(PyObject *py_fds_to_keep, int errpipe_write)
+{
+#ifdef FD_CLOEXEC
+ Py_ssize_t num_fds_to_keep = PySequence_Length(py_fds_to_keep);
+ Py_ssize_t keep_seq_idx;
+    /* Walk the sorted py_fds_to_keep list and clear the close-on-exec
+     * flag on each fd in it so those fds survive the exec(). */
+ for (keep_seq_idx = 0; keep_seq_idx < num_fds_to_keep; ++keep_seq_idx) {
+ PyObject* py_keep_fd = PySequence_Fast_GET_ITEM(py_fds_to_keep,
+ keep_seq_idx);
+ // We just keep going on errors below, there is nothing we can
+ // usefully do to report them. This is best effort.
+ long fd = PyLong_AsLong(py_keep_fd);
+ if (fd < 0) continue;
+ if (fd == errpipe_write) continue; // This one keeps its CLOEXEC.
+ // We could use ioctl FIONCLEX, but that is a more modern API
+ // not available everywhere and we are a single threaded child.
+ int old_flags = fcntl(fd, F_GETFD);
+ if (old_flags != -1) {
+ fcntl(fd, F_SETFD, old_flags & ~FD_CLOEXEC);
+ }
+ }
+#endif
+}
+
+/* Close all file descriptors in the range from start_fd and higher
+ * except for those in py_fds_to_keep. If the range defined by
+ * [start_fd, safe_get_max_fd()) is large this will take a long
+ * time as it calls close() on EVERY possible fd.
+ *
+ * It isn't possible to know for sure what the max fd to go up to
+ * is for processes with the capability of raising their maximum.
+ */
+static void
+_close_fds_by_brute_force(long start_fd, PyObject *py_fds_to_keep)
+{
+ long end_fd = safe_get_max_fd();
+ Py_ssize_t num_fds_to_keep = PySequence_Length(py_fds_to_keep);
+ Py_ssize_t keep_seq_idx;
+ int fd_num;
+    /* As py_fds_to_keep is sorted we can loop through it, closing the
+     * fds that fall between consecutive keep-list entries in our range. */
+ for (keep_seq_idx = 0; keep_seq_idx < num_fds_to_keep; ++keep_seq_idx) {
+ PyObject* py_keep_fd = PySequence_Fast_GET_ITEM(py_fds_to_keep,
+ keep_seq_idx);
+ int keep_fd = PyLong_AsLong(py_keep_fd);
+ if (keep_fd < start_fd)
+ continue;
+ for (fd_num = start_fd; fd_num < keep_fd; ++fd_num) {
+ while (close(fd_num) < 0 && errno == EINTR);
+ }
+ start_fd = keep_fd + 1;
+ }
+ if (start_fd <= end_fd) {
+ for (fd_num = start_fd; fd_num < end_fd; ++fd_num) {
+ while (close(fd_num) < 0 && errno == EINTR);
+ }
+ }
+}
+
+
+#if defined(__linux__) && defined(HAVE_SYS_SYSCALL_H)
+/* It doesn't matter if d_name has room for NAME_MAX chars; we're using this
+ * only to read a directory of short file descriptor number names. The kernel
+ * will return an error if we didn't give it enough space. Highly Unlikely.
+ * This structure is very old and stable: It will not change unless the kernel
+ * chooses to break compatibility with all existing binaries. Highly Unlikely.
+ */
+struct linux_dirent64 {
+ unsigned long long d_ino;
+ long long d_off;
+ unsigned short d_reclen; /* Length of this linux_dirent */
+ unsigned char d_type;
+ char d_name[256]; /* Filename (null-terminated) */
+};
+
+/* Close all open file descriptors in the range from start_fd and higher
+ * Do not close any in the sorted py_fds_to_keep list.
+ *
+ * This version is async signal safe as it does not make any unsafe C library
+ * calls, malloc calls or handle any locks. It is _unfortunate_ to be forced
+ * to resort to making a kernel system call directly but this is the ONLY api
+ * available that does no harm. opendir/readdir/closedir perform memory
+ * allocation and locking so while they usually work they are not guaranteed
+ * to (especially if you have replaced your malloc implementation). A version
+ * of this function that uses those can be found in the _maybe_unsafe variant.
+ *
+ * This is Linux specific because that is all I am ready to test it on. It
+ * should be easy to add OS specific dirent or dirent64 structures and modify
+ * it with some cpp #define magic to work on other OSes as well if you want.
+ */
+static void
+_close_open_fds_safe(int start_fd, PyObject* py_fds_to_keep)
+{
+ int fd_dir_fd;
+#ifdef O_CLOEXEC
+ fd_dir_fd = open(FD_DIR, O_RDONLY | O_CLOEXEC, 0);
+#else
+ fd_dir_fd = open(FD_DIR, O_RDONLY, 0);
+#ifdef FD_CLOEXEC
+ {
+ int old = fcntl(fd_dir_fd, F_GETFD);
+ if (old != -1)
+ fcntl(fd_dir_fd, F_SETFD, old | FD_CLOEXEC);
+ }
+#endif
+#endif
+ if (fd_dir_fd == -1) {
+ /* No way to get a list of open fds. */
+ _close_fds_by_brute_force(start_fd, py_fds_to_keep);
+ return;
+ } else {
+ char buffer[sizeof(struct linux_dirent64)] = {0};
+ int bytes;
+ while ((bytes = syscall(SYS_getdents64, fd_dir_fd,
+ (struct linux_dirent64 *)buffer,
+ sizeof(buffer))) > 0) {
+ struct linux_dirent64 *entry;
+ int offset;
+ for (offset = 0; offset < bytes; offset += entry->d_reclen) {
+ int fd;
+ entry = (struct linux_dirent64 *)(buffer + offset);
+ if ((fd = _pos_int_from_ascii(entry->d_name)) < 0)
+ continue; /* Not a number. */
+ if (fd != fd_dir_fd && fd >= start_fd &&
+ !_is_fd_in_sorted_fd_sequence(fd, py_fds_to_keep)) {
+ while (close(fd) < 0 && errno == EINTR);
+ }
+ }
+ }
+ while (close(fd_dir_fd) < 0 && errno == EINTR);
+ }
+}
+
+#define _close_open_fds _close_open_fds_safe
+
+#else /* NOT (defined(__linux__) && defined(HAVE_SYS_SYSCALL_H)) */
+
+
+/* Close all open file descriptors from start_fd and higher.
+ * Do not close any in the sorted py_fds_to_keep list.
+ *
+ * This function violates the strict use of async signal safe functions. :(
+ * It calls opendir(), readdir() and closedir(). Of these, the one most
+ * likely to ever cause a problem is opendir() as it performs an internal
+ * malloc(). Practically this should not be a problem. The Java VM makes the
+ * same calls between fork and exec in its own UNIXProcess_md.c implementation.
+ *
+ * readdir_r() is not used because it provides no benefit. It is typically
+ * implemented as readdir() followed by memcpy(). See also:
+ * http://womble.decadent.org.uk/readdir_r-advisory.html
+ */
+static void
+_close_open_fds_maybe_unsafe(long start_fd, PyObject* py_fds_to_keep)
+{
+ DIR *proc_fd_dir;
+#ifndef HAVE_DIRFD
+ while (_is_fd_in_sorted_fd_sequence(start_fd, py_fds_to_keep)) {
+ ++start_fd;
+ }
+ /* Close our lowest fd before we call opendir so that it is likely to
+ * reuse that fd otherwise we might close opendir's file descriptor in
+ * our loop. This trick assumes that fd's are allocated on a lowest
+ * available basis. */
+ while (close(start_fd) < 0 && errno == EINTR);
+ ++start_fd;
+#endif
+
+#if defined(__FreeBSD__)
+ if (!_is_fdescfs_mounted_on_dev_fd())
+ proc_fd_dir = NULL;
+ else
+#endif
+ proc_fd_dir = opendir(FD_DIR);
+ if (!proc_fd_dir) {
+ /* No way to get a list of open fds. */
+ _close_fds_by_brute_force(start_fd, py_fds_to_keep);
+ } else {
+ struct dirent *dir_entry;
+#ifdef HAVE_DIRFD
+ int fd_used_by_opendir = dirfd(proc_fd_dir);
+#else
+ int fd_used_by_opendir = start_fd - 1;
+#endif
+ errno = 0;
+ while ((dir_entry = readdir(proc_fd_dir))) {
+ int fd;
+ if ((fd = _pos_int_from_ascii(dir_entry->d_name)) < 0)
+ continue; /* Not a number. */
+ if (fd != fd_used_by_opendir && fd >= start_fd &&
+ !_is_fd_in_sorted_fd_sequence(fd, py_fds_to_keep)) {
+ while (close(fd) < 0 && errno == EINTR);
+ }
+ errno = 0;
+ }
+ if (errno) {
+ /* readdir error, revert behavior. Highly Unlikely. */
+ _close_fds_by_brute_force(start_fd, py_fds_to_keep);
+ }
+ closedir(proc_fd_dir);
+ }
+}
+
+#define _close_open_fds _close_open_fds_maybe_unsafe
+
+#endif /* else NOT (defined(__linux__) && defined(HAVE_SYS_SYSCALL_H)) */
+
+
+/*
+ * This function is code executed in the child process immediately after fork
+ * to set things up and call exec().
+ *
+ * All of the code in this function must only use async-signal-safe functions,
+ * listed at `man 7 signal` or
+ * http://www.opengroup.org/onlinepubs/009695399/functions/xsh_chap02_04.html.
+ *
+ * This restriction is documented at
+ * http://www.opengroup.org/onlinepubs/009695399/functions/fork.html.
+ */
+static void
+child_exec(char *const exec_array[],
+ char *const argv[],
+ char *const envp[],
+ const char *cwd,
+ int p2cread, int p2cwrite,
+ int c2pread, int c2pwrite,
+ int errread, int errwrite,
+ int errpipe_read, int errpipe_write,
+ int close_fds, int restore_signals,
+ int call_setsid,
+ PyObject *py_fds_to_keep,
+ PyObject *preexec_fn,
+ PyObject *preexec_fn_args_tuple)
+{
+ int i, saved_errno, unused, reached_preexec = 0;
+ PyObject *result;
+ const char* err_msg = "";
+ /* Buffer large enough to hold a hex integer. We can't malloc. */
+ char hex_errno[sizeof(saved_errno)*2+1];
+
+ /* Close parent's pipe ends. */
+ if (p2cwrite != -1) {
+ POSIX_CALL(close(p2cwrite));
+ }
+ if (c2pread != -1) {
+ POSIX_CALL(close(c2pread));
+ }
+ if (errread != -1) {
+ POSIX_CALL(close(errread));
+ }
+ POSIX_CALL(close(errpipe_read));
+
+ /* When duping fds, if there arises a situation where one of the fds is
+ either 0, 1 or 2, it is possible that it is overwritten (#12607). */
+ if (c2pwrite == 0)
+ POSIX_CALL(c2pwrite = dup(c2pwrite));
+ if (errwrite == 0 || errwrite == 1)
+ POSIX_CALL(errwrite = dup(errwrite));
+
+ /* Dup fds for child.
+ dup2() removes the CLOEXEC flag but we must do it ourselves if dup2()
+ would be a no-op (issue #10806). */
+ if (p2cread == 0) {
+ int old = fcntl(p2cread, F_GETFD);
+ if (old != -1)
+ fcntl(p2cread, F_SETFD, old & ~FD_CLOEXEC);
+ } else if (p2cread != -1) {
+ POSIX_CALL(dup2(p2cread, 0)); /* stdin */
+ }
+ if (c2pwrite == 1) {
+ int old = fcntl(c2pwrite, F_GETFD);
+ if (old != -1)
+ fcntl(c2pwrite, F_SETFD, old & ~FD_CLOEXEC);
+ } else if (c2pwrite != -1) {
+ POSIX_CALL(dup2(c2pwrite, 1)); /* stdout */
+ }
+ if (errwrite == 2) {
+ int old = fcntl(errwrite, F_GETFD);
+ if (old != -1)
+ fcntl(errwrite, F_SETFD, old & ~FD_CLOEXEC);
+ } else if (errwrite != -1) {
+ POSIX_CALL(dup2(errwrite, 2)); /* stderr */
+ }
+
+ /* Close pipe fds. Make sure we don't close the same fd more than */
+ /* once, or standard fds. */
+ if (p2cread > 2) {
+ POSIX_CALL(close(p2cread));
+ }
+ if (c2pwrite > 2 && c2pwrite != p2cread) {
+ POSIX_CALL(close(c2pwrite));
+ }
+ if (errwrite != c2pwrite && errwrite != p2cread && errwrite > 2) {
+ POSIX_CALL(close(errwrite));
+ }
+
+ if (cwd)
+ POSIX_CALL(chdir(cwd));
+
+ if (restore_signals)
+ _Py_RestoreSignals();
+
+#ifdef HAVE_SETSID
+ if (call_setsid)
+ POSIX_CALL(setsid());
+#endif
+
+ reached_preexec = 1;
+ if (preexec_fn != Py_None && preexec_fn_args_tuple) {
+ /* This is where the user has asked us to deadlock their program. */
+ result = PyObject_Call(preexec_fn, preexec_fn_args_tuple, NULL);
+ if (result == NULL) {
+ /* Stringifying the exception or traceback would involve
+ * memory allocation and thus potential for deadlock.
+ * We've already faced potential deadlock by calling back
+ * into Python in the first place, so it probably doesn't
+ * matter but we avoid it to minimize the possibility. */
+ err_msg = "Exception occurred in preexec_fn.";
+ errno = 0; /* We don't want to report an OSError. */
+ goto error;
+ }
+ /* Py_DECREF(result); - We're about to exec so why bother? */
+ }
+
+ _unset_cloexec_on_fds(py_fds_to_keep, errpipe_write);
+ if (close_fds) {
+ /* TODO HP-UX could use pstat_getproc() if anyone cares about it. */
+ _close_open_fds(3, py_fds_to_keep);
+ }
+
+ /* This loop matches the Lib/os.py _execvpe()'s PATH search when */
+ /* given the executable_list generated by Lib/subprocess.py. */
+ saved_errno = 0;
+ for (i = 0; exec_array[i] != NULL; ++i) {
+ const char *executable = exec_array[i];
+ if (envp) {
+ execve(executable, argv, envp);
+ } else {
+ execv(executable, argv);
+ }
+ if (errno != ENOENT && errno != ENOTDIR && saved_errno == 0) {
+ saved_errno = errno;
+ }
+ }
+ /* Report the first exec error, not the last. */
+ if (saved_errno)
+ errno = saved_errno;
+
+error:
+ saved_errno = errno;
+ /* Report the posix error to our parent process. */
+ /* We ignore all write() return values as the total size of our writes is
+ * less than PIPEBUF and we cannot do anything about an error anyways. */
+ if (saved_errno) {
+ char *cur;
+ unused = write(errpipe_write, "OSError:", 8);
+ cur = hex_errno + sizeof(hex_errno);
+ while (saved_errno != 0 && cur > hex_errno) {
+ *--cur = "0123456789ABCDEF"[saved_errno % 16];
+ saved_errno /= 16;
+ }
+ unused = write(errpipe_write, cur, hex_errno + sizeof(hex_errno) - cur);
+ unused = write(errpipe_write, ":", 1);
+ if (!reached_preexec) {
+ /* Indicate to the parent that the error happened before exec(). */
+ unused = write(errpipe_write, "noexec", 6);
+ }
+ /* We can't call strerror(saved_errno). It is not async signal safe.
+ * The parent process will look the error message up. */
+ } else {
+ unused = write(errpipe_write, "RuntimeError:0:", 15);
+ unused = write(errpipe_write, err_msg, strlen(err_msg));
+ }
+ if (unused) return; /* silly? yes! avoids gcc compiler warning. */
+}
+
+
+static PyObject *
+subprocess_fork_exec(PyObject* self, PyObject *args)
+{
+ PyObject *gc_module = NULL;
+ PyObject *executable_list, *py_close_fds, *py_fds_to_keep;
+ PyObject *env_list, *preexec_fn;
+ PyObject *process_args, *converted_args = NULL, *fast_args = NULL;
+ PyObject *preexec_fn_args_tuple = NULL;
+ int p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite;
+ int errpipe_read, errpipe_write, close_fds, restore_signals;
+ int call_setsid;
+ PyObject *cwd_obj, *cwd_obj2;
+ const char *cwd;
+ pid_t pid;
+ int need_to_reenable_gc = 0;
+ char *const *exec_array, *const *argv = NULL, *const *envp = NULL;
+ Py_ssize_t arg_num;
+
+ if (!PyArg_ParseTuple(
+ args, "OOOOOOiiiiiiiiiiO:fork_exec",
+ &process_args, &executable_list, &py_close_fds, &py_fds_to_keep,
+ &cwd_obj, &env_list,
+ &p2cread, &p2cwrite, &c2pread, &c2pwrite,
+ &errread, &errwrite, &errpipe_read, &errpipe_write,
+ &restore_signals, &call_setsid, &preexec_fn))
+ return NULL;
+
+ close_fds = PyObject_IsTrue(py_close_fds);
+ if (close_fds < 0)
+ return NULL;
+ if (close_fds && errpipe_write < 3) { /* precondition */
+ PyErr_SetString(PyExc_ValueError, "errpipe_write must be >= 3");
+ return NULL;
+ }
+ if (PySequence_Length(py_fds_to_keep) < 0) {
+ PyErr_SetString(PyExc_ValueError, "cannot get length of fds_to_keep");
+ return NULL;
+ }
+ if (_sanity_check_python_fd_sequence(py_fds_to_keep)) {
+ PyErr_SetString(PyExc_ValueError, "bad value(s) in fds_to_keep");
+ return NULL;
+ }
+
+ /* We need to call gc.disable() when we'll be calling preexec_fn */
+ if (preexec_fn != Py_None) {
+ PyObject *result;
+ gc_module = PyImport_ImportModule("gc");
+ if (gc_module == NULL)
+ return NULL;
+ result = PyObject_CallMethod(gc_module, "isenabled", NULL);
+ if (result == NULL) {
+ Py_DECREF(gc_module);
+ return NULL;
+ }
+ need_to_reenable_gc = PyObject_IsTrue(result);
+ Py_DECREF(result);
+ if (need_to_reenable_gc == -1) {
+ Py_DECREF(gc_module);
+ return NULL;
+ }
+ result = PyObject_CallMethod(gc_module, "disable", NULL);
+ if (result == NULL) {
+ Py_DECREF(gc_module);
+ return NULL;
+ }
+ Py_DECREF(result);
+ }
+
+ exec_array = _PySequence_BytesToCharpArray(executable_list);
+ if (!exec_array) {
+ Py_XDECREF(gc_module);
+ return NULL;
+ }
+
+ /* Convert args and env into appropriate arguments for exec() */
+ /* These conversions are done in the parent process to avoid allocating
+ or freeing memory in the child process. */
+ if (process_args != Py_None) {
+ Py_ssize_t num_args;
+ /* Equivalent to: */
+ /* tuple(PyUnicode_FSConverter(arg) for arg in process_args) */
+ fast_args = PySequence_Fast(process_args, "argv must be a tuple");
+ if (fast_args == NULL)
+ goto cleanup;
+ num_args = PySequence_Fast_GET_SIZE(fast_args);
+ converted_args = PyTuple_New(num_args);
+ if (converted_args == NULL)
+ goto cleanup;
+ for (arg_num = 0; arg_num < num_args; ++arg_num) {
+ PyObject *borrowed_arg, *converted_arg;
+ borrowed_arg = PySequence_Fast_GET_ITEM(fast_args, arg_num);
+ if (PyUnicode_FSConverter(borrowed_arg, &converted_arg) == 0)
+ goto cleanup;
+ PyTuple_SET_ITEM(converted_args, arg_num, converted_arg);
+ }
+
+ argv = _PySequence_BytesToCharpArray(converted_args);
+ Py_CLEAR(converted_args);
+ Py_CLEAR(fast_args);
+ if (!argv)
+ goto cleanup;
+ }
+
+ if (env_list != Py_None) {
+ envp = _PySequence_BytesToCharpArray(env_list);
+ if (!envp)
+ goto cleanup;
+ }
+
+ if (preexec_fn != Py_None) {
+ preexec_fn_args_tuple = PyTuple_New(0);
+ if (!preexec_fn_args_tuple)
+ goto cleanup;
+ _PyImport_AcquireLock();
+ }
+
+ if (cwd_obj != Py_None) {
+ if (PyUnicode_FSConverter(cwd_obj, &cwd_obj2) == 0)
+ goto cleanup;
+ cwd = PyString_AsString(cwd_obj2);
+ } else {
+ cwd = NULL;
+ cwd_obj2 = NULL;
+ }
+
+ pid = fork();
+ if (pid == 0) {
+ /* Child process */
+ /*
+ * Code from here to _exit() must only use async-signal-safe functions,
+ * listed at `man 7 signal` or
+ * http://www.opengroup.org/onlinepubs/009695399/functions/xsh_chap02_04.html.
+ */
+
+ if (preexec_fn != Py_None) {
+ /* We'll be calling back into Python later so we need to do this.
+ * This call may not be async-signal-safe but neither is calling
+ * back into Python. The user asked us to use hope as a strategy
+ * to avoid deadlock... */
+ PyOS_AfterFork();
+ }
+
+ child_exec(exec_array, argv, envp, cwd,
+ p2cread, p2cwrite, c2pread, c2pwrite,
+ errread, errwrite, errpipe_read, errpipe_write,
+ close_fds, restore_signals, call_setsid,
+ py_fds_to_keep, preexec_fn, preexec_fn_args_tuple);
+ _exit(255);
+ return NULL; /* Dead code to avoid a potential compiler warning. */
+ }
+ Py_XDECREF(cwd_obj2);
+
+ if (pid == -1) {
+ /* Capture the errno exception before errno can be clobbered. */
+ PyErr_SetFromErrno(PyExc_OSError);
+ }
+ if (preexec_fn != Py_None &&
+ _PyImport_ReleaseLock() < 0 && !PyErr_Occurred()) {
+ PyErr_SetString(PyExc_RuntimeError,
+ "not holding the import lock");
+ }
+
+ /* Parent process */
+ if (envp)
+ _Py_FreeCharPArray(envp);
+ if (argv)
+ _Py_FreeCharPArray(argv);
+ _Py_FreeCharPArray(exec_array);
+
+ /* Reenable gc in the parent process (or if fork failed). */
+ if (need_to_reenable_gc && _enable_gc(gc_module)) {
+ Py_XDECREF(gc_module);
+ return NULL;
+ }
+ Py_XDECREF(preexec_fn_args_tuple);
+ Py_XDECREF(gc_module);
+
+ if (pid == -1)
+ return NULL; /* fork() failed. Exception set earlier. */
+
+ return PyLong_FromPid(pid);
+
+cleanup:
+ if (envp)
+ _Py_FreeCharPArray(envp);
+ if (argv)
+ _Py_FreeCharPArray(argv);
+ _Py_FreeCharPArray(exec_array);
+ Py_XDECREF(converted_args);
+ Py_XDECREF(fast_args);
+ Py_XDECREF(preexec_fn_args_tuple);
+
+ /* Reenable gc if it was disabled. */
+ if (need_to_reenable_gc)
+ _enable_gc(gc_module);
+ Py_XDECREF(gc_module);
+ return NULL;
+}
+
+
+PyDoc_STRVAR(subprocess_fork_exec_doc,
+"fork_exec(args, executable_list, close_fds, cwd, env,\n\
+ p2cread, p2cwrite, c2pread, c2pwrite,\n\
+ errread, errwrite, errpipe_read, errpipe_write,\n\
+ restore_signals, call_setsid, preexec_fn)\n\
+\n\
+Forks a child process, closes parent file descriptors as appropriate in the\n\
+child and dups the few that are needed before calling exec() in the child\n\
+process.\n\
+\n\
+The preexec_fn, if supplied, will be called immediately before exec.\n\
+WARNING: preexec_fn is NOT SAFE if your application uses threads.\n\
+ It may trigger infrequent, difficult to debug deadlocks.\n\
+\n\
+If an error occurs in the child process before the exec, it is\n\
+serialized and written to the errpipe_write fd per subprocess.py.\n\
+\n\
+Returns: the child process's PID.\n\
+\n\
+Raises: Only on an error in the parent process.\n\
+");
+
+PyDoc_STRVAR(subprocess_cloexec_pipe_doc,
+"cloexec_pipe() -> (read_end, write_end)\n\n\
+Create a pipe whose ends have the cloexec flag set; write_end will be >= 3.");
+
+static PyObject *
+subprocess_cloexec_pipe(PyObject *self, PyObject *noargs)
+{
+ int fds[2];
+ int res, saved_errno;
+ long oldflags;
+#if (defined(HAVE_PIPE2) && defined(O_CLOEXEC))
+ Py_BEGIN_ALLOW_THREADS
+ res = pipe2(fds, O_CLOEXEC);
+ Py_END_ALLOW_THREADS
+ if (res != 0 && errno == ENOSYS)
+ {
+#endif
+ /* We hold the GIL which offers some protection from other code calling
+ * fork() before the CLOEXEC flags have been set but we can't guarantee
+ * anything without pipe2(). */
+ res = pipe(fds);
+
+ if (res == 0) {
+ oldflags = fcntl(fds[0], F_GETFD, 0);
+ if (oldflags < 0) res = oldflags;
+ }
+ if (res == 0)
+ res = fcntl(fds[0], F_SETFD, oldflags | FD_CLOEXEC);
+
+ if (res == 0) {
+ oldflags = fcntl(fds[1], F_GETFD, 0);
+ if (oldflags < 0) res = oldflags;
+ }
+ if (res == 0)
+ res = fcntl(fds[1], F_SETFD, oldflags | FD_CLOEXEC);
+#if (defined(HAVE_PIPE2) && defined(O_CLOEXEC))
+ }
+#endif
+ if (res == 0 && fds[1] < 3) {
+ /* We always want the write end of the pipe to avoid fds 0, 1 and 2
+ * as our child may claim those for stdio connections. */
+ int write_fd = fds[1];
+ int fds_to_close[3] = {-1, -1, -1};
+ int fds_to_close_idx = 0;
+#ifdef F_DUPFD_CLOEXEC
+ fds_to_close[fds_to_close_idx++] = write_fd;
+ write_fd = fcntl(write_fd, F_DUPFD_CLOEXEC, 3);
+ if (write_fd < 0) /* We don't support F_DUPFD_CLOEXEC / other error */
+#endif
+ {
+ /* Use dup a few times until we get a desirable fd. */
+ for (; fds_to_close_idx < 3; ++fds_to_close_idx) {
+ fds_to_close[fds_to_close_idx] = write_fd;
+ write_fd = dup(write_fd);
+ if (write_fd >= 3)
+ break;
+ /* We may dup a few extra times if it returns an error but
+ * that is okay. Repeat calls should return the same error. */
+ }
+ if (write_fd < 0) res = write_fd;
+ if (res == 0) {
+ oldflags = fcntl(write_fd, F_GETFD, 0);
+ if (oldflags < 0) res = oldflags;
+ if (res == 0)
+ res = fcntl(write_fd, F_SETFD, oldflags | FD_CLOEXEC);
+ }
+ }
+ saved_errno = errno;
+ /* Close fds we tried for the write end that were too low. */
+ for (fds_to_close_idx=0; fds_to_close_idx < 3; ++fds_to_close_idx) {
+ int temp_fd = fds_to_close[fds_to_close_idx];
+ while (temp_fd >= 0 && close(temp_fd) < 0 && errno == EINTR);
+ }
+ errno = saved_errno; /* report dup or fcntl errors, not close. */
+ fds[1] = write_fd;
+ } /* end if write fd was too small */
+
+ if (res != 0)
+ return PyErr_SetFromErrno(PyExc_OSError);
+ return Py_BuildValue("(ii)", fds[0], fds[1]);
+}
+
+/* module level code ********************************************************/
+
+#define MIN_PY_VERSION_WITH_PYIMPORT_ACQUIRELOCK 0x02060300
+#if (PY_VERSION_HEX < MIN_PY_VERSION_WITH_PYIMPORT_ACQUIRELOCK)
+static PyObject* imp_module;
+
+static void
+_PyImport_AcquireLock(void)
+{
+ PyObject *result;
+ result = PyObject_CallMethod(imp_module, "acquire_lock", NULL);
+ if (result == NULL) {
+ fprintf(stderr, "imp.acquire_lock() failed.\n");
+ return;
+ }
+ Py_DECREF(result);
+}
+
+static int
+_PyImport_ReleaseLock(void)
+{
+ PyObject *result;
+ result = PyObject_CallMethod(imp_module, "release_lock", NULL);
+ if (result == NULL) {
+ fprintf(stderr, "imp.release_lock() failed.\n");
+ return -1;
+ }
+ Py_DECREF(result);
+ return 0;
+}
+#endif /* PY_VERSION_HEX < 0x02060300 */
+
+
+PyDoc_STRVAR(module_doc,
+"A POSIX helper for the subprocess module.");
+
+
+static PyMethodDef module_methods[] = {
+ {"fork_exec", subprocess_fork_exec, METH_VARARGS, subprocess_fork_exec_doc},
+ {"cloexec_pipe", subprocess_cloexec_pipe, METH_NOARGS, subprocess_cloexec_pipe_doc},
+ {NULL, NULL} /* sentinel */
+};
+
+
+PyMODINIT_FUNC
+init_posixsubprocess32(void)
+{
+ PyObject *m;
+
+#if (PY_VERSION_HEX < MIN_PY_VERSION_WITH_PYIMPORT_ACQUIRELOCK)
+ imp_module = PyImport_ImportModule("imp");
+ if (imp_module == NULL)
+ return;
+#endif
+
+ m = Py_InitModule3("_posixsubprocess32", module_methods, module_doc);
+ if (m == NULL)
+ return;
+}
diff --git a/contrib/deprecated/python/subprocess32/_posixsubprocess_config.h b/contrib/deprecated/python/subprocess32/_posixsubprocess_config.h
new file mode 100644
index 0000000000..0e13698f99
--- /dev/null
+++ b/contrib/deprecated/python/subprocess32/_posixsubprocess_config.h
@@ -0,0 +1,115 @@
+/* _posixsubprocess_config.h. Generated from _posixsubprocess_config.h.in by configure. */
+/* _posixsubprocess_config.h.in. Generated from configure.ac by autoheader. */
+
+/* Define to 1 if you have the <dirent.h> header file, and it defines `DIR'.
+ */
+#define HAVE_DIRENT_H 1
+
+/* Define if you have the 'dirfd' function or macro. */
+#define HAVE_DIRFD 1
+
+/* Define to 1 if you have the <fcntl.h> header file. */
+#define HAVE_FCNTL_H 1
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#define HAVE_INTTYPES_H 1
+
+/* Define to 1 if you have the <memory.h> header file. */
+#define HAVE_MEMORY_H 1
+
+/* Define to 1 if you have the <ndir.h> header file, and it defines `DIR'. */
+/* #undef HAVE_NDIR_H */
+
+/* Define to 1 if you have the `pipe2' function. */
+#if defined(__linux__)
+#define HAVE_PIPE2 1
+#endif
+
+/* Define to 1 if you have the `setsid' function. */
+#define HAVE_SETSID 1
+
+/* Define to 1 if you have the <signal.h> header file. */
+#define HAVE_SIGNAL_H 1
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#define HAVE_STDINT_H 1
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#define HAVE_STRINGS_H 1
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the <sys/cdefs.h> header file. */
+#define HAVE_SYS_CDEFS_H 1
+
+/* Define to 1 if you have the <sys/dir.h> header file, and it defines `DIR'.
+ */
+/* #undef HAVE_SYS_DIR_H */
+
+/* Define to 1 if you have the <sys/ndir.h> header file, and it defines `DIR'.
+ */
+/* #undef HAVE_SYS_NDIR_H */
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/syscall.h> header file. */
+#define HAVE_SYS_SYSCALL_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#define HAVE_UNISTD_H 1
+
+/* Define to the address where bug reports for this package should be sent. */
+#define PACKAGE_BUGREPORT "https://github.com/google/python-subprocess32/"
+
+/* Define to the full name of this package. */
+#define PACKAGE_NAME "_posixsubprocess32"
+
+/* Define to the full name and version of this package. */
+#define PACKAGE_STRING "_posixsubprocess32 3.5"
+
+/* Define to the one symbol short name of this package. */
+#define PACKAGE_TARNAME "_posixsubprocess32"
+
+/* Define to the home page for this package. */
+#define PACKAGE_URL ""
+
+/* Define to the version of this package. */
+#define PACKAGE_VERSION "3.5"
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS 1
+
+/* Define on OpenBSD to activate all library features */
+/* #undef _BSD_SOURCE */
+
+/* Define on Irix to enable u_int */
+#define _BSD_TYPES 1
+
+/* Define on Darwin to activate all library features */
+#define _DARWIN_C_SOURCE 1
+
+/* Define on Linux to activate all library features */
+#define _GNU_SOURCE 1
+
+/* Define on NetBSD to activate all library features */
+#define _NETBSD_SOURCE 1
+
+/* Define to activate features from IEEE Stds 1003.1-2008 */
+#define _POSIX_C_SOURCE 200809L
+
+/* Define to the level of X/Open that your system supports */
+#define _XOPEN_SOURCE 700
+
+/* Define to activate Unix95-and-earlier features */
+#define _XOPEN_SOURCE_EXTENDED 1
+
+/* Define on FreeBSD to activate all library features */
+#define __BSD_VISIBLE 1
diff --git a/contrib/deprecated/python/subprocess32/_posixsubprocess_helpers.c b/contrib/deprecated/python/subprocess32/_posixsubprocess_helpers.c
new file mode 100644
index 0000000000..73f6b6c05a
--- /dev/null
+++ b/contrib/deprecated/python/subprocess32/_posixsubprocess_helpers.c
@@ -0,0 +1,174 @@
+/* Functions and macros from Python 3.2 not found in 2.x.
+ This file is #included by _posixsubprocess.c and the functions
+ are declared static to avoid exposing them outside this module. */
+
+/* _posixsubprocess_config.h was already included by _posixsubprocess.c
+ * which is #include'ing us despite the .c name. HAVE_SIGNAL_H comes
+ * from there. Yes, confusing! */
+#ifdef HAVE_SIGNAL_H
+#include <signal.h>
+#endif
+#include "unicodeobject.h"
+
+#if (PY_VERSION_HEX < 0x02050000)
+#define Py_ssize_t int
+#endif
+
+#define Py_CLEANUP_SUPPORTED 0x20000
+
+/* Issue #1983: pid_t can be longer than a C long on some systems */
+#if !defined(SIZEOF_PID_T) || SIZEOF_PID_T == SIZEOF_INT
+#define PyLong_FromPid PyLong_FromLong
+#elif SIZEOF_PID_T == SIZEOF_LONG
+#define PyLong_FromPid PyLong_FromLong
+#elif defined(SIZEOF_LONG_LONG) && SIZEOF_PID_T == SIZEOF_LONG_LONG
+#define PyLong_FromPid PyLong_FromLongLong
+#else
+#error "sizeof(pid_t) is neither sizeof(int), sizeof(long) nor sizeof(long long)"
+#endif /* SIZEOF_PID_T */
+
+
+static PyObject *PyUnicode_EncodeFSDefault(PyObject *unicode)
+{
+ if (Py_FileSystemDefaultEncoding)
+ return PyUnicode_AsEncodedString(unicode,
+ Py_FileSystemDefaultEncoding,
+ "strict");
+ else
+ return PyUnicode_EncodeUTF8(PyUnicode_AS_UNICODE(unicode),
+ PyUnicode_GET_SIZE(unicode),
+ "strict");
+}
+
+
+/* Convert the argument to a bytes object, according to the file
+ system encoding. The addr param must be a PyObject**.
+ This is designed to be used with "O&" in PyArg_Parse APIs. */
+
+static int
+PyUnicode_FSConverter(PyObject* arg, void* addr)
+{
+ PyObject *output = NULL;
+ Py_ssize_t size;
+ void *data;
+ if (arg == NULL) {
+ Py_DECREF(*(PyObject**)addr);
+ return 1;
+ }
+ if (PyString_Check(arg)) {
+ output = arg;
+ Py_INCREF(output);
+ }
+ else {
+ arg = PyUnicode_FromObject(arg);
+ if (!arg)
+ return 0;
+ output = PyUnicode_EncodeFSDefault(arg);
+ Py_DECREF(arg);
+ if (!output)
+ return 0;
+ if (!PyString_Check(output)) {
+ Py_DECREF(output);
+ PyErr_SetString(PyExc_TypeError, "encoder failed to return bytes");
+ return 0;
+ }
+ }
+ size = PyString_GET_SIZE(output);
+ data = PyString_AS_STRING(output);
+ if (size != strlen(data)) {
+ PyErr_SetString(PyExc_TypeError, "embedded NUL character");
+ Py_DECREF(output);
+ return 0;
+ }
+ *(PyObject**)addr = output;
+ return Py_CLEANUP_SUPPORTED;
+}
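+
+/* Illustrative call (this is how subprocess_fork_exec() uses it):
+ *
+ *   PyObject *bytes_obj = NULL;
+ *   if (PyUnicode_FSConverter(cwd_obj, &bytes_obj) == 0)
+ *       ... conversion failed and a Python exception is set ...
+ *
+ * On success it returns Py_CLEANUP_SUPPORTED and stores a new reference
+ * to a NUL-free bytes object in *addr. */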
+
+
+/* Frees a NULL-terminated char** array of C strings. */
+static void
+_Py_FreeCharPArray(char *const array[])
+{
+ Py_ssize_t i;
+ for (i = 0; array[i] != NULL; ++i) {
+ free(array[i]);
+ }
+ free((void*)array);
+}
+
+
+/*
+ * Flatten a sequence of bytes() objects into a C array of
+ * NULL terminated string pointers with a NULL char* terminating the array.
+ * (i.e. an argv or env list)
+ *
+ * Memory allocated for the returned list is allocated using malloc() and MUST
+ * be freed by the caller using a free() loop or _Py_FreeCharPArray().
+ */
+static char *const *
+_PySequence_BytesToCharpArray(PyObject* self)
+{
+ char **array;
+ Py_ssize_t i, argc;
+ PyObject *item = NULL;
+
+ argc = PySequence_Size(self);
+ if (argc == -1)
+ return NULL;
+ /* Avoid 32-bit overflows to malloc() from unreasonable values. */
+ if (argc > 0x10000000) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+
+ array = malloc((argc + 1) * sizeof(char *));
+ if (array == NULL) {
+ PyErr_NoMemory();
+ return NULL;
+ }
+ for (i = 0; i < argc; ++i) {
+ char *data;
+ item = PySequence_GetItem(self, i);
+ data = PyString_AsString(item);
+ if (data == NULL) {
+ /* NULL terminate before freeing. */
+ array[i] = NULL;
+ goto fail;
+ }
+ array[i] = strdup(data);
+ if (!array[i]) {
+ PyErr_NoMemory();
+ goto fail;
+ }
+ Py_DECREF(item);
+ }
+ array[argc] = NULL;
+
+ return array;
+
+fail:
+ Py_XDECREF(item);
+ _Py_FreeCharPArray(array);
+ return NULL;
+}
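+
+/* Example: for the Python list [b"ls", b"-l"] this returns the C array
+ * {"ls", "-l", NULL}; callers release it with _Py_FreeCharPArray(). */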
+
+
+/* Restore signals that the interpreter has called SIG_IGN on to SIG_DFL.
+ *
+ * All of the code in this function must only use async-signal-safe functions,
+ * listed at `man 7 signal` or
+ * http://www.opengroup.org/onlinepubs/009695399/functions/xsh_chap02_04.html.
+ */
+static void
+_Py_RestoreSignals(void)
+{
+#ifdef SIGPIPE
+ PyOS_setsig(SIGPIPE, SIG_DFL);
+#endif
+#ifdef SIGXFZ
+ PyOS_setsig(SIGXFZ, SIG_DFL);
+#endif
+#ifdef SIGXFSZ
+ PyOS_setsig(SIGXFSZ, SIG_DFL);
+#endif
+}
diff --git a/contrib/deprecated/python/subprocess32/subprocess32.py b/contrib/deprecated/python/subprocess32/subprocess32.py
new file mode 100644
index 0000000000..8ab9b14fd0
--- /dev/null
+++ b/contrib/deprecated/python/subprocess32/subprocess32.py
@@ -0,0 +1,1752 @@
+# subprocess - Subprocesses with accessible I/O streams
+#
+# For more information about this module, see PEP 324.
+#
+# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
+#
+# Licensed to PSF under a Contributor Agreement.
+# See http://www.python.org/3.3/license for licensing details.
+
+r"""Subprocesses with accessible I/O streams
+
+This module allows you to spawn processes, connect to their
+input/output/error pipes, and obtain their return codes.
+
+For a complete description of this module see the Python documentation.
+
+Main API
+========
+run(...): Runs a command, waits for it to complete, then returns a
+ CompletedProcess instance.
+Popen(...): A class for flexibly executing a command in a new process
+
+Constants
+---------
+DEVNULL: Special value that indicates that os.devnull should be used
+PIPE: Special value that indicates a pipe should be created
+STDOUT: Special value that indicates that stderr should go to stdout
+
+
+Older API
+=========
+call(...): Runs a command, waits for it to complete, then returns
+ the return code.
+check_call(...): Same as call() but raises CalledProcessError()
+ if return code is not 0
+check_output(...): Same as check_call() but returns the contents of
+ stdout instead of a return code
+"""
+
+import sys
+mswindows = (sys.platform == "win32")
+
+import os
+import errno
+import exceptions
+import types
+import time
+import traceback
+import gc
+import signal
+
+# Exception classes used by this module.
+class SubprocessError(Exception): pass
+
+
+class CalledProcessError(SubprocessError):
+ """Raised when run() is called with check=True and the process
+ returns a non-zero exit status.
+
+ Attributes:
+ cmd, returncode, stdout, stderr, output
+ """
+ def __init__(self, returncode, cmd, output=None, stderr=None):
+ self.returncode = returncode
+ self.cmd = cmd
+ self.output = output
+ self.stderr = stderr
+ super(CalledProcessError, self).__init__(returncode, cmd,
+ output, stderr)
+
+ def __str__(self):
+ if self.returncode and self.returncode < 0:
+ return "Command '%s' died with signal %d." % (
+ self.cmd, -self.returncode)
+ else:
+ return "Command '%s' returned non-zero exit status %d." % (
+ self.cmd, self.returncode)
+
+ #@property
+ def __stdout_getter(self):
+ """Alias for output attribute, to match stderr"""
+ return self.output
+
+ #@stdout.setter # Required Python 2.6
+ def __stdout_setter(self, value):
+ # There's no obvious reason to set this, but allow it anyway so
+ # .stdout is a transparent alias for .output
+ self.output = value
+
+ stdout = property(__stdout_getter, __stdout_setter) # Python 2.4
+
+
+class TimeoutExpired(SubprocessError):
+ """This exception is raised when the timeout expires while waiting for a
+ child process.
+
+ Attributes:
+ cmd, output, stdout, stderr, timeout
+ """
+ def __init__(self, cmd, timeout, output=None, stderr=None):
+ self.cmd = cmd
+ self.timeout = timeout
+ self.output = output
+ self.stderr = stderr
+ super(TimeoutExpired, self).__init__(cmd, timeout, output, stderr)
+
+ def __str__(self):
+ return ("Command '%s' timed out after %s seconds" %
+ (self.cmd, self.timeout))
+
+ #@property
+ def __stdout_getter(self):
+ return self.output
+
+ #@stdout.setter # Required Python 2.6
+ def __stdout_setter(self, value):
+ # There's no obvious reason to set this, but allow it anyway so
+ # .stdout is a transparent alias for .output
+ self.output = value
+
+ stdout = property(__stdout_getter, __stdout_setter) # Python 2.4
+
+
+if mswindows:
+ import threading
+ import msvcrt
+ import _subprocess
+ class STARTUPINFO:
+ dwFlags = 0
+ hStdInput = None
+ hStdOutput = None
+ hStdError = None
+ wShowWindow = 0
+ class pywintypes:
+ error = IOError
+else:
+ import select
+ _has_poll = hasattr(select, 'poll')
+ import fcntl
+ import pickle
+
+ try:
+ import _posixsubprocess32 as _posixsubprocess
+ except ImportError:
+ _posixsubprocess = None
+ import warnings
+ warnings.warn("The _posixsubprocess module is not being used. "
+ "Child process reliability may suffer if your "
+ "program uses threads.", RuntimeWarning)
+ try:
+ import threading
+ except ImportError:
+ import dummy_threading as threading
+
+ # When select or poll has indicated that the file is writable,
+ # we can write up to _PIPE_BUF bytes without risk of blocking.
+ # POSIX defines PIPE_BUF as >= 512.
+ _PIPE_BUF = getattr(select, 'PIPE_BUF', 512)
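+    # (On Linux, for example, select.PIPE_BUF is 4096.)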
+
+ _FD_CLOEXEC = getattr(fcntl, 'FD_CLOEXEC', 1)
+
+ def _set_cloexec(fd, cloexec):
+ old = fcntl.fcntl(fd, fcntl.F_GETFD)
+ if cloexec:
+ fcntl.fcntl(fd, fcntl.F_SETFD, old | _FD_CLOEXEC)
+ else:
+ fcntl.fcntl(fd, fcntl.F_SETFD, old & ~_FD_CLOEXEC)
+
+ if _posixsubprocess:
+ _create_pipe = _posixsubprocess.cloexec_pipe
+ else:
+ def _create_pipe():
+ fds = os.pipe()
+ _set_cloexec(fds[0], True)
+ _set_cloexec(fds[1], True)
+ return fds
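+
+    # Either way, _create_pipe() returns a (read_fd, write_fd) pair with
+    # FD_CLOEXEC set on both descriptors.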
+
+__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call",
+ "check_output", "CalledProcessError"]
+
+if mswindows:
+ from _subprocess import (CREATE_NEW_CONSOLE, CREATE_NEW_PROCESS_GROUP,
+ STD_INPUT_HANDLE, STD_OUTPUT_HANDLE,
+ STD_ERROR_HANDLE, SW_HIDE,
+ STARTF_USESTDHANDLES, STARTF_USESHOWWINDOW)
+ # https://msdn.microsoft.com/en-us/library/windows/desktop/ms687032(v=vs.85).aspx
+ # Note: In Python 3.3 this constant is found in the _winapi module.
+ _WAIT_TIMEOUT = 0x102
+
+ __all__.extend(["CREATE_NEW_CONSOLE", "CREATE_NEW_PROCESS_GROUP",
+ "STD_INPUT_HANDLE", "STD_OUTPUT_HANDLE",
+ "STD_ERROR_HANDLE", "SW_HIDE",
+ "STARTF_USESTDHANDLES", "STARTF_USESHOWWINDOW"])
+try:
+ MAXFD = os.sysconf("SC_OPEN_MAX")
+except:
+ MAXFD = 256
+
+# This list holds Popen instances for which the underlying process had not
+# exited at the time its __del__ method got called: those processes are
+# wait()ed for synchronously from _cleanup() when a new Popen object is
+# created, to avoid zombie processes.
+_active = []
+
+def _cleanup():
+ for inst in _active[:]:
+ res = inst._internal_poll(_deadstate=sys.maxint)
+ if res is not None:
+ try:
+ _active.remove(inst)
+ except ValueError:
+ # This can happen if two threads create a new Popen instance.
+ # It's harmless that it was already removed, so ignore.
+ pass
+
+PIPE = -1
+STDOUT = -2
+DEVNULL = -3
+
+# This function is only used by multiprocessing; it is here so that people
+# can drop subprocess32 in as a replacement for the stdlib subprocess module.
+
+def _args_from_interpreter_flags():
+ """Return a list of command-line arguments reproducing the current
+ settings in sys.flags and sys.warnoptions."""
+ flag_opt_map = {
+ 'debug': 'd',
+ # 'inspect': 'i',
+ # 'interactive': 'i',
+ 'optimize': 'O',
+ 'dont_write_bytecode': 'B',
+ 'no_user_site': 's',
+ 'no_site': 'S',
+ 'ignore_environment': 'E',
+ 'verbose': 'v',
+ 'bytes_warning': 'b',
+ 'py3k_warning': '3',
+ }
+ args = []
+ for flag, opt in flag_opt_map.items():
+ v = getattr(sys.flags, flag)
+ if v > 0:
+ args.append('-' + opt * v)
+ if getattr(sys.flags, 'hash_randomization') != 0:
+ args.append('-R')
+ for opt in sys.warnoptions:
+ args.append('-W' + opt)
+ return args
+
+
+def _eintr_retry_call(func, *args):
+ while True:
+ try:
+ return func(*args)
+ except (OSError, IOError), e:
+ if e.errno == errno.EINTR:
+ continue
+ raise
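+
+# Example: _eintr_retry_call(os.read, fd, 50000) keeps retrying the read
+# while it fails with EINTR; any other error propagates to the caller.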
+
+
+def _get_exec_path(env=None):
+ """Returns the sequence of directories that will be searched for the
+ named executable (similar to a shell) when launching a process.
+
+ *env* must be an environment variable dict or None. If *env* is None,
+ os.environ will be used.
+ """
+ if env is None:
+ env = os.environ
+ return env.get('PATH', os.defpath).split(os.pathsep)
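+
+# Example: _get_exec_path({'PATH': '/bin:/usr/bin'}) returns
+# ['/bin', '/usr/bin'] on POSIX, where os.pathsep is ':'.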
+
+
+if hasattr(os, 'get_exec_path'):
+ _get_exec_path = os.get_exec_path
+
+
+def call(*popenargs, **kwargs):
+ """Run command with arguments. Wait for command to complete or
+ timeout, then return the returncode attribute.
+
+ The arguments are the same as for the Popen constructor. Example:
+
+ retcode = call(["ls", "-l"])
+ """
+ timeout = kwargs.pop('timeout', None)
+ p = Popen(*popenargs, **kwargs)
+ try:
+ return p.wait(timeout=timeout)
+ except TimeoutExpired:
+ p.kill()
+ p.wait()
+ raise
+
+
+def check_call(*popenargs, **kwargs):
+ """Run command with arguments. Wait for command to complete. If
+ the exit code was zero then return, otherwise raise
+ CalledProcessError. The CalledProcessError object will have the
+ return code in the returncode attribute.
+
+ The arguments are the same as for the call function. Example:
+
+ check_call(["ls", "-l"])
+ """
+ retcode = call(*popenargs, **kwargs)
+ if retcode:
+ cmd = kwargs.get("args")
+ if cmd is None:
+ cmd = popenargs[0]
+ raise CalledProcessError(retcode, cmd)
+ return 0
+
+
+def check_output(*popenargs, **kwargs):
+ r"""Run command with arguments and return its output as a byte string.
+
+ If the exit code was non-zero it raises a CalledProcessError. The
+ CalledProcessError object will have the return code in the returncode
+ attribute and output in the output attribute.
+
+ The arguments are the same as for the Popen constructor. Example:
+
+ >>> check_output(["ls", "-l", "/dev/null"])
+ 'crw-rw-rw- 1 root root 1, 3 Oct 18 2007 /dev/null\n'
+
+ The stdout argument is not allowed as it is used internally.
+ To capture standard error in the result, use stderr=STDOUT.
+
+ >>> check_output(["/bin/sh", "-c",
+ ... "ls -l non_existent_file ; exit 0"],
+ ... stderr=STDOUT)
+ 'ls: non_existent_file: No such file or directory\n'
+ """
+ timeout = kwargs.pop('timeout', None)
+ if 'stdout' in kwargs:
+ raise ValueError('stdout argument not allowed, it will be overridden.')
+ process = Popen(stdout=PIPE, *popenargs, **kwargs)
+ try:
+ output, unused_err = process.communicate(timeout=timeout)
+ except TimeoutExpired:
+ process.kill()
+ output, unused_err = process.communicate()
+ raise TimeoutExpired(process.args, timeout, output=output)
+ retcode = process.poll()
+ if retcode:
+ raise CalledProcessError(retcode, process.args, output=output)
+ return output
+
+
+class CompletedProcess(object):
+ """A process that has finished running.
+ This is returned by run().
+ Attributes:
+ args: The list or str args passed to run().
+ returncode: The exit code of the process, negative for signals.
+ stdout: The standard output (None if not captured).
+ stderr: The standard error (None if not captured).
+ """
+ def __init__(self, args, returncode, stdout=None, stderr=None):
+ self.args = args
+ self.returncode = returncode
+ self.stdout = stdout
+ self.stderr = stderr
+
+ def __repr__(self):
+ args = ['args={!r}'.format(self.args),
+ 'returncode={!r}'.format(self.returncode)]
+ if self.stdout is not None:
+ args.append('stdout={!r}'.format(self.stdout))
+ if self.stderr is not None:
+ args.append('stderr={!r}'.format(self.stderr))
+ return "{}({})".format(type(self).__name__, ', '.join(args))
+
+ def check_returncode(self):
+ """Raise CalledProcessError if the exit code is non-zero."""
+ if self.returncode:
+ raise CalledProcessError(self.returncode, self.args, self.stdout,
+ self.stderr)
+
+
+def run(*popenargs, **kwargs):
+ """Run command with arguments and return a CompletedProcess instance.
+ The returned instance will have attributes args, returncode, stdout and
+ stderr. By default, stdout and stderr are not captured, and those attributes
+ will be None. Pass stdout=PIPE and/or stderr=PIPE in order to capture them.
+ If check is True and the exit code was non-zero, it raises a
+ CalledProcessError. The CalledProcessError object will have the return code
+ in the returncode attribute, and output & stderr attributes if those streams
+ were captured.
+ If timeout is given, and the process takes too long, a TimeoutExpired
+ exception will be raised.
+ There is an optional argument "input", allowing you to
+ pass a string to the subprocess's stdin. If you use this argument
+ you may not also use the Popen constructor's "stdin" argument, as
+ it will be used internally.
+ The other arguments are the same as for the Popen constructor.
+ If universal_newlines=True is passed, the "input" argument must be a
+ string and stdout/stderr in the returned object will be strings rather than
+ bytes.
+ """
+ input = kwargs.pop('input', None)
+ timeout = kwargs.pop('timeout', None)
+ check = kwargs.pop('check', False)
+ if input is not None:
+ if 'stdin' in kwargs:
+ raise ValueError('stdin and input arguments may not both be used.')
+ kwargs['stdin'] = PIPE
+
+ process = Popen(*popenargs, **kwargs)
+ try:
+        process.__enter__()  # No-op really; stands in for "with Popen(...)" on 2.4.
+ try:
+ stdout, stderr = process.communicate(input, timeout=timeout)
+ except TimeoutExpired:
+ process.kill()
+ stdout, stderr = process.communicate()
+ raise TimeoutExpired(process.args, timeout, output=stdout,
+ stderr=stderr)
+ except:
+ process.kill()
+ process.wait()
+ raise
+ retcode = process.poll()
+ if check and retcode:
+ raise CalledProcessError(retcode, process.args,
+ output=stdout, stderr=stderr)
+ finally:
+ # None because our context manager __exit__ does not use them.
+ process.__exit__(None, None, None)
+ return CompletedProcess(process.args, retcode, stdout, stderr)
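+
+# A minimal usage sketch (illustrative file names):
+#   result = run(['grep', 'needle', 'haystack.txt'], stdout=PIPE, timeout=10)
+#   result.check_returncode()  # raises CalledProcessError on non-zero exit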
+
+
+def list2cmdline(seq):
+ """
+ Translate a sequence of arguments into a command line
+ string, using the same rules as the MS C runtime:
+
+ 1) Arguments are delimited by white space, which is either a
+ space or a tab.
+
+ 2) A string surrounded by double quotation marks is
+ interpreted as a single argument, regardless of white space
+ contained within. A quoted string can be embedded in an
+ argument.
+
+ 3) A double quotation mark preceded by a backslash is
+ interpreted as a literal double quotation mark.
+
+ 4) Backslashes are interpreted literally, unless they
+ immediately precede a double quotation mark.
+
+ 5) If backslashes immediately precede a double quotation mark,
+ every pair of backslashes is interpreted as a literal
+ backslash. If the number of backslashes is odd, the last
+ backslash escapes the next double quotation mark as
+ described in rule 3.
+ """
+
+ # See
+ # http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
+ # or search http://msdn.microsoft.com for
+ # "Parsing C++ Command-Line Arguments"
+ result = []
+ needquote = False
+ for arg in seq:
+ bs_buf = []
+
+ # Add a space to separate this argument from the others
+ if result:
+ result.append(' ')
+
+ needquote = (" " in arg) or ("\t" in arg) or not arg
+ if needquote:
+ result.append('"')
+
+ for c in arg:
+ if c == '\\':
+ # Don't know if we need to double yet.
+ bs_buf.append(c)
+ elif c == '"':
+ # Double backslashes.
+ result.append('\\' * len(bs_buf)*2)
+ bs_buf = []
+ result.append('\\"')
+ else:
+ # Normal char
+ if bs_buf:
+ result.extend(bs_buf)
+ bs_buf = []
+ result.append(c)
+
+ # Add remaining backslashes, if any.
+ if bs_buf:
+ result.extend(bs_buf)
+
+ if needquote:
+ result.extend(bs_buf)
+ result.append('"')
+
+ return ''.join(result)
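+
+# Worked example: list2cmdline(['a b', 'c"d']) returns the string
+#   "a b" c\"d
+# The first argument is quoted because of the space (rules 1-2) and the
+# embedded double quote is backslash-escaped (rule 3).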
+
+
+_PLATFORM_DEFAULT_CLOSE_FDS = object()
+
+
+class Popen(object):
+ def __init__(self, args, bufsize=0, executable=None,
+ stdin=None, stdout=None, stderr=None,
+ preexec_fn=None, close_fds=_PLATFORM_DEFAULT_CLOSE_FDS,
+ shell=False, cwd=None, env=None, universal_newlines=False,
+ startupinfo=None, creationflags=0,
+ restore_signals=True, start_new_session=False,
+ pass_fds=()):
+ """Create new Popen instance."""
+ _cleanup()
+ # Held while anything is calling waitpid before returncode has been
+ # updated to prevent clobbering returncode if wait() or poll() are
+ # called from multiple threads at once. After acquiring the lock,
+ # code must re-check self.returncode to see if another thread just
+ # finished a waitpid() call.
+ self._waitpid_lock = threading.Lock()
+
+ self._child_created = False
+ self._input = None
+ self._communication_started = False
+ if not isinstance(bufsize, (int, long)):
+ raise TypeError("bufsize must be an integer")
+
+ if mswindows:
+ if preexec_fn is not None:
+ raise ValueError("preexec_fn is not supported on Windows "
+ "platforms")
+ any_stdio_set = (stdin is not None or stdout is not None or
+ stderr is not None)
+ if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS:
+ if any_stdio_set:
+ close_fds = False
+ else:
+ close_fds = True
+ elif close_fds and any_stdio_set:
+ raise ValueError(
+ "close_fds is not supported on Windows platforms"
+ " if you redirect stdin/stdout/stderr")
+ else:
+ # POSIX
+ if close_fds is _PLATFORM_DEFAULT_CLOSE_FDS:
+ close_fds = True
+ if pass_fds and not close_fds:
+ import warnings
+ warnings.warn("pass_fds overriding close_fds.", RuntimeWarning)
+ close_fds = True
+ if startupinfo is not None:
+ raise ValueError("startupinfo is only supported on Windows "
+ "platforms")
+ if creationflags != 0:
+ raise ValueError("creationflags is only supported on Windows "
+ "platforms")
+
+ self.args = args
+ self.stdin = None
+ self.stdout = None
+ self.stderr = None
+ self.pid = None
+ self.returncode = None
+ self.universal_newlines = universal_newlines
+
+ # Input and output objects. The general principle is like
+ # this:
+ #
+ # Parent Child
+ # ------ -----
+ # p2cwrite ---stdin---> p2cread
+ # c2pread <--stdout--- c2pwrite
+ # errread <--stderr--- errwrite
+ #
+ # On POSIX, the child objects are file descriptors. On
+ # Windows, these are Windows file handles. The parent objects
+ # are file descriptors on both platforms. The parent objects
+ # are -1 when not using PIPEs. The child objects are -1
+ # when not redirecting.
+
+ (p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite) = self._get_handles(stdin, stdout, stderr)
+
+ if mswindows:
+ if p2cwrite != -1:
+ p2cwrite = msvcrt.open_osfhandle(p2cwrite.Detach(), 0)
+ if c2pread != -1:
+ c2pread = msvcrt.open_osfhandle(c2pread.Detach(), 0)
+ if errread != -1:
+ errread = msvcrt.open_osfhandle(errread.Detach(), 0)
+
+ if p2cwrite != -1:
+ self.stdin = os.fdopen(p2cwrite, 'wb', bufsize)
+ if c2pread != -1:
+ if universal_newlines:
+ self.stdout = os.fdopen(c2pread, 'rU', bufsize)
+ else:
+ self.stdout = os.fdopen(c2pread, 'rb', bufsize)
+ if errread != -1:
+ if universal_newlines:
+ self.stderr = os.fdopen(errread, 'rU', bufsize)
+ else:
+ self.stderr = os.fdopen(errread, 'rb', bufsize)
+
+ self._closed_child_pipe_fds = False
+ exception_cleanup_needed = False
+ try:
+ try:
+ self._execute_child(args, executable, preexec_fn, close_fds,
+ pass_fds, cwd, env, universal_newlines,
+ startupinfo, creationflags, shell,
+ p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite,
+ restore_signals, start_new_session)
+ except:
+ # The cleanup is performed within the finally block rather
+ # than simply within this except block before the raise so
+ # that any exceptions raised and handled within it do not
+ # clobber the exception context we want to propagate upwards.
+ # This is only necessary in Python 2.
+ exception_cleanup_needed = True
+ raise
+ finally:
+ if exception_cleanup_needed:
+ for f in filter(None, (self.stdin, self.stdout, self.stderr)):
+ try:
+ f.close()
+ except EnvironmentError:
+ pass # Ignore EBADF or other errors
+
+ if not self._closed_child_pipe_fds:
+ to_close = []
+ if stdin == PIPE:
+ to_close.append(p2cread)
+ if stdout == PIPE:
+ to_close.append(c2pwrite)
+ if stderr == PIPE:
+ to_close.append(errwrite)
+ if hasattr(self, '_devnull'):
+ to_close.append(self._devnull)
+ for fd in to_close:
+ try:
+ os.close(fd)
+ except EnvironmentError:
+ pass
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ if self.stdout:
+ self.stdout.close()
+ if self.stderr:
+ self.stderr.close()
+ if self.stdin:
+ self.stdin.close()
+ # Wait for the process to terminate, to avoid zombies.
+ self.wait()
+
+ def _translate_newlines(self, data):
+ data = data.replace("\r\n", "\n")
+ data = data.replace("\r", "\n")
+ return data
+
+
+ def __del__(self, _maxint=sys.maxint, _active=_active):
+ # If __init__ hasn't had a chance to execute (e.g. if it
+ # was passed an undeclared keyword argument), we don't
+ # have a _child_created attribute at all.
+ if not getattr(self, '_child_created', False):
+ # We didn't get to successfully create a child process.
+ return
+ # In case the child hasn't been waited on, check if it's done.
+ self._internal_poll(_deadstate=_maxint)
+ if self.returncode is None and _active is not None:
+ # Child is still running, keep us alive until we can wait on it.
+ _active.append(self)
+
+
+ def _get_devnull(self):
+ if not hasattr(self, '_devnull'):
+ self._devnull = os.open(os.devnull, os.O_RDWR)
+ return self._devnull
+
+ def _stdin_write(self, input):
+ if input:
+ try:
+ self.stdin.write(input)
+ except EnvironmentError as e:
+ if e.errno == errno.EPIPE:
+ # communicate() must ignore broken pipe error
+ pass
+ elif e.errno == errno.EINVAL :
+ # bpo-19612, bpo-30418: On Windows, stdin.write() fails
+ # with EINVAL if the child process exited or if the child
+ # process is still running but closed the pipe.
+ pass
+ else:
+ raise
+
+ try:
+ self.stdin.close()
+ except EnvironmentError as e:
+ if e.errno in (errno.EPIPE, errno.EINVAL):
+ pass
+ else:
+ raise
+
+ def communicate(self, input=None, timeout=None):
+ """Interact with process: Send data to stdin. Read data from
+ stdout and stderr, until end-of-file is reached. Wait for
+ process to terminate. The optional input argument should be a
+ string to be sent to the child process, or None, if no data
+ should be sent to the child.
+
+ communicate() returns a tuple (stdout, stderr)."""
+
+ if self._communication_started and input:
+ raise ValueError("Cannot send input after starting communication")
+
+ if timeout is not None:
+ endtime = time.time() + timeout
+ else:
+ endtime = None
+
+ # Optimization: If we are not worried about timeouts, we haven't
+ # started communicating, and we have one or zero pipes, using select()
+ # or threads is unnecessary.
+ if (endtime is None and not self._communication_started and
+ [self.stdin, self.stdout, self.stderr].count(None) >= 2):
+ stdout = None
+ stderr = None
+ if self.stdin:
+ self._stdin_write(input)
+ elif self.stdout:
+ stdout = _eintr_retry_call(self.stdout.read)
+ self.stdout.close()
+ elif self.stderr:
+ stderr = _eintr_retry_call(self.stderr.read)
+ self.stderr.close()
+ self.wait()
+ return (stdout, stderr)
+
+ try:
+ stdout, stderr = self._communicate(input, endtime, timeout)
+ finally:
+ self._communication_started = True
+
+ sts = self.wait(timeout=self._remaining_time(endtime))
+
+ return (stdout, stderr)
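+
+    # Typical use (sketch):
+    #   out, err = Popen(cmd, stdout=PIPE, stderr=PIPE).communicate(timeout=30)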
+
+
+ def poll(self):
+ return self._internal_poll()
+
+
+ def _remaining_time(self, endtime):
+ """Convenience for _communicate when computing timeouts."""
+ if endtime is None:
+ return None
+ else:
+ return endtime - time.time()
+
+
+ def _check_timeout(self, endtime, orig_timeout):
+ """Convenience for checking if a timeout has expired."""
+ if endtime is None:
+ return
+ if time.time() > endtime:
+ raise TimeoutExpired(self.args, orig_timeout)
+
+
+ if mswindows:
+ #
+ # Windows methods
+ #
+ def _get_handles(self, stdin, stdout, stderr):
+ """Construct and return tuple with IO objects:
+ p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
+ """
+ if stdin is None and stdout is None and stderr is None:
+ return (-1, -1, -1, -1, -1, -1)
+
+ p2cread, p2cwrite = -1, -1
+ c2pread, c2pwrite = -1, -1
+ errread, errwrite = -1, -1
+
+ if stdin is None:
+ p2cread = _subprocess.GetStdHandle(_subprocess.STD_INPUT_HANDLE)
+ if p2cread is None:
+ p2cread, _ = _subprocess.CreatePipe(None, 0)
+ elif stdin == PIPE:
+ p2cread, p2cwrite = _subprocess.CreatePipe(None, 0)
+ elif stdin == DEVNULL:
+ p2cread = msvcrt.get_osfhandle(self._get_devnull())
+ elif isinstance(stdin, int):
+ p2cread = msvcrt.get_osfhandle(stdin)
+ else:
+ # Assuming file-like object
+ p2cread = msvcrt.get_osfhandle(stdin.fileno())
+ p2cread = self._make_inheritable(p2cread)
+
+ if stdout is None:
+ c2pwrite = _subprocess.GetStdHandle(_subprocess.STD_OUTPUT_HANDLE)
+ if c2pwrite is None:
+ _, c2pwrite = _subprocess.CreatePipe(None, 0)
+ elif stdout == PIPE:
+ c2pread, c2pwrite = _subprocess.CreatePipe(None, 0)
+ elif stdout == DEVNULL:
+ c2pwrite = msvcrt.get_osfhandle(self._get_devnull())
+ elif isinstance(stdout, int):
+ c2pwrite = msvcrt.get_osfhandle(stdout)
+ else:
+ # Assuming file-like object
+ c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
+ c2pwrite = self._make_inheritable(c2pwrite)
+
+ if stderr is None:
+ errwrite = _subprocess.GetStdHandle(_subprocess.STD_ERROR_HANDLE)
+ if errwrite is None:
+ _, errwrite = _subprocess.CreatePipe(None, 0)
+ elif stderr == PIPE:
+ errread, errwrite = _subprocess.CreatePipe(None, 0)
+ elif stderr == STDOUT:
+ errwrite = c2pwrite
+ elif stderr == DEVNULL:
+ errwrite = msvcrt.get_osfhandle(self._get_devnull())
+ elif isinstance(stderr, int):
+ errwrite = msvcrt.get_osfhandle(stderr)
+ else:
+ # Assuming file-like object
+ errwrite = msvcrt.get_osfhandle(stderr.fileno())
+ errwrite = self._make_inheritable(errwrite)
+
+ return (p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite)
+
+
+ def _make_inheritable(self, handle):
+ """Return a duplicate of handle, which is inheritable"""
+ return _subprocess.DuplicateHandle(_subprocess.GetCurrentProcess(),
+ handle, _subprocess.GetCurrentProcess(), 0, 1,
+ _subprocess.DUPLICATE_SAME_ACCESS)
+
+
+ def _find_w9xpopen(self):
+            """Find and return the absolute path to w9xpopen.exe."""
+ w9xpopen = os.path.join(
+ os.path.dirname(_subprocess.GetModuleFileName(0)),
+ "w9xpopen.exe")
+ if not os.path.exists(w9xpopen):
+ # Eeek - file-not-found - possibly an embedding
+ # situation - see if we can locate it in sys.exec_prefix
+ w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix),
+ "w9xpopen.exe")
+ if not os.path.exists(w9xpopen):
+ raise RuntimeError("Cannot locate w9xpopen.exe, which is "
+ "needed for Popen to work with your "
+ "shell or platform.")
+ return w9xpopen
+
+
+ def _execute_child(self, args, executable, preexec_fn, close_fds,
+ pass_fds, cwd, env, universal_newlines,
+ startupinfo, creationflags, shell,
+ p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite,
+ unused_restore_signals, unused_start_new_session):
+ """Execute program (MS Windows version)"""
+
+ assert not pass_fds, "pass_fds not supported on Windows."
+
+ if not isinstance(args, types.StringTypes):
+ args = list2cmdline(args)
+
+ # Process startup details
+ if startupinfo is None:
+ startupinfo = STARTUPINFO()
+ if -1 not in (p2cread, c2pwrite, errwrite):
+ startupinfo.dwFlags |= _subprocess.STARTF_USESTDHANDLES
+ startupinfo.hStdInput = p2cread
+ startupinfo.hStdOutput = c2pwrite
+ startupinfo.hStdError = errwrite
+
+ if shell:
+ startupinfo.dwFlags |= _subprocess.STARTF_USESHOWWINDOW
+ startupinfo.wShowWindow = _subprocess.SW_HIDE
+ comspec = os.environ.get("COMSPEC", "cmd.exe")
+ args = comspec + " /c " + '"%s"' % args
+ if (_subprocess.GetVersion() >= 0x80000000L or
+ os.path.basename(comspec).lower() == "command.com"):
+ # Win9x, or using command.com on NT. We need to
+ # use the w9xpopen intermediate program. For more
+ # information, see KB Q150956
+ # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp)
+ w9xpopen = self._find_w9xpopen()
+ args = '"%s" %s' % (w9xpopen, args)
+ # Not passing CREATE_NEW_CONSOLE has been known to
+ # cause random failures on win9x. Specifically a
+ # dialog: "Your program accessed mem currently in
+ # use at xxx" and a hopeful warning about the
+                    # stability of your system.  The cost is that Ctrl+C
+                    # won't kill children.
+ creationflags |= _subprocess.CREATE_NEW_CONSOLE
+
+ # Start the process
+ try:
+ try:
+ hp, ht, pid, tid = _subprocess.CreateProcess(executable, args,
+ # no special security
+ None, None,
+ int(not close_fds),
+ creationflags,
+ env,
+ cwd,
+ startupinfo)
+ except pywintypes.error, e:
+ # Translate pywintypes.error to WindowsError, which is
+ # a subclass of OSError. FIXME: We should really
+ # translate errno using _sys_errlist (or similar), but
+ # how can this be done from Python?
+ raise WindowsError(*e.args)
+ finally:
+ # Child is launched. Close the parent's copy of those pipe
+ # handles that only the child should have open. You need
+ # to make sure that no handles to the write end of the
+ # output pipe are maintained in this process or else the
+ # pipe will not close when the child process exits and the
+ # ReadFile will hang.
+ if p2cread != -1:
+ p2cread.Close()
+ if c2pwrite != -1:
+ c2pwrite.Close()
+ if errwrite != -1:
+ errwrite.Close()
+ if hasattr(self, '_devnull'):
+ os.close(self._devnull)
+
+ # Retain the process handle, but close the thread handle
+ self._child_created = True
+ self._handle = hp
+ self.pid = pid
+ ht.Close()
+
+ def _internal_poll(self, _deadstate=None,
+ _WaitForSingleObject=_subprocess.WaitForSingleObject,
+ _WAIT_OBJECT_0=_subprocess.WAIT_OBJECT_0,
+ _GetExitCodeProcess=_subprocess.GetExitCodeProcess):
+ """Check if child process has terminated. Returns returncode
+ attribute.
+
+ This method is called by __del__, so it can only refer to objects
+ in its local scope.
+
+ """
+ if self.returncode is None:
+ if _WaitForSingleObject(self._handle, 0) == _WAIT_OBJECT_0:
+ self.returncode = _GetExitCodeProcess(self._handle)
+ return self.returncode
+
+
+ def wait(self, timeout=None, endtime=None):
+ """Wait for child process to terminate. Returns returncode
+ attribute."""
+            if endtime is not None:
+                timeout = self._remaining_time(endtime)
+            if timeout is None:
+                timeout_millis = _subprocess.INFINITE
+            else:
+                timeout_millis = int(timeout * 1000)
+ if self.returncode is None:
+ result = _subprocess.WaitForSingleObject(self._handle,
+ timeout_millis)
+ if result == _WAIT_TIMEOUT:
+ raise TimeoutExpired(self.args, timeout)
+ self.returncode = _subprocess.GetExitCodeProcess(self._handle)
+ return self.returncode
+
+
+ def _readerthread(self, fh, buffer):
+ buffer.append(fh.read())
+ fh.close()
+
+
+ def _communicate(self, input, endtime, orig_timeout):
+ # Start reader threads feeding into a list hanging off of this
+ # object, unless they've already been started.
+ if self.stdout and not hasattr(self, "_stdout_buff"):
+ self._stdout_buff = []
+ self.stdout_thread = \
+ threading.Thread(target=self._readerthread,
+ args=(self.stdout, self._stdout_buff))
+ self.stdout_thread.daemon = True
+ self.stdout_thread.start()
+ if self.stderr and not hasattr(self, "_stderr_buff"):
+ self._stderr_buff = []
+ self.stderr_thread = \
+ threading.Thread(target=self._readerthread,
+ args=(self.stderr, self._stderr_buff))
+ self.stderr_thread.daemon = True
+ self.stderr_thread.start()
+
+ if self.stdin:
+ self._stdin_write(input)
+
+ # Wait for the reader threads, or time out. If we time out, the
+ # threads remain reading and the fds left open in case the user
+ # calls communicate again.
+ if self.stdout is not None:
+ self.stdout_thread.join(self._remaining_time(endtime))
+ if self.stdout_thread.isAlive():
+ raise TimeoutExpired(self.args, orig_timeout)
+ if self.stderr is not None:
+ self.stderr_thread.join(self._remaining_time(endtime))
+ if self.stderr_thread.isAlive():
+ raise TimeoutExpired(self.args, orig_timeout)
+
+ # Collect the output from and close both pipes, now that we know
+ # both have been read successfully.
+ stdout = None
+ stderr = None
+ if self.stdout:
+ stdout = self._stdout_buff
+ self.stdout.close()
+ if self.stderr:
+ stderr = self._stderr_buff
+ self.stderr.close()
+
+ # All data exchanged. Translate lists into strings.
+ if stdout is not None:
+ stdout = stdout[0]
+ if stderr is not None:
+ stderr = stderr[0]
+
+ # Translate newlines, if requested. We cannot let the file
+ # object do the translation: It is based on stdio, which is
+ # impossible to combine with select (unless forcing no
+ # buffering).
+ if self.universal_newlines and hasattr(file, 'newlines'):
+ if stdout:
+ stdout = self._translate_newlines(stdout)
+ if stderr:
+ stderr = self._translate_newlines(stderr)
+
+ return (stdout, stderr)
+
+ def send_signal(self, sig):
+ """Send a signal to the process."""
+ # Don't signal a process that we know has already died.
+ if self.returncode is not None:
+ return
+ if sig == signal.SIGTERM:
+ self.terminate()
+ elif sig == signal.CTRL_C_EVENT:
+ os.kill(self.pid, signal.CTRL_C_EVENT)
+ elif sig == signal.CTRL_BREAK_EVENT:
+ os.kill(self.pid, signal.CTRL_BREAK_EVENT)
+ else:
+ raise ValueError("Unsupported signal: %s" % sig)
+
+ def terminate(self):
+ """Terminates the process."""
+ # Don't terminate a process that we know has already died.
+ if self.returncode is not None:
+ return
+ _subprocess.TerminateProcess(self._handle, 1)
+
+ kill = terminate
+
+ else:
+ #
+ # POSIX methods
+ #
+ def _get_handles(self, stdin, stdout, stderr):
+ """Construct and return tuple with IO objects:
+ p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
+ """
+ p2cread, p2cwrite = -1, -1
+ c2pread, c2pwrite = -1, -1
+ errread, errwrite = -1, -1
+
+ if stdin is None:
+ pass
+ elif stdin == PIPE:
+ p2cread, p2cwrite = _create_pipe()
+ elif stdin == DEVNULL:
+ p2cread = self._get_devnull()
+ elif isinstance(stdin, int):
+ p2cread = stdin
+ else:
+ # Assuming file-like object
+ p2cread = stdin.fileno()
+
+ if stdout is None:
+ pass
+ elif stdout == PIPE:
+ c2pread, c2pwrite = _create_pipe()
+ elif stdout == DEVNULL:
+ c2pwrite = self._get_devnull()
+ elif isinstance(stdout, int):
+ c2pwrite = stdout
+ else:
+ # Assuming file-like object
+ c2pwrite = stdout.fileno()
+
+ if stderr is None:
+ pass
+ elif stderr == PIPE:
+ errread, errwrite = _create_pipe()
+ elif stderr == STDOUT:
+ if c2pwrite != -1:
+ errwrite = c2pwrite
+ else: # child's stdout is not set, use parent's stdout
+ errwrite = sys.__stdout__.fileno()
+ elif stderr == DEVNULL:
+ errwrite = self._get_devnull()
+ elif isinstance(stderr, int):
+ errwrite = stderr
+ else:
+ # Assuming file-like object
+ errwrite = stderr.fileno()
+
+ return (p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite)
+
+
+ if hasattr(os, 'closerange'): # Introduced in 2.6
+ @staticmethod
+ def _closerange(fd_low, fd_high):
+ os.closerange(fd_low, fd_high)
+ else:
+ @staticmethod
+ def _closerange(fd_low, fd_high):
+ for fd in xrange(fd_low, fd_high):
+ while True:
+ try:
+ os.close(fd)
+ except (OSError, IOError), e:
+ if e.errno == errno.EINTR:
+ continue
+ break
+
+
+ def _close_fds(self, but):
+ self._closerange(3, but)
+ self._closerange(but + 1, MAXFD)
+
+
+ def _close_all_but_a_sorted_few_fds(self, fds_to_keep):
+ # precondition: fds_to_keep must be sorted and unique
+ start_fd = 3
+ for fd in fds_to_keep:
+ if fd >= start_fd:
+ self._closerange(start_fd, fd)
+ start_fd = fd + 1
+ if start_fd <= MAXFD:
+ self._closerange(start_fd, MAXFD)
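+
+        # Example: with MAXFD == 256 and fds_to_keep == [5, 9] this closes
+        # fds 3-4, 6-8 and 10-255, leaving 0-2, 5 and 9 open.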
+
+
+ def _execute_child(self, args, executable, preexec_fn, close_fds,
+ pass_fds, cwd, env, universal_newlines,
+ startupinfo, creationflags, shell,
+ p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite,
+ restore_signals, start_new_session):
+ """Execute program (POSIX version)"""
+
+ if isinstance(args, types.StringTypes):
+ args = [args]
+ else:
+ args = list(args)
+
+ if shell:
+ args = ["/bin/sh", "-c"] + args
+ if executable:
+ args[0] = executable
+
+ if executable is None:
+ executable = args[0]
+ orig_executable = executable
+
+ # For transferring possible exec failure from child to parent.
+ # Data format: "exception name:hex errno:description"
+ # Pickle is not used; it is complex and involves memory allocation.
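+            # e.g. "OSError:2:..." where 2 is the errno in hex (ENOENT).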
+ errpipe_read, errpipe_write = _create_pipe()
+ try:
+ try:
+
+ if _posixsubprocess:
+ fs_encoding = sys.getfilesystemencoding()
+ def fs_encode(s):
+ """Encode s for use in the env, fs or cmdline."""
+ if isinstance(s, str):
+ return s
+ else:
+ return s.encode(fs_encoding, 'strict')
+
+ # We must avoid complex work that could involve
+ # malloc or free in the child process to avoid
+ # potential deadlocks, thus we do all this here.
+ # and pass it to fork_exec()
+
+ if env is not None:
+ env_list = [fs_encode(k) + '=' + fs_encode(v)
+ for k, v in env.items()]
+ else:
+ env_list = None # Use execv instead of execve.
+ if os.path.dirname(executable):
+ executable_list = (fs_encode(executable),)
+ else:
+ # This matches the behavior of os._execvpe().
+ path_list = _get_exec_path(env)
+ executable_list = (os.path.join(dir, executable)
+ for dir in path_list)
+ executable_list = tuple(fs_encode(exe)
+ for exe in executable_list)
+ fds_to_keep = set(pass_fds)
+ fds_to_keep.add(errpipe_write)
+ self.pid = _posixsubprocess.fork_exec(
+ args, executable_list,
+ close_fds, sorted(fds_to_keep), cwd, env_list,
+ p2cread, p2cwrite, c2pread, c2pwrite,
+ errread, errwrite,
+ errpipe_read, errpipe_write,
+ restore_signals, start_new_session, preexec_fn)
+ self._child_created = True
+ else:
+ # Pure Python implementation: It is not thread safe.
+ # This implementation may deadlock in the child if your
+ # parent process has any other threads running.
+
+ gc_was_enabled = gc.isenabled()
+ # Disable gc to avoid bug where gc -> file_dealloc ->
+ # write to stderr -> hang. See issue1336
+ gc.disable()
+ try:
+ self.pid = os.fork()
+ except:
+ if gc_was_enabled:
+ gc.enable()
+ raise
+ self._child_created = True
+ if self.pid == 0:
+ # Child
+ reached_preexec = False
+ try:
+ # Close parent's pipe ends
+ if p2cwrite != -1:
+ os.close(p2cwrite)
+ if c2pread != -1:
+ os.close(c2pread)
+ if errread != -1:
+ os.close(errread)
+ os.close(errpipe_read)
+
+ # When duping fds, if there arises a situation
+ # where one of the fds is either 0, 1 or 2, it
+ # is possible that it is overwritten (#12607).
+ if c2pwrite == 0:
+ c2pwrite = os.dup(c2pwrite)
+ if errwrite == 0 or errwrite == 1:
+ errwrite = os.dup(errwrite)
+
+ # Dup fds for child
+ def _dup2(a, b):
+ # dup2() removes the CLOEXEC flag but
+ # we must do it ourselves if dup2()
+ # would be a no-op (issue #10806).
+ if a == b:
+ _set_cloexec(a, False)
+ elif a != -1:
+ os.dup2(a, b)
+ _dup2(p2cread, 0)
+ _dup2(c2pwrite, 1)
+ _dup2(errwrite, 2)
+
+ # Close pipe fds. Make sure we don't close the
+ # same fd more than once, or standard fds.
+ closed = set()
+ for fd in [p2cread, c2pwrite, errwrite]:
+ if fd > 2 and fd not in closed:
+ os.close(fd)
+ closed.add(fd)
+
+ if cwd is not None:
+ os.chdir(cwd)
+
+ # This is a copy of Python/pythonrun.c
+ # _Py_RestoreSignals(). If that were exposed
+ # as a sys._py_restoresignals func it would be
+ # better.. but this pure python implementation
+ # isn't likely to be used much anymore.
+ if restore_signals:
+ signals = ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ')
+ for sig in signals:
+ if hasattr(signal, sig):
+ signal.signal(getattr(signal, sig),
+ signal.SIG_DFL)
+
+ if start_new_session and hasattr(os, 'setsid'):
+ os.setsid()
+
+ reached_preexec = True
+ if preexec_fn:
+ preexec_fn()
+
+ # Close all other fds, if asked for - after
+ # preexec_fn(), which may open FDs.
+ if close_fds:
+ if pass_fds:
+ fds_to_keep = set(pass_fds)
+ fds_to_keep.add(errpipe_write)
+ self._close_all_but_a_sorted_few_fds(
+ sorted(fds_to_keep))
+ else:
+ self._close_fds(but=errpipe_write)
+
+ if env is None:
+ os.execvp(executable, args)
+ else:
+ os.execvpe(executable, args, env)
+
+ except:
+ try:
+ exc_type, exc_value = sys.exc_info()[:2]
+ if isinstance(exc_value, OSError):
+ errno_num = exc_value.errno
+ else:
+ errno_num = 0
+ if not reached_preexec:
+ exc_value = "noexec"
+ message = '%s:%x:%s' % (exc_type.__name__,
+ errno_num, exc_value)
+ os.write(errpipe_write, message)
+ except Exception:
+ # We MUST not allow anything odd happening
+ # above to prevent us from exiting below.
+ pass
+
+ # This exitcode won't be reported to applications
+ # so it really doesn't matter what we return.
+ os._exit(255)
+
+ # Parent
+ if gc_was_enabled:
+ gc.enable()
+ finally:
+ # be sure the FD is closed no matter what
+ os.close(errpipe_write)
+
+ # A pair of non -1s means we created both fds and are
+ # responsible for closing them.
+ # self._devnull is not always defined.
+ devnull_fd = getattr(self, '_devnull', None)
+ if p2cread != -1 and p2cwrite != -1 and p2cread != devnull_fd:
+ os.close(p2cread)
+ if c2pwrite != -1 and c2pread != -1 and c2pwrite != devnull_fd:
+ os.close(c2pwrite)
+ if errwrite != -1 and errread != -1 and errwrite != devnull_fd:
+ os.close(errwrite)
+ if devnull_fd is not None:
+ os.close(devnull_fd)
+ # Prevent a double close of these fds from __init__ on error.
+ self._closed_child_pipe_fds = True
+
+            # Wait for exec to fail or succeed; possibly raising an
+            # exception (limited in size)
+ errpipe_data = ''
+ while True:
+ part = _eintr_retry_call(os.read, errpipe_read, 50000)
+ errpipe_data += part
+ if not part or len(errpipe_data) > 50000:
+ break
+ finally:
+ # be sure the FD is closed no matter what
+ os.close(errpipe_read)
+
+ if errpipe_data != "":
+ try:
+ _eintr_retry_call(os.waitpid, self.pid, 0)
+ except OSError, e:
+ if e.errno != errno.ECHILD:
+ raise
+ try:
+ exception_name, hex_errno, err_msg = (
+ errpipe_data.split(':', 2))
+ except ValueError:
+ exception_name = 'RuntimeError'
+ hex_errno = '0'
+ err_msg = ('Bad exception data from child: ' +
+ repr(errpipe_data))
+ child_exception_type = getattr(
+ exceptions, exception_name, RuntimeError)
+ if issubclass(child_exception_type, OSError) and hex_errno:
+ errno_num = int(hex_errno, 16)
+ child_exec_never_called = (err_msg == "noexec")
+ if child_exec_never_called:
+ err_msg = ""
+ if errno_num != 0:
+ err_msg = os.strerror(errno_num)
+ if errno_num == errno.ENOENT:
+ if child_exec_never_called:
+ # The error must be from chdir(cwd).
+ err_msg += ': ' + repr(cwd)
+ else:
+ err_msg += ': ' + repr(orig_executable)
+ raise child_exception_type(errno_num, err_msg)
+ try:
+ exception = child_exception_type(err_msg)
+ except Exception:
+ exception = RuntimeError(
+ 'Could not re-raise %r exception from the'
+ ' child with error message %r' %
+ (child_exception_type, err_msg))
+ raise exception
+
+
+ def _handle_exitstatus(self, sts, _WIFSIGNALED=os.WIFSIGNALED,
+ _WTERMSIG=os.WTERMSIG, _WIFEXITED=os.WIFEXITED,
+ _WEXITSTATUS=os.WEXITSTATUS, _WIFSTOPPED=os.WIFSTOPPED,
+ _WSTOPSIG=os.WSTOPSIG):
+ """All callers to this function MUST hold self._waitpid_lock."""
+ # This method is called (indirectly) by __del__, so it cannot
+        # refer to anything outside of its local scope.
+ if _WIFSIGNALED(sts):
+ self.returncode = -_WTERMSIG(sts)
+ elif _WIFEXITED(sts):
+ self.returncode = _WEXITSTATUS(sts)
+ elif _WIFSTOPPED(sts):
+ self.returncode = -_WSTOPSIG(sts)
+ else:
+ # Should never happen
+ raise RuntimeError("Unknown child exit status!")
+
+
+ def _internal_poll(self, _deadstate=None, _waitpid=os.waitpid,
+ _WNOHANG=os.WNOHANG, _os_error=os.error, _ECHILD=errno.ECHILD):
+ """Check if child process has terminated. Returns returncode
+ attribute.
+
+ This method is called by __del__, so it cannot reference anything
+ outside of the local scope (nor can any methods it calls).
+
+ """
+ if self.returncode is None:
+ if not self._waitpid_lock.acquire(False):
+ # Something else is busy calling waitpid. Don't allow two
+ # at once. We know nothing yet.
+ return None
+ try:
+ try:
+ if self.returncode is not None:
+ return self.returncode # Another thread waited.
+ pid, sts = _waitpid(self.pid, _WNOHANG)
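+                    # waitpid(WNOHANG) returns pid 0 while the child is
+                    # still running.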
+ if pid == self.pid:
+ self._handle_exitstatus(sts)
+ except _os_error, e:
+ if _deadstate is not None:
+ self.returncode = _deadstate
+ elif e.errno == _ECHILD:
+ # This happens if SIGCLD is set to be ignored or
+ # waiting for child processes has otherwise been
+ # disabled for our process. This child is dead, we
+ # can't get the status.
+ # http://bugs.python.org/issue15756
+ self.returncode = 0
+ finally:
+ self._waitpid_lock.release()
+ return self.returncode
+
+
+ def _try_wait(self, wait_flags):
+ """All callers to this function MUST hold self._waitpid_lock."""
+ try:
+ (pid, sts) = _eintr_retry_call(os.waitpid, self.pid, wait_flags)
+ except OSError, e:
+ if e.errno != errno.ECHILD:
+ raise
+ # This happens if SIGCLD is set to be ignored or waiting
+ # for child processes has otherwise been disabled for our
+ # process. This child is dead, we can't get the status.
+ pid = self.pid
+ sts = 0
+ return (pid, sts)
+
+
+ def wait(self, timeout=None, endtime=None):
+ """Wait for child process to terminate. Returns returncode
+ attribute."""
+ if self.returncode is not None:
+ return self.returncode
+
+        # endtime takes precedence over timeout; timeout is then only
+        # used in the TimeoutExpired message.
+ if endtime is not None or timeout is not None:
+ if endtime is None:
+ endtime = time.time() + timeout
+ elif timeout is None:
+ timeout = self._remaining_time(endtime)
+
+ if endtime is not None:
+ # Enter a busy loop if we have a timeout. This busy loop was
+ # cribbed from Lib/threading.py in Thread.wait() at r71065.
+ delay = 0.0005 # 500 us -> initial delay of 1 ms
+ while True:
+ if self._waitpid_lock.acquire(False):
+ try:
+ if self.returncode is not None:
+ break # Another thread waited.
+ (pid, sts) = self._try_wait(os.WNOHANG)
+ assert pid == self.pid or pid == 0
+ if pid == self.pid:
+ self._handle_exitstatus(sts)
+ break
+ finally:
+ self._waitpid_lock.release()
+ remaining = self._remaining_time(endtime)
+ if remaining <= 0:
+ raise TimeoutExpired(self.args, timeout)
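+                # Exponential backoff, capped at 50 ms and at the time
+                # remaining before the deadline.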
+ delay = min(delay * 2, remaining, .05)
+ time.sleep(delay)
+ else:
+ while self.returncode is None:
+ self._waitpid_lock.acquire()
+ try:
+ if self.returncode is not None:
+ break # Another thread waited.
+ (pid, sts) = self._try_wait(0)
+ # Check the pid and loop as waitpid has been known to
+ # return 0 even without WNOHANG in odd situations.
+ # http://bugs.python.org/issue14396.
+ if pid == self.pid:
+ self._handle_exitstatus(sts)
+ finally:
+ self._waitpid_lock.release()
+ return self.returncode
+
+
+ def _communicate(self, input, endtime, orig_timeout):
+ if self.stdin and not self._communication_started:
+ # Flush stdio buffer. This might block, if the user has
+ # been writing to .stdin in an uncontrolled fashion.
+ self.stdin.flush()
+ if not input:
+ self.stdin.close()
+
+ if _has_poll:
+ stdout, stderr = self._communicate_with_poll(input, endtime,
+ orig_timeout)
+ else:
+ stdout, stderr = self._communicate_with_select(input, endtime,
+ orig_timeout)
+
+ self.wait(timeout=self._remaining_time(endtime))
+
+ # All data exchanged. Translate lists into strings.
+ if stdout is not None:
+ stdout = ''.join(stdout)
+ if stderr is not None:
+ stderr = ''.join(stderr)
+
+ # Translate newlines, if requested. We cannot let the file
+ # object do the translation: It is based on stdio, which is
+ # impossible to combine with select (unless forcing no
+ # buffering).
+ if self.universal_newlines and hasattr(file, 'newlines'):
+ if stdout:
+ stdout = self._translate_newlines(stdout)
+ if stderr:
+ stderr = self._translate_newlines(stderr)
+
+ return (stdout, stderr)
+
+
+ def _communicate_with_poll(self, input, endtime, orig_timeout):
+ stdout = None # Return
+ stderr = None # Return
+
+ if not self._communication_started:
+ self._fd2file = {}
+
+ poller = select.poll()
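+        # Helpers that keep the poller registrations and the
+        # fd -> file object map in sync as pipes open and drain.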
+ def register_and_append(file_obj, eventmask):
+ poller.register(file_obj.fileno(), eventmask)
+ self._fd2file[file_obj.fileno()] = file_obj
+
+ def close_unregister_and_remove(fd):
+ poller.unregister(fd)
+ self._fd2file[fd].close()
+ self._fd2file.pop(fd)
+
+ if self.stdin and input:
+ register_and_append(self.stdin, select.POLLOUT)
+
+ # Only create this mapping if we haven't already.
+ if not self._communication_started:
+ self._fd2output = {}
+ if self.stdout:
+ self._fd2output[self.stdout.fileno()] = []
+ if self.stderr:
+ self._fd2output[self.stderr.fileno()] = []
+
+ select_POLLIN_POLLPRI = select.POLLIN | select.POLLPRI
+ if self.stdout:
+ register_and_append(self.stdout, select_POLLIN_POLLPRI)
+ stdout = self._fd2output[self.stdout.fileno()]
+ if self.stderr:
+ register_and_append(self.stderr, select_POLLIN_POLLPRI)
+ stderr = self._fd2output[self.stderr.fileno()]
+
+ # Save the input here so that if we time out while communicating,
+ # we can continue sending input if we retry.
+ if self.stdin and self._input is None:
+ self._input_offset = 0
+ self._input = input
+ if self.universal_newlines and isinstance(self._input, unicode):
+ self._input = self._input.encode(
+ self.stdin.encoding or sys.getdefaultencoding())
+
+ while self._fd2file:
+ try:
+ ready = poller.poll(self._remaining_time(endtime))
+ except select.error, e:
+ if e.args[0] == errno.EINTR:
+ continue
+ raise
+ self._check_timeout(endtime, orig_timeout)
+
+ for fd, mode in ready:
+ if mode & select.POLLOUT:
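+                    # The poll/select "writable" guarantee only covers
+                    # writes of up to _PIPE_BUF bytes, so cap the chunk
+                    # at that size.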
+ chunk = self._input[self._input_offset :
+ self._input_offset + _PIPE_BUF]
+ self._input_offset += os.write(fd, chunk)
+ if self._input_offset >= len(self._input):
+ close_unregister_and_remove(fd)
+ elif mode & select_POLLIN_POLLPRI:
+ data = os.read(fd, 4096)
+ if not data:
+ close_unregister_and_remove(fd)
+ self._fd2output[fd].append(data)
+ else:
+ # Ignore hang up or errors.
+ close_unregister_and_remove(fd)
+
+ return (stdout, stderr)
+
+
+ def _communicate_with_select(self, input, endtime, orig_timeout):
+ if not self._communication_started:
+ self._read_set = []
+ self._write_set = []
+ if self.stdin and input:
+ self._write_set.append(self.stdin)
+ if self.stdout:
+ self._read_set.append(self.stdout)
+ if self.stderr:
+ self._read_set.append(self.stderr)
+
+ if self.stdin and self._input is None:
+ self._input_offset = 0
+ self._input = input
+ if self.universal_newlines and isinstance(self._input, unicode):
+ self._input = self._input.encode(
+ self.stdin.encoding or sys.getdefaultencoding())
+
+ stdout = None # Return
+ stderr = None # Return
+
+ if self.stdout:
+ if not self._communication_started:
+ self._stdout_buff = []
+ stdout = self._stdout_buff
+ if self.stderr:
+ if not self._communication_started:
+ self._stderr_buff = []
+ stderr = self._stderr_buff
+
+ while self._read_set or self._write_set:
+ try:
+ (rlist, wlist, xlist) = \
+ select.select(self._read_set, self._write_set, [],
+ self._remaining_time(endtime))
+ except select.error, e:
+ if e.args[0] == errno.EINTR:
+ continue
+ raise
+
+ # According to the docs, returning three empty lists indicates
+ # that the timeout expired.
+ if not (rlist or wlist or xlist):
+ raise TimeoutExpired(self.args, orig_timeout)
+ # We also check what time it is ourselves for good measure.
+ self._check_timeout(endtime, orig_timeout)
+
+ if self.stdin in wlist:
+ chunk = self._input[self._input_offset :
+ self._input_offset + _PIPE_BUF]
+ try:
+ bytes_written = os.write(self.stdin.fileno(), chunk)
+ except EnvironmentError as e:
+ if e.errno == errno.EPIPE:
+ self._write_set.remove(self.stdin)
+ self.stdin.close()
+ else:
+ raise
+ else:
+ self._input_offset += bytes_written
+ if self._input_offset >= len(self._input):
+ self.stdin.close()
+ self._write_set.remove(self.stdin)
+
+ if self.stdout in rlist:
+ data = os.read(self.stdout.fileno(), 1024)
+ if data == "":
+ self.stdout.close()
+ self._read_set.remove(self.stdout)
+ stdout.append(data)
+
+ if self.stderr in rlist:
+ data = os.read(self.stderr.fileno(), 1024)
+ if data == "":
+ self.stderr.close()
+ self._read_set.remove(self.stderr)
+ stderr.append(data)
+
+ return (stdout, stderr)
+
+
+    def send_signal(self, sig):
+        """Send a signal to the process."""
+        # Skip signalling a process that we know has already died.
+        if self.returncode is None:
+            os.kill(self.pid, sig)
+
+    def terminate(self):
+        """Terminate the process with SIGTERM."""
+        self.send_signal(signal.SIGTERM)
+
+    def kill(self):
+        """Kill the process with SIGKILL."""
+        self.send_signal(signal.SIGKILL)
diff --git a/contrib/deprecated/python/subprocess32/test_subprocess32.py b/contrib/deprecated/python/subprocess32/test_subprocess32.py
new file mode 100644
index 0000000000..bd4276a936
--- /dev/null
+++ b/contrib/deprecated/python/subprocess32/test_subprocess32.py
@@ -0,0 +1,2485 @@
+import unittest
+from test import test_support
+import subprocess32
+subprocess = subprocess32
+import sys
+try:
+ import ctypes
+except ImportError:
+ ctypes = None
+else:
+ import ctypes.util
+import signal
+import os
+import errno
+import tempfile
+import textwrap
+import time
+try:
+ import threading
+except ImportError:
+ threading = None
+import re
+#import sysconfig
+import select
+import shutil
+try:
+ import gc
+except ImportError:
+ gc = None
+import pickle
+
+mswindows = (sys.platform == "win32")
+yenv = '''
+import os
+os.environ['Y_PYTHON_ENTRY_POINT'] = ':main'
+'''
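+# Prepended to the code of every spawned child so that, in this test build,
+# the child executes the snippet as a plain interpreter would (presumably
+# what Y_PYTHON_ENTRY_POINT=':main' selects).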
+
+#
+# Depends on the following external programs: Python
+#
+
+if mswindows:
+ SETBINARY = ('import msvcrt; msvcrt.setmode(sys.stdout.fileno(), '
+ 'os.O_BINARY);')
+else:
+ SETBINARY = ''
+
+
+try:
+ mkstemp = tempfile.mkstemp
+except AttributeError:
+ # tempfile.mkstemp is not available
+ def mkstemp():
+ """Replacement for mkstemp, calling mktemp."""
+ fname = tempfile.mktemp()
+ return os.open(fname, os.O_RDWR|os.O_CREAT), fname
+
+try:
+ strip_python_stderr = test_support.strip_python_stderr
+except AttributeError:
+ # Copied from the test.test_support module in 2.7.
+ def strip_python_stderr(stderr):
+ """Strip the stderr of a Python process from potential debug output
+ emitted by the interpreter.
+
+ This will typically be run on the result of the communicate() method
+ of a subprocess.Popen object.
+ """
+ stderr = re.sub(r"\[\d+ refs\]\r?\n?$", "", stderr).strip()
+ return stderr
+
+class BaseTestCase(unittest.TestCase):
+ def setUp(self):
+ os.environ['Y_PYTHON_ENTRY_POINT'] = ':main'
+ # Try to minimize the number of children we have so this test
+ # doesn't crash on some buildbots (Alphas in particular).
+ reap_children()
+ if not hasattr(unittest.TestCase, 'addCleanup'):
+ self._cleanups = []
+
+ def tearDown(self):
+ try:
+ for inst in subprocess._active:
+ inst.wait()
+ subprocess._cleanup()
+ self.assertFalse(subprocess._active, "subprocess._active not empty")
+ finally:
+ if self._use_our_own_cleanup_implementation:
+ self._doCleanups()
+
+ if not hasattr(unittest.TestCase, 'assertIn'):
+ def assertIn(self, a, b, msg=None):
+ self.assert_((a in b), msg or ('%r not in %r' % (a, b)))
+ def assertNotIn(self, a, b, msg=None):
+ self.assert_((a not in b), msg or ('%r in %r' % (a, b)))
+
+ if not hasattr(unittest.TestCase, 'skipTest'):
+ def skipTest(self, message):
+ """These will still fail but it'll be clear that it is okay."""
+ self.fail('SKIPPED - %s\n' % (message,))
+
+ def _addCleanup(self, function, *args, **kwargs):
+ """Add a function, with arguments, to be called when the test is
+ completed. Functions added are called on a LIFO basis and are
+ called after tearDown on test failure or success.
+
+        Unlike unittest2 or Python 2.7, cleanups are not run if setUp fails.
+ That is easier to implement in this subclass and is all we need.
+ """
+ self._cleanups.append((function, args, kwargs))
+
+ def _doCleanups(self):
+ """Execute all cleanup functions. Normally called for you after
+ tearDown."""
+ while self._cleanups:
+ function, args, kwargs = self._cleanups.pop()
+ try:
+ function(*args, **kwargs)
+ except KeyboardInterrupt:
+ raise
+ except:
+ pass
+
+ _use_our_own_cleanup_implementation = False
+ if not hasattr(unittest.TestCase, 'addCleanup'):
+ _use_our_own_cleanup_implementation = True
+ addCleanup = _addCleanup
+
+ def assertStderrEqual(self, stderr, expected, msg=None):
+ # In a debug build, stuff like "[6580 refs]" is printed to stderr at
+ # shutdown time. That frustrates tests trying to check stderr produced
+ # from a spawned Python process.
+ actual = strip_python_stderr(stderr)
+ # strip_python_stderr also strips whitespace, so we do too.
+ expected = expected.strip()
+ self.assertEqual(actual, expected, msg)
+
+
+class PopenTestException(Exception):
+ pass
+
+
+class PopenExecuteChildRaises(subprocess32.Popen):
+ """Popen subclass for testing cleanup of subprocess.PIPE filehandles when
+ _execute_child fails.
+ """
+ def _execute_child(self, *args, **kwargs):
+ raise PopenTestException("Forced Exception for Test")
+
+
+class ProcessTestCase(BaseTestCase):
+
+ def test_call_seq(self):
+ # call() function with sequence argument
+ rc = subprocess.call([sys.executable, "-c", yenv +
+ "import sys; sys.exit(47)"])
+ self.assertEqual(rc, 47)
+
+ def test_call_timeout(self):
+ # call() function with timeout argument; we want to test that the child
+ # process gets killed when the timeout expires. If the child isn't
+ # killed, this call will deadlock since subprocess.call waits for the
+ # child.
+ self.assertRaises(subprocess.TimeoutExpired, subprocess.call,
+ [sys.executable, "-c", yenv + "while True: pass"],
+ timeout=0.1)
+
+ def test_check_call_zero(self):
+ # check_call() function with zero return code
+ rc = subprocess.check_call([sys.executable, "-c", yenv +
+ "import sys; sys.exit(0)"])
+ self.assertEqual(rc, 0)
+
+ def test_check_call_nonzero(self):
+ # check_call() function with non-zero return code
+ try:
+ subprocess.check_call([sys.executable, "-c", yenv +
+ "import sys; sys.exit(47)"])
+ except subprocess.CalledProcessError, c:
+ self.assertEqual(c.returncode, 47)
+
+ def test_check_output(self):
+ # check_output() function with zero return code
+ output = subprocess.check_output(
+ [sys.executable, "-c", yenv + "print 'BDFL'"])
+ self.assertIn('BDFL', output)
+
+ def test_check_output_nonzero(self):
+        # check_output() function with non-zero return code
+ try:
+ subprocess.check_output(
+ [sys.executable, "-c", yenv + "import sys; sys.exit(5)"])
+ except subprocess.CalledProcessError, c:
+ self.assertEqual(c.returncode, 5)
+
+ def test_check_output_stderr(self):
+ # check_output() function stderr redirected to stdout
+ output = subprocess.check_output(
+ [sys.executable, "-c", yenv + "import sys; sys.stderr.write('BDFL')"],
+ stderr=subprocess.STDOUT)
+ self.assertIn('BDFL', output)
+
+ def test_check_output_stdout_arg(self):
+        # check_output() raises ValueError when a stdout argument is supplied
+ try:
+ output = subprocess.check_output(
+ [sys.executable, "-c", yenv + "print 'will not be run'"],
+ stdout=sys.stdout)
+ self.fail("Expected ValueError when stdout arg supplied.")
+ except ValueError, c:
+ self.assertIn('stdout', c.args[0])
+
+ def test_check_output_timeout(self):
+ # check_output() function with timeout arg
+ try:
+ output = subprocess.check_output(
+ [sys.executable, "-c", yenv +
+ "import sys; sys.stdout.write('BDFL')\n"
+ "sys.stdout.flush()\n"
+ "while True: pass"],
+ timeout=0.5)
+ except subprocess.TimeoutExpired, exception:
+ self.assertEqual(exception.output, 'BDFL')
+ else:
+ self.fail("Expected TimeoutExpired.")
+
+ def test_call_kwargs(self):
+ # call() function with keyword args
+ newenv = os.environ.copy()
+ newenv["FRUIT"] = "banana"
+ rc = subprocess.call([sys.executable, "-c", yenv +
+ 'import sys, os;'
+ 'sys.exit(os.getenv("FRUIT")=="banana")'],
+ env=newenv)
+ self.assertEqual(rc, 1)
+
+ def test_stdin_none(self):
+ # .stdin is None when not redirected
+ p = subprocess.Popen([sys.executable, "-c", yenv + 'print "banana"'],
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ p.wait()
+ self.assertEqual(p.stdin, None)
+
+ def test_stdout_none(self):
+ # .stdout is None when not redirected, and the child's stdout will
+ # be inherited from the parent. In order to test this we run a
+ # subprocess in a subprocess:
+ # this_test
+ # \-- subprocess created by this test (parent)
+ # \-- subprocess created by the parent subprocess (child)
+ # The parent doesn't specify stdout, so the child will use the
+ # parent's stdout. This test checks that the message printed by the
+ # child goes to the parent stdout. The parent also checks that the
+ # child's stdout is None. See #11963.
+ code = ('import sys; from subprocess32 import Popen, PIPE;'
+ 'p = Popen([sys.executable, "-c", "print \'test_stdout_none\'"],'
+ ' stdin=PIPE, stderr=PIPE);'
+ 'p.wait(); assert p.stdout is None;')
+ p = subprocess.Popen([sys.executable, "-c", yenv + code],
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ self.addCleanup(p.stdout.close)
+ self.addCleanup(p.stderr.close)
+ out, err = p.communicate()
+ self.assertEqual(p.returncode, 0, err)
+ self.assertEqual(out.rstrip(), 'test_stdout_none')
+
+ def test_stderr_none(self):
+ # .stderr is None when not redirected
+ p = subprocess.Popen([sys.executable, "-c", yenv + 'print "banana"'],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+ p.wait()
+ self.assertEqual(p.stderr, None)
+
+ # For use in the test_cwd* tests below.
+ def _normalize_cwd(self, cwd):
+ # Normalize an expected cwd (for Tru64 support).
+ # We can't use os.path.realpath since it doesn't expand Tru64 {memb}
+ # strings. See bug #1063571.
+ original_cwd = os.getcwd()
+ os.chdir(cwd)
+ cwd = os.getcwd()
+ os.chdir(original_cwd)
+ return cwd
+
+ # For use in the test_cwd* tests below.
+ def _split_python_path(self):
+ # Return normalized (python_dir, python_base).
+ python_path = os.path.realpath(sys.executable)
+ return os.path.split(python_path)
+
+ # For use in the test_cwd* tests below.
+ def _assert_cwd(self, expected_cwd, python_arg, **kwargs):
+ # Invoke Python via Popen, and assert that (1) the call succeeds,
+ # and that (2) the current working directory of the child process
+ # matches *expected_cwd*.
+ p = subprocess.Popen([python_arg, "-c", yenv +
+ "import os, sys; "
+ "sys.stdout.write(os.getcwd()); "
+ "sys.exit(47)"],
+ stdout=subprocess.PIPE,
+ **kwargs)
+ self.addCleanup(p.stdout.close)
+ p.wait()
+ self.assertEqual(47, p.returncode)
+ normcase = os.path.normcase
+ self.assertEqual(normcase(expected_cwd),
+ normcase(p.stdout.read().decode("utf-8")))
+
+ def test_cwd(self):
+ # Check that cwd changes the cwd for the child process.
+ temp_dir = tempfile.gettempdir()
+ temp_dir = self._normalize_cwd(temp_dir)
+ self._assert_cwd(temp_dir, sys.executable, cwd=temp_dir)
+
+ if not mswindows: # pending resolution of issue #15533
+ def test_cwd_with_relative_arg(self):
+ # Check that Popen looks for args[0] relative to cwd if args[0]
+ # is relative.
+ python_dir, python_base = self._split_python_path()
+ rel_python = os.path.join(os.curdir, python_base)
+
+ path = 'tempcwd'
+ saved_dir = os.getcwd()
+ os.mkdir(path)
+ try:
+ os.chdir(path)
+ wrong_dir = os.getcwd()
+ # Before calling with the correct cwd, confirm that the call fails
+ # without cwd and with the wrong cwd.
+ self.assertRaises(OSError, subprocess.Popen,
+ [rel_python])
+ self.assertRaises(OSError, subprocess.Popen,
+ [rel_python], cwd=wrong_dir)
+ python_dir = self._normalize_cwd(python_dir)
+ self._assert_cwd(python_dir, rel_python, cwd=python_dir)
+ finally:
+ os.chdir(saved_dir)
+ shutil.rmtree(path)
+
+ def test_cwd_with_relative_executable(self):
+ # Check that Popen looks for executable relative to cwd if executable
+ # is relative (and that executable takes precedence over args[0]).
+ python_dir, python_base = self._split_python_path()
+ rel_python = os.path.join(os.curdir, python_base)
+ doesntexist = "somethingyoudonthave"
+
+ path = 'tempcwd'
+ saved_dir = os.getcwd()
+ os.mkdir(path)
+ try:
+ os.chdir(path)
+ wrong_dir = os.getcwd()
+ # Before calling with the correct cwd, confirm that the call fails
+ # without cwd and with the wrong cwd.
+ self.assertRaises(OSError, subprocess.Popen,
+ [doesntexist], executable=rel_python)
+ self.assertRaises(OSError, subprocess.Popen,
+ [doesntexist], executable=rel_python,
+ cwd=wrong_dir)
+ python_dir = self._normalize_cwd(python_dir)
+ self._assert_cwd(python_dir, doesntexist, executable=rel_python,
+ cwd=python_dir)
+ finally:
+ os.chdir(saved_dir)
+ shutil.rmtree(path)
+
+ def test_cwd_with_absolute_arg(self):
+ # Check that Popen can find the executable when the cwd is wrong
+ # if args[0] is an absolute path.
+ python_dir, python_base = self._split_python_path()
+ abs_python = os.path.join(python_dir, python_base)
+ rel_python = os.path.join(os.curdir, python_base)
+ wrong_dir = tempfile.mkdtemp()
+ wrong_dir = os.path.realpath(wrong_dir)
+ try:
+ # Before calling with an absolute path, confirm that using a
+ # relative path fails.
+ self.assertRaises(OSError, subprocess.Popen,
+ [rel_python], cwd=wrong_dir)
+ wrong_dir = self._normalize_cwd(wrong_dir)
+ self._assert_cwd(wrong_dir, abs_python, cwd=wrong_dir)
+ finally:
+ shutil.rmtree(wrong_dir)
+
+ def test_executable_with_cwd(self):
+ python_dir, python_base = self._split_python_path()
+ python_dir = self._normalize_cwd(python_dir)
+ self._assert_cwd(python_dir, "somethingyoudonthave",
+ executable=sys.executable, cwd=python_dir)
+
+ #@unittest.skipIf(sysconfig.is_python_build(),
+ # "need an installed Python. See #7774")
+ #def test_executable_without_cwd(self):
+ # # For a normal installation, it should work without 'cwd'
+ # # argument. For test runs in the build directory, see #7774.
+ # self._assert_cwd('', "somethingyoudonthave", executable=sys.executable)
+
+ def test_stdin_pipe(self):
+ # stdin redirection
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys; sys.exit(sys.stdin.read() == "pear")'],
+ stdin=subprocess.PIPE)
+ p.stdin.write("pear")
+ p.stdin.close()
+ p.wait()
+ self.assertEqual(p.returncode, 1)
+
+ def test_stdin_filedes(self):
+ # stdin is set to open file descriptor
+ tf = tempfile.TemporaryFile()
+ d = tf.fileno()
+ os.write(d, "pear")
+ os.lseek(d, 0, 0)
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys; sys.exit(sys.stdin.read() == "pear")'],
+ stdin=d)
+ p.wait()
+ self.assertEqual(p.returncode, 1)
+
+ def test_stdin_fileobj(self):
+ # stdin is set to open file object
+ tf = tempfile.TemporaryFile()
+ tf.write("pear")
+ tf.seek(0)
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys; sys.exit(sys.stdin.read() == "pear")'],
+ stdin=tf)
+ p.wait()
+ self.assertEqual(p.returncode, 1)
+
+ def test_stdout_pipe(self):
+ # stdout redirection
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys; sys.stdout.write("orange")'],
+ stdout=subprocess.PIPE)
+ self.assertEqual(p.stdout.read(), "orange")
+
+ def test_stdout_filedes(self):
+ # stdout is set to open file descriptor
+ tf = tempfile.TemporaryFile()
+ d = tf.fileno()
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys; sys.stdout.write("orange")'],
+ stdout=d)
+ p.wait()
+ os.lseek(d, 0, 0)
+ self.assertEqual(os.read(d, 1024), "orange")
+
+ def test_stdout_fileobj(self):
+ # stdout is set to open file object
+ tf = tempfile.TemporaryFile()
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys; sys.stdout.write("orange")'],
+ stdout=tf)
+ p.wait()
+ tf.seek(0)
+ self.assertEqual(tf.read(), "orange")
+
+ def test_stderr_pipe(self):
+ # stderr redirection
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys; sys.stderr.write("strawberry")'],
+ stderr=subprocess.PIPE)
+ self.assertStderrEqual(p.stderr.read(), "strawberry")
+
+ def test_stderr_filedes(self):
+ # stderr is set to open file descriptor
+ tf = tempfile.TemporaryFile()
+ d = tf.fileno()
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys; sys.stderr.write("strawberry")'],
+ stderr=d)
+ p.wait()
+ os.lseek(d, 0, 0)
+ self.assertStderrEqual(os.read(d, 1024), "strawberry")
+
+ def test_stderr_fileobj(self):
+ # stderr is set to open file object
+ tf = tempfile.TemporaryFile()
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys; sys.stderr.write("strawberry")'],
+ stderr=tf)
+ p.wait()
+ tf.seek(0)
+ self.assertStderrEqual(tf.read(), "strawberry")
+
+ def test_stderr_redirect_with_no_stdout_redirect(self):
+ # test stderr=STDOUT while stdout=None (not set)
+
+ # - grandchild prints to stderr
+ # - child redirects grandchild's stderr to its stdout
+ # - the parent should get grandchild's stderr in child's stdout
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys, subprocess32 as subprocess;'
+ 'rc = subprocess.call([sys.executable, "-c",'
+ ' "import sys;"'
+ ' "sys.stderr.write(\'42\')"],'
+ ' stderr=subprocess.STDOUT);'
+ 'sys.exit(rc)'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ stdout, stderr = p.communicate()
+ #NOTE: stdout should get stderr from grandchild
+ self.assertStderrEqual(stdout, '42')
+ self.assertStderrEqual(stderr, '') # should be empty
+ self.assertEqual(p.returncode, 0)
+
+ def test_stdout_stderr_pipe(self):
+ # capture stdout and stderr to the same pipe
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys;'
+ 'sys.stdout.write("apple");'
+ 'sys.stdout.flush();'
+ 'sys.stderr.write("orange")'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ self.assertStderrEqual(p.stdout.read(), "appleorange")
+
+ def test_stdout_stderr_file(self):
+ # capture stdout and stderr to the same open file
+ tf = tempfile.TemporaryFile()
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys;'
+ 'sys.stdout.write("apple");'
+ 'sys.stdout.flush();'
+ 'sys.stderr.write("orange")'],
+ stdout=tf,
+ stderr=tf)
+ p.wait()
+ tf.seek(0)
+ self.assertStderrEqual(tf.read(), "appleorange")
+
+ def test_stdout_filedes_of_stdout(self):
+ # stdout is set to 1 (#1531862).
+ # To avoid printing the text on stdout, we do something similar to
+ # test_stdout_none (see above). The parent subprocess calls the child
+ # subprocess passing stdout=1, and this test uses stdout=PIPE in
+ # order to capture and check the output of the parent. See #11963.
+ code = ('import sys, subprocess32; '
+ 'rc = subprocess32.call([sys.executable, "-c", '
+ ' "import os, sys; sys.exit(os.write(sys.stdout.fileno(), '
+ '\'test with stdout=1\'))"], stdout=1); '
+ 'assert rc == 18')
+ p = subprocess.Popen([sys.executable, "-c", yenv + code],
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ self.addCleanup(p.stdout.close)
+ self.addCleanup(p.stderr.close)
+ out, err = p.communicate()
+ self.assertEqual(p.returncode, 0, err)
+ self.assertEqual(out.rstrip(), 'test with stdout=1')
+
+ def test_stdout_devnull(self):
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'for i in range(10240):'
+ 'print("x" * 1024)'],
+ stdout=subprocess.DEVNULL)
+ p.wait()
+ self.assertEqual(p.stdout, None)
+
+ def test_stderr_devnull(self):
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys\n'
+ 'for i in range(10240):'
+ 'sys.stderr.write("x" * 1024)'],
+ stderr=subprocess.DEVNULL)
+ p.wait()
+ self.assertEqual(p.stderr, None)
+
+ def test_stdin_devnull(self):
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys;'
+ 'sys.stdin.read(1)'],
+ stdin=subprocess.DEVNULL)
+ p.wait()
+ self.assertEqual(p.stdin, None)
+
+ def test_env(self):
+ newenv = os.environ.copy()
+ newenv["FRUIT"] = "orange"
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys,os;'
+ 'sys.stdout.write(os.getenv("FRUIT"))'],
+ stdout=subprocess.PIPE,
+ env=newenv)
+ try:
+ stdout, stderr = p.communicate()
+ self.assertEqual(stdout, "orange")
+ finally:
+ p.__exit__(None, None, None)
+
+ def test_empty_env(self):
+ """test_empty_env() - verify that env={} is as empty as possible."""
+
+ def is_env_var_to_ignore(n):
+ """Determine if an environment variable is under our control."""
+ # This excludes some __CF_* and VERSIONER_* keys MacOS insists
+ # on adding even when the environment in exec is empty.
+ # Gentoo sandboxes also force LD_PRELOAD and SANDBOX_* to exist.
+ return ('VERSIONER' in n or '__CF' in n or # MacOS
+ n == 'LD_PRELOAD' or n.startswith('SANDBOX')) # Gentoo
+
+ p = subprocess.Popen(
+ [sys.executable, '-c',
+ 'import os; print(list(os.environ.keys()))'],
+ stdout=subprocess.PIPE, env={'Y_PYTHON_ENTRY_POINT': ':main'})
+ try:
+ stdout, stderr = p.communicate()
+ child_env_names = eval(stdout.strip())
+ self.assertTrue(isinstance(child_env_names, list),
+ msg=repr(child_env_names))
+ child_env_names = [k for k in child_env_names
+ if not is_env_var_to_ignore(k)]
+ self.assertEqual(child_env_names, [])
+ finally:
+ p.__exit__(None, None, None)
+
+ def test_communicate_stdin(self):
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys;'
+ 'sys.exit(sys.stdin.read() == "pear")'],
+ stdin=subprocess.PIPE)
+ p.communicate("pear")
+ self.assertEqual(p.returncode, 1)
+
+ def test_communicate_stdout(self):
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys; sys.stdout.write("pineapple")'],
+ stdout=subprocess.PIPE)
+ (stdout, stderr) = p.communicate()
+ self.assertEqual(stdout, "pineapple")
+ self.assertEqual(stderr, None)
+
+ def test_communicate_stderr(self):
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys; sys.stderr.write("pineapple")'],
+ stderr=subprocess.PIPE)
+ (stdout, stderr) = p.communicate()
+ self.assertEqual(stdout, None)
+ self.assertStderrEqual(stderr, "pineapple")
+
+ def test_communicate(self):
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys,os;'
+ 'sys.stderr.write("pineapple");'
+ 'sys.stdout.write(sys.stdin.read())'],
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ (stdout, stderr) = p.communicate("banana")
+ self.assertEqual(stdout, "banana")
+ self.assertStderrEqual(stderr, "pineapple")
+
+ def test_communicate_timeout(self):
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys,os,time;'
+ 'sys.stderr.write("pineapple\\n");'
+ 'time.sleep(1);'
+ 'sys.stderr.write("pear\\n");'
+ 'sys.stdout.write(sys.stdin.read())'],
+ universal_newlines=True,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ self.assertRaises(subprocess.TimeoutExpired, p.communicate, u"banana",
+ timeout=0.3)
+ # Make sure we can keep waiting for it, and that we get the whole output
+ # after it completes.
+ (stdout, stderr) = p.communicate()
+ self.assertEqual(stdout, "banana")
+ self.assertStderrEqual(stderr.encode(), "pineapple\npear\n")
+
+    def test_communicate_timeout_large_output(self):
+        # Test an expiring timeout while the child is outputting lots of data.
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys,os,time;'
+ 'sys.stdout.write("a" * (64 * 1024));'
+ 'time.sleep(0.2);'
+ 'sys.stdout.write("a" * (64 * 1024));'
+ 'time.sleep(0.2);'
+ 'sys.stdout.write("a" * (64 * 1024));'
+ 'time.sleep(0.2);'
+ 'sys.stdout.write("a" * (64 * 1024));'],
+ stdout=subprocess.PIPE)
+ self.assertRaises(subprocess.TimeoutExpired, p.communicate, timeout=0.4)
+ (stdout, _) = p.communicate()
+ self.assertEqual(len(stdout), 4 * 64 * 1024)
+
+ # Test for the fd leak reported in http://bugs.python.org/issue2791.
+ def test_communicate_pipe_fd_leak(self):
+ for stdin_pipe in (False, True):
+ for stdout_pipe in (False, True):
+ for stderr_pipe in (False, True):
+ options = {}
+ if stdin_pipe:
+ options['stdin'] = subprocess.PIPE
+ if stdout_pipe:
+ options['stdout'] = subprocess.PIPE
+ if stderr_pipe:
+ options['stderr'] = subprocess.PIPE
+ if not options:
+ continue
+ p = subprocess.Popen((sys.executable, "-c", yenv + "pass"), **options)
+ p.communicate()
+ if p.stdin is not None:
+ self.assertTrue(p.stdin.closed)
+ if p.stdout is not None:
+ self.assertTrue(p.stdout.closed)
+ if p.stderr is not None:
+ self.assertTrue(p.stderr.closed)
+
+ def test_communicate_returns(self):
+ # communicate() should return None if no redirection is active
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ "import sys; sys.exit(47)"])
+ (stdout, stderr) = p.communicate()
+ self.assertEqual(stdout, None)
+ self.assertEqual(stderr, None)
+
+ def test_communicate_pipe_buf(self):
+ # communicate() with writes larger than pipe_buf
+        # This test will probably deadlock rather than fail if
+ # communicate() does not work properly.
+ x, y = os.pipe()
+ if mswindows:
+ pipe_buf = 512
+ else:
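+            # PC_PIPE_BUF is the largest write the OS guarantees to
+            # perform atomically on this pipe.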
+ pipe_buf = os.fpathconf(x, "PC_PIPE_BUF")
+ os.close(x)
+ os.close(y)
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys,os;'
+ 'sys.stdout.write(sys.stdin.read(47));'
+ 'sys.stderr.write("xyz"*%d);'
+ 'sys.stdout.write(sys.stdin.read())' % pipe_buf],
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ string_to_write = "abc"*pipe_buf
+ (stdout, stderr) = p.communicate(string_to_write)
+ self.assertEqual(stdout, string_to_write)
+
+ def test_writes_before_communicate(self):
+ # stdin.write before communicate()
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys,os;'
+ 'sys.stdout.write(sys.stdin.read())'],
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ p.stdin.write("banana")
+ (stdout, stderr) = p.communicate("split")
+ self.assertEqual(stdout, "bananasplit")
+ self.assertStderrEqual(stderr, "")
+
+ def test_universal_newlines(self):
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys,os;' + SETBINARY +
+ 'sys.stdout.write("line1\\n");'
+ 'sys.stdout.flush();'
+ 'sys.stdout.write("line2\\r");'
+ 'sys.stdout.flush();'
+ 'sys.stdout.write("line3\\r\\n");'
+ 'sys.stdout.flush();'
+ 'sys.stdout.write("line4\\r");'
+ 'sys.stdout.flush();'
+ 'sys.stdout.write("\\nline5");'
+ 'sys.stdout.flush();'
+ 'sys.stdout.write("\\nline6");'],
+ stdout=subprocess.PIPE,
+ universal_newlines=1)
+ stdout = p.stdout.read()
+ if hasattr(file, 'newlines'):
+ # Interpreter with universal newline support
+ self.assertEqual(stdout,
+ "line1\nline2\nline3\nline4\nline5\nline6")
+ else:
+ # Interpreter without universal newline support
+ self.assertEqual(stdout,
+ "line1\nline2\rline3\r\nline4\r\nline5\nline6")
+
+ def test_universal_newlines_communicate(self):
+ # universal newlines through communicate()
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys,os;' + SETBINARY +
+ 'sys.stdout.write("line1\\n");'
+ 'sys.stdout.flush();'
+ 'sys.stdout.write("line2\\r");'
+ 'sys.stdout.flush();'
+ 'sys.stdout.write("line3\\r\\n");'
+ 'sys.stdout.flush();'
+ 'sys.stdout.write("line4\\r");'
+ 'sys.stdout.flush();'
+ 'sys.stdout.write("\\nline5");'
+ 'sys.stdout.flush();'
+ 'sys.stdout.write("\\nline6");'],
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ universal_newlines=1)
+ (stdout, stderr) = p.communicate()
+ if hasattr(file, 'newlines'):
+ # Interpreter with universal newline support
+ self.assertEqual(stdout,
+ "line1\nline2\nline3\nline4\nline5\nline6")
+ else:
+ # Interpreter without universal newline support
+ self.assertEqual(stdout,
+ "line1\nline2\rline3\r\nline4\r\nline5\nline6")
+
+ def test_universal_newlines_communicate_input_none(self):
+ # Test communicate(input=None) with universal newlines.
+ #
+ # We set stdout to PIPE because, as of this writing, a different
+ # code path is tested when the number of pipes is zero or one.
+ p = subprocess.Popen([sys.executable, "-c", yenv + "pass"],
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ universal_newlines=True)
+ p.communicate()
+ self.assertEqual(p.returncode, 0)
+
+ def test_no_leaking(self):
+ # Make sure we leak no resources
+ if not hasattr(test_support, "is_resource_enabled") \
+ or test_support.is_resource_enabled("subprocess") and not mswindows:
+ max_handles = 1026 # too much for most UNIX systems
+ else:
+ max_handles = 65
+ for i in range(max_handles):
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ "import sys;sys.stdout.write(sys.stdin.read())"],
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ data = p.communicate("lime")[0]
+ self.assertEqual(data, "lime")
+
+ def test_universal_newlines_communicate_stdin_stdout_stderr(self):
+ # universal newlines through communicate(), with stdin, stdout, stderr
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys,os;' + SETBINARY + '''\nif True:
+ s = sys.stdin.readline()
+ sys.stdout.write(s)
+ sys.stdout.write("line2\\r")
+ sys.stderr.write("eline2\\n")
+ s = sys.stdin.read()
+ sys.stdout.write(s)
+ sys.stdout.write("line4\\n")
+ sys.stdout.write("line5\\r\\n")
+ sys.stderr.write("eline6\\r")
+ sys.stderr.write("eline7\\r\\nz")
+ '''],
+ stdin=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ universal_newlines=True)
+ self.addCleanup(p.stdout.close)
+ self.addCleanup(p.stderr.close)
+ (stdout, stderr) = p.communicate(u"line1\nline3\n")
+ self.assertEqual(p.returncode, 0)
+ self.assertEqual(u"line1\nline2\nline3\nline4\nline5\n", stdout)
+        # A Python debug build pushes something like "[42442 refs]\n"
+        # to stderr when the subprocess exits.
+ # Don't use assertStderrEqual because it strips CR and LF from output.
+ self.assertTrue(stderr.startswith(u"eline2\neline6\neline7\n"))
+
+ def test_list2cmdline(self):
+ self.assertEqual(subprocess.list2cmdline(['a b c', 'd', 'e']),
+ '"a b c" d e')
+ self.assertEqual(subprocess.list2cmdline(['ab"c', '\\', 'd']),
+ 'ab\\"c \\ d')
+ self.assertEqual(subprocess.list2cmdline(['ab"c', ' \\', 'd']),
+ 'ab\\"c " \\\\" d')
+ self.assertEqual(subprocess.list2cmdline(['a\\\\\\b', 'de fg', 'h']),
+ 'a\\\\\\b "de fg" h')
+ self.assertEqual(subprocess.list2cmdline(['a\\"b', 'c', 'd']),
+ 'a\\\\\\"b c d')
+ self.assertEqual(subprocess.list2cmdline(['a\\\\b c', 'd', 'e']),
+ '"a\\\\b c" d e')
+ self.assertEqual(subprocess.list2cmdline(['a\\\\b\\ c', 'd', 'e']),
+ '"a\\\\b\\ c" d e')
+ self.assertEqual(subprocess.list2cmdline(['ab', '']),
+ 'ab ""')
+
+
+ def test_poll(self):
+ p = subprocess.Popen([sys.executable,
+ "-c", yenv + "import time; time.sleep(1)"])
+ count = 0
+ while p.poll() is None:
+ time.sleep(0.1)
+ count += 1
+ # We expect that the poll loop probably went around about 10 times,
+        # but, since system scheduling is beyond our control, it's possible
+        # poll() never returned None. It should be very rare that the loop
+        # didn't go around at least twice.
+ self.assert_(count >= 2)
+ # Subsequent invocations should just return the returncode
+ self.assertEqual(p.poll(), 0)
+
+
+ def test_wait(self):
+ p = subprocess.Popen([sys.executable,
+ "-c", yenv + "import time; time.sleep(2)"])
+ self.assertEqual(p.wait(), 0)
+ # Subsequent invocations should just return the returncode
+ self.assertEqual(p.wait(), 0)
+
+
+ def test_wait_timeout(self):
+ p = subprocess.Popen([sys.executable,
+ "-c", yenv + "import time; time.sleep(0.1)"])
+ try:
+ p.wait(timeout=0.01)
+ except subprocess.TimeoutExpired, e:
+ self.assertIn("0.01", str(e)) # For coverage of __str__.
+ else:
+ self.fail("subprocess.TimeoutExpired expected but not raised.")
+ self.assertEqual(p.wait(timeout=2), 0)
+
+
+ def test_invalid_bufsize(self):
+ # an invalid type of the bufsize argument should raise
+ # TypeError.
+ try:
+ subprocess.Popen([sys.executable, "-c", yenv + "pass"], "orange")
+ except TypeError:
+ pass
+
+ def test_leaking_fds_on_error(self):
+ # see bug #5179: Popen leaks file descriptors to PIPEs if
+ # the child fails to execute; this will eventually exhaust
+ # the maximum number of open fds. 1024 seems a very common
+ # value for that limit, but Windows has 2048, so we loop
+ # 1024 times (each call leaked two fds).
+ for i in range(1024):
+ # Windows raises IOError. Others raise OSError.
+ try:
+ subprocess.Popen(['nonexisting_i_hope'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ except EnvironmentError, c:
+ if c.errno != 2: # ignore "no such file"
+ raise
+
+ #@unittest.skipIf(threading is None, "threading required")
+ def test_threadsafe_wait(self):
+ """Issue21291: Popen.wait() needs to be threadsafe for returncode."""
+ proc = subprocess.Popen([sys.executable, '-c', yenv +
+ 'import time; time.sleep(12)'])
+ self.assertEqual(proc.returncode, None)
+ results = []
+
+ def kill_proc_timer_thread():
+ results.append(('thread-start-poll-result', proc.poll()))
+ # terminate it from the thread and wait for the result.
+ proc.kill()
+ proc.wait()
+ results.append(('thread-after-kill-and-wait', proc.returncode))
+ # this wait should be a no-op given the above.
+ proc.wait()
+ results.append(('thread-after-second-wait', proc.returncode))
+
+ # This is a timing sensitive test, the failure mode is
+ # triggered when both the main thread and this thread are in
+ # the wait() call at once. The delay here is to allow the
+ # main thread to most likely be blocked in its wait() call.
+ t = threading.Timer(0.2, kill_proc_timer_thread)
+ t.start()
+
+ if mswindows:
+ expected_errorcode = 1
+ else:
+ # Should be -9 because of the proc.kill() from the thread.
+ expected_errorcode = -9
+
+ # Wait for the process to finish; the thread should kill it
+ # long before it finishes on its own. Supplying a timeout
+ # triggers a different code path for better coverage.
+ proc.wait(timeout=20)
+ self.assertEqual(proc.returncode, expected_errorcode,
+ msg="unexpected result in wait from main thread")
+
+ # This should be a no-op with no change in returncode.
+ proc.wait()
+ self.assertEqual(proc.returncode, expected_errorcode,
+ msg="unexpected result in second main wait.")
+
+ t.join()
+ # Ensure that all of the thread results are as expected.
+        # When a race condition occurs in wait(), the returncode could
+        # be set by a thread that doesn't actually hold the value,
+        # leading to an incorrect result.
+ self.assertEqual([('thread-start-poll-result', None),
+ ('thread-after-kill-and-wait', expected_errorcode),
+ ('thread-after-second-wait', expected_errorcode)],
+ results)
+
+ def test_issue8780(self):
+ # Ensure that stdout is inherited from the parent
+ # if stdout=PIPE is not used
+ code = ';'.join((
+ 'import subprocess32, sys',
+ 'retcode = subprocess32.call('
+ "[sys.executable, '-c', 'print(\"Hello World!\")'])",
+ 'assert retcode == 0'))
+ output = subprocess.check_output([sys.executable, '-c', yenv + code])
+ self.assert_(output.startswith('Hello World!'), output)
+
+ def test_communicate_epipe(self):
+ # Issue 10963: communicate() should hide EPIPE
+ p = subprocess.Popen([sys.executable, "-c", yenv + 'pass'],
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ self.addCleanup(p.stdout.close)
+ self.addCleanup(p.stderr.close)
+ self.addCleanup(p.stdin.close)
+ p.communicate(b"x" * 2**20)
+
+ def test_communicate_epipe_only_stdin(self):
+ # Issue 10963: communicate() should hide EPIPE
+ p = subprocess.Popen([sys.executable, "-c", yenv + 'pass'],
+ stdin=subprocess.PIPE)
+ self.addCleanup(p.stdin.close)
+ p.wait()
+ p.communicate(b"x" * 2**20)
+
+ if not mswindows: # Signal tests are POSIX specific.
+ def test_communicate_eintr(self):
+ # Issue #12493: communicate() should handle EINTR
+ def handler(signum, frame):
+ pass
+ old_handler = signal.signal(signal.SIGALRM, handler)
+ self.addCleanup(signal.signal, signal.SIGALRM, old_handler)
+
+ # the process is running for 2 seconds
+ args = [sys.executable, "-c", yenv + 'import time; time.sleep(2)']
+ for stream in ('stdout', 'stderr'):
+ kw = {stream: subprocess.PIPE}
+ process = subprocess.Popen(args, **kw)
+ try:
+ signal.alarm(1)
+ # communicate() will be interrupted by SIGALRM
+ process.communicate()
+ finally:
+ process.__exit__(None, None, None)
+
+
+    # For simplicity this test is Linux-specific, to at least have
+    # some coverage. The bug it covers is not platform specific.
+ #@unittest.skipUnless(os.path.isdir('/proc/%d/fd' % os.getpid()),
+ # "Linux specific")
+ def test_failed_child_execute_fd_leak(self):
+ """Test for the fork() failure fd leak reported in issue16327."""
+ if not os.path.isdir('/proc/%d/fd' % os.getpid()):
+ self.skipTest("Linux specific")
+ fd_directory = '/proc/%d/fd' % os.getpid()
+ fds_before_popen = os.listdir(fd_directory)
+ try:
+ PopenExecuteChildRaises(
+ [sys.executable, '-c', yenv + 'pass'], stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ except PopenTestException:
+ pass # Yay! Because 2.4 doesn't support with statements.
+ else:
+ self.fail("PopenTestException expected but not raised.")
+
+ # NOTE: This test doesn't verify that the real _execute_child
+ # does not close the file descriptors itself on the way out
+ # during an exception. Code inspection has confirmed that.
+
+ fds_after_exception = os.listdir(fd_directory)
+ self.assertEqual(fds_before_popen, fds_after_exception)
+
+
+class RunFuncTestCase(BaseTestCase):
+ def run_python(self, code, **kwargs):
+ """Run Python code in a subprocess using subprocess.run"""
+ argv = [sys.executable, "-c", yenv + code]
+ return subprocess.run(argv, **kwargs)
+
+ def test_returncode(self):
+ # call() function with sequence argument
+ cp = self.run_python("import sys; sys.exit(47)")
+ self.assertEqual(cp.returncode, 47)
+ try:
+ cp.check_returncode()
+ except subprocess.CalledProcessError:
+ pass
+ else:
+ self.fail("CalledProcessError not raised")
+
+ def test_check(self):
+ try:
+ self.run_python("import sys; sys.exit(47)", check=True)
+ except subprocess.CalledProcessError, exception:
+ self.assertEqual(exception.returncode, 47)
+ else:
+ self.fail("CalledProcessError not raised")
+
+ def test_check_zero(self):
+ # check_returncode shouldn't raise when returncode is zero
+ cp = self.run_python("import sys; sys.exit(0)", check=True)
+ self.assertEqual(cp.returncode, 0)
+
+ def test_timeout(self):
+ # run() function with timeout argument; we want to test that the child
+ # process gets killed when the timeout expires. If the child isn't
+ # killed, this call will deadlock since subprocess.run waits for the
+ # child.
+ try:
+ self.run_python("while True: pass", timeout=0.0001)
+ except subprocess.TimeoutExpired:
+ pass
+ else:
+ self.fail("TimeoutExpired not raised")
+
+ def test_capture_stdout(self):
+ # capture stdout with zero return code
+ cp = self.run_python("print('BDFL')", stdout=subprocess.PIPE)
+ self.assertIn('BDFL', cp.stdout)
+
+ def test_capture_stderr(self):
+ cp = self.run_python("import sys; sys.stderr.write('BDFL')",
+ stderr=subprocess.PIPE)
+ self.assertIn('BDFL', cp.stderr)
+
+ def test_check_output_stdin_arg(self):
+ # run() can be called with stdin set to a file
+ tf = tempfile.TemporaryFile()
+ self.addCleanup(tf.close)
+ tf.write('pear')
+ tf.seek(0)
+ cp = self.run_python(
+ "import sys; sys.stdout.write(sys.stdin.read().upper())",
+ stdin=tf, stdout=subprocess.PIPE)
+ self.assertIn('PEAR', cp.stdout)
+
+ def test_check_output_input_arg(self):
+ # check_output() can be called with input set to a string
+ cp = self.run_python(
+ "import sys; sys.stdout.write(sys.stdin.read().upper())",
+ input='pear', stdout=subprocess.PIPE)
+ self.assertIn('PEAR', cp.stdout)
+
+ def test_check_output_stdin_with_input_arg(self):
+ # run() refuses to accept 'stdin' with 'input'
+ tf = tempfile.TemporaryFile()
+ self.addCleanup(tf.close)
+ tf.write('pear')
+ tf.seek(0)
+ try:
+ output = self.run_python("print('will not be run')",
+ stdin=tf, input='hare')
+ except ValueError, exception:
+ self.assertIn('stdin', exception.args[0])
+ self.assertIn('input', exception.args[0])
+ else:
+ self.fail("Expected ValueError when stdin and input args supplied.")
+
+ def test_check_output_timeout(self):
+ try:
+ cp = self.run_python((
+ "import sys, time\n"
+ "sys.stdout.write('BDFL')\n"
+ "sys.stdout.flush()\n"
+ "time.sleep(3600)"),
+ # Some heavily loaded buildbots (sparc Debian 3.x) require
+ # this much time to start and print.
+ timeout=3, stdout=subprocess.PIPE)
+ except subprocess.TimeoutExpired, exception:
+ self.assertEqual(exception.output, 'BDFL')
+ # output is aliased to stdout
+ self.assertEqual(exception.stdout, 'BDFL')
+ else:
+ self.fail("TimeoutExpired not raised")
+
+ def test_run_kwargs(self):
+ newenv = os.environ.copy()
+ newenv["FRUIT"] = "banana"
+ cp = self.run_python(('import sys, os;'
+ 'os.getenv("FRUIT")=="banana" and sys.exit(33) or sys.exit(31)'),
+ env=newenv)
+ self.assertEqual(cp.returncode, 33)
+
+
+# context manager
+class _SuppressCoreFiles(object):
+ """Try to prevent core files from being created."""
+ old_limit = None
+
+ def __enter__(self):
+ """Try to save previous ulimit, then set it to (0, 0)."""
+ try:
+ import resource
+ self.old_limit = resource.getrlimit(resource.RLIMIT_CORE)
+ resource.setrlimit(resource.RLIMIT_CORE, (0, 0))
+ except (ImportError, ValueError, resource.error):
+ pass
+
+ def __exit__(self, *args):
+ """Return core file behavior to default."""
+ if self.old_limit is None:
+ return
+ try:
+ import resource
+ resource.setrlimit(resource.RLIMIT_CORE, self.old_limit)
+ except (ImportError, ValueError, resource.error):
+ pass
+
+
+#@unittest.skipIf(mswindows, "POSIX specific tests")
+class POSIXProcessTestCase(BaseTestCase):
+
+ def setUp(self):
+ BaseTestCase.setUp(self)
+ self._nonexistent_dir = "/_this/pa.th/does/not/exist"
+
+ def _get_chdir_exception(self):
+ try:
+ os.chdir(self._nonexistent_dir)
+ except OSError, e:
+ # This avoids hard coding the errno value or the OS perror()
+            # string and instead captures the exception that we want to see
+ # below for comparison.
+ desired_exception = e
+ desired_exception.strerror += ': ' + repr(self._nonexistent_dir)
+ else:
+ self.fail("chdir to nonexistant directory %s succeeded." %
+ self._nonexistent_dir)
+ return desired_exception
+
+ def test_exception_cwd(self):
+ """Test error in the child raised in the parent for a bad cwd."""
+ desired_exception = self._get_chdir_exception()
+ try:
+ p = subprocess.Popen([sys.executable, "-c", yenv + ""],
+ cwd=self._nonexistent_dir)
+ except OSError, e:
+ # Test that the child process chdir failure actually makes
+ # it up to the parent process as the correct exception.
+ self.assertEqual(desired_exception.errno, e.errno)
+ self.assertEqual(desired_exception.strerror, e.strerror)
+ else:
+ self.fail("Expected OSError: %s" % desired_exception)
+
+ def test_exception_bad_executable(self):
+ """Test error in the child raised in the parent for a bad executable."""
+ desired_exception = self._get_chdir_exception()
+ try:
+ p = subprocess.Popen([sys.executable, "-c", yenv + ""],
+ executable=self._nonexistent_dir)
+ except OSError, e:
+ # Test that the child process exec failure actually makes
+ # it up to the parent process as the correct exception.
+ self.assertEqual(desired_exception.errno, e.errno)
+ self.assertEqual(desired_exception.strerror, e.strerror)
+ else:
+ self.fail("Expected OSError: %s" % desired_exception)
+
+ def test_exception_bad_args_0(self):
+ """Test error in the child raised in the parent for a bad args[0]."""
+ desired_exception = self._get_chdir_exception()
+ try:
+ p = subprocess.Popen([self._nonexistent_dir, "-c", yenv + ""])
+ except OSError, e:
+ # Test that the child process exec failure actually makes
+ # it up to the parent process as the correct exception.
+ self.assertEqual(desired_exception.errno, e.errno)
+ self.assertEqual(desired_exception.strerror, e.strerror)
+ else:
+ self.fail("Expected OSError: %s" % desired_exception)
+
+ #@unittest.skipIf(not os.path.exists('/proc/self/status'))
+ def test_restore_signals(self):
+ if not os.path.exists('/proc/self/status'):
+ print("SKIP - Functional test requires /proc/self/status.")
+ return
+ # Blindly assume that cat exists on systems with /proc/self/status...
+ default_proc_status = subprocess.check_output(
+ ['cat', '/proc/self/status'],
+ restore_signals=False)
+ for line in default_proc_status.splitlines():
+ if line.startswith(b'SigIgn'):
+ default_sig_ign_mask = line
+ break
+ else:
+ self.skipTest("SigIgn not found in /proc/self/status.")
+ restored_proc_status = subprocess.check_output(
+ ['cat', '/proc/self/status'],
+ restore_signals=True)
+ for line in restored_proc_status.splitlines():
+ if line.startswith(b'SigIgn'):
+ restored_sig_ign_mask = line
+ break
+ # restore_signals=True should've unblocked SIGPIPE and friends.
+ self.assertNotEqual(default_sig_ign_mask, restored_sig_ign_mask)
+
+ def test_start_new_session(self):
+ # For code coverage of calling setsid(). We don't care if we get an
+        # EPERM error from it depending on the test execution environment;
+        # that still indicates that it was called.
+ try:
+ output = subprocess.check_output(
+ [sys.executable, "-c", yenv +
+ "import os; print(os.getpgid(os.getpid()))"],
+ start_new_session=True)
+ except OSError, e:
+ if e.errno != errno.EPERM:
+ raise
+ else:
+ parent_pgid = os.getpgid(os.getpid())
+ child_pgid = int(output)
+ self.assertNotEqual(parent_pgid, child_pgid)
+
+ def test_run_abort(self):
+ # returncode handles signal termination
+ scf = _SuppressCoreFiles()
+ scf.__enter__()
+ try:
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ "import os; os.abort()"])
+ p.wait()
+ finally:
+ scf.__exit__()
+ self.assertEqual(-p.returncode, signal.SIGABRT)
+
+ def test_preexec(self):
+ # DISCLAIMER: Setting environment variables is *not* a good use
+ # of a preexec_fn. This is merely a test.
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ "import sys, os;"
+ "sys.stdout.write(os.getenv('FRUIT'))"],
+ stdout=subprocess.PIPE,
+ preexec_fn=lambda: os.putenv("FRUIT", "apple"))
+ self.assertEqual(p.stdout.read(), "apple")
+
+ def test_preexec_exception(self):
+ def raise_it():
+ raise ValueError("What if two swallows carried a coconut?")
+ try:
+ p = subprocess.Popen([sys.executable, "-c", yenv + ""],
+ preexec_fn=raise_it)
+ except RuntimeError, e:
+ self.assertTrue(
+ subprocess._posixsubprocess,
+ "Expected a ValueError from the preexec_fn")
+ except ValueError, e:
+ self.assertIn("coconut", e.args[0])
+ else:
+ self.fail("Exception raised by preexec_fn did not make it "
+ "to the parent process.")
+
+ class _TestExecuteChildPopen(subprocess.Popen):
+ """Used to test behavior at the end of _execute_child."""
+ def __init__(self, testcase, *args, **kwargs):
+ self._testcase = testcase
+ subprocess.Popen.__init__(self, *args, **kwargs)
+
+ def _execute_child(self, *args, **kwargs):
+ try:
+ subprocess.Popen._execute_child(self, *args, **kwargs)
+ finally:
+ # Open a bunch of file descriptors and verify that
+ # none of them are the same as the ones the Popen
+ # instance is using for stdin/stdout/stderr.
+ devzero_fds = [os.open("/dev/zero", os.O_RDONLY)
+ for _ in range(8)]
+ try:
+ for fd in devzero_fds:
+ self._testcase.assertNotIn(
+ fd, (self.stdin.fileno(), self.stdout.fileno(),
+ self.stderr.fileno()),
+ msg="At least one fd was closed early.")
+ finally:
+ map(os.close, devzero_fds)
+
+ #@unittest.skipIf(not os.path.exists("/dev/zero"), "/dev/zero required.")
+ def test_preexec_errpipe_does_not_double_close_pipes(self):
+ """Issue16140: Don't double close pipes on preexec error."""
+
+ def raise_it():
+ raise RuntimeError("force the _execute_child() errpipe_data path.")
+
+ try:
+ self._TestExecuteChildPopen(
+ self, [sys.executable, "-c", yenv + "pass"],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE, preexec_fn=raise_it)
+ except RuntimeError:
+ pass # Expected; try/except is used because 2.4 doesn't support with statements.
+ else:
+ self.fail("RuntimeError expected but not raised.")
+
+ #@unittest.skipUnless(gc, "Requires a gc module.")
+ def test_preexec_gc_module_failure(self):
+ # This tests the code that disables garbage collection if the child
+ # process will execute any Python.
+ def raise_runtime_error():
+ raise RuntimeError("this shouldn't escape")
+ enabled = gc.isenabled()
+ orig_gc_disable = gc.disable
+ orig_gc_isenabled = gc.isenabled
+ try:
+ gc.disable()
+ self.assertFalse(gc.isenabled())
+ subprocess.call([sys.executable, '-c', yenv + ''],
+ preexec_fn=lambda: None)
+ self.assertFalse(gc.isenabled(),
+ "Popen enabled gc when it shouldn't.")
+
+ gc.enable()
+ self.assertTrue(gc.isenabled())
+ subprocess.call([sys.executable, '-c', yenv + ''],
+ preexec_fn=lambda: None)
+ self.assertTrue(gc.isenabled(), "Popen left gc disabled.")
+
+ gc.disable = raise_runtime_error
+ self.assertRaises(RuntimeError, subprocess.Popen,
+ [sys.executable, '-c', yenv + ''],
+ preexec_fn=lambda: None)
+
+ del gc.isenabled # force an AttributeError
+ self.assertRaises(AttributeError, subprocess.Popen,
+ [sys.executable, '-c', yenv + ''],
+ preexec_fn=lambda: None)
+ finally:
+ gc.disable = orig_gc_disable
+ gc.isenabled = orig_gc_isenabled
+ if not enabled:
+ gc.disable()
+
+ def test_args_string(self):
+ # args is a string
+ f, fname = mkstemp()
+ os.write(f, "#!/bin/sh\n")
+ os.write(f, "exec '%s' -c 'import sys; sys.exit(47)'\n" %
+ sys.executable)
+ os.close(f)
+ os.chmod(fname, 0700)
+ p = subprocess.Popen(fname)
+ p.wait()
+ os.remove(fname)
+ self.assertEqual(p.returncode, 47)
+
+ def test_invalid_args(self):
+ # invalid arguments should raise ValueError
+ self.assertRaises(ValueError, subprocess.call,
+ [sys.executable, "-c", yenv +
+ "import sys; sys.exit(47)"],
+ startupinfo=47)
+ self.assertRaises(ValueError, subprocess.call,
+ [sys.executable, "-c", yenv +
+ "import sys; sys.exit(47)"],
+ creationflags=47)
+
+ def test_shell_sequence(self):
+ # Run command through the shell (sequence)
+ newenv = os.environ.copy()
+ newenv["FRUIT"] = "apple"
+ p = subprocess.Popen(["echo $FRUIT"], shell=1,
+ stdout=subprocess.PIPE,
+ env=newenv)
+ self.assertEqual(p.stdout.read().strip(), "apple")
+
+ def test_shell_string(self):
+ # Run command through the shell (string)
+ newenv = os.environ.copy()
+ newenv["FRUIT"] = "apple"
+ p = subprocess.Popen("echo $FRUIT", shell=1,
+ stdout=subprocess.PIPE,
+ env=newenv)
+ self.assertEqual(p.stdout.read().strip(), "apple")
+
+ def test_call_string(self):
+ # call() function with string argument on UNIX
+ f, fname = mkstemp()
+ os.write(f, "#!/bin/sh\n")
+ os.write(f, "exec '%s' -c 'import sys; sys.exit(47)'\n" %
+ sys.executable)
+ os.close(f)
+ os.chmod(fname, 0700)
+ rc = subprocess.call(fname)
+ os.remove(fname)
+ self.assertEqual(rc, 47)
+
+ def test_specific_shell(self):
+ # Issue #9265: Incorrect name passed as arg[0].
+ shells = []
+ for prefix in ['/bin', '/usr/bin/', '/usr/local/bin']:
+ for name in ['bash', 'ksh']:
+ sh = os.path.join(prefix, name)
+ if os.path.isfile(sh):
+ shells.append(sh)
+ if not shells: # Will probably work for any shell but csh.
+ self.skipTest("bash or ksh required for this test")
+ sh = '/bin/sh'
+ if os.path.isfile(sh) and not os.path.islink(sh):
+ # Test will fail if /bin/sh is a symlink to csh.
+ shells.append(sh)
+ for sh in shells:
+ p = subprocess.Popen("echo $0", executable=sh, shell=True,
+ stdout=subprocess.PIPE)
+ self.assertEqual(p.stdout.read().strip(), sh)
+
+ def _kill_process(self, method, *args):
+ # Do not inherit file handles from the parent.
+ # It should fix failures on some platforms.
+ p = subprocess.Popen([sys.executable, "-c", yenv + """if 1:
+ import sys, time
+ sys.stdout.write('x\\n')
+ sys.stdout.flush()
+ time.sleep(30)
+ """],
+ close_fds=True,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ # Wait for the interpreter to be completely initialized before
+ # sending any signal.
+ p.stdout.read(1)
+ getattr(p, method)(*args)
+ return p
+
+ def test_send_signal(self):
+ p = self._kill_process('send_signal', signal.SIGINT)
+ _, stderr = p.communicate()
+ self.assertIn('KeyboardInterrupt', stderr)
+ self.assertNotEqual(p.wait(), 0)
+
+ def test_kill(self):
+ p = self._kill_process('kill')
+ _, stderr = p.communicate()
+ self.assertStderrEqual(stderr, '')
+ self.assertEqual(p.wait(), -signal.SIGKILL)
+
+ def test_terminate(self):
+ p = self._kill_process('terminate')
+ _, stderr = p.communicate()
+ self.assertStderrEqual(stderr, '')
+ self.assertEqual(p.wait(), -signal.SIGTERM)
+
+ def check_close_std_fds(self, fds):
+ # Issue #9905: test that subprocess pipes still work properly with
+ # some standard fds closed
+ stdin = 0
+ newfds = []
+ for a in fds:
+ b = os.dup(a)
+ newfds.append(b)
+ if a == 0:
+ stdin = b
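+ # The os.dup() copies saved above let the finally block restore the
+ # original descriptors after this test closes some of fds 0, 1 and 2.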
+ try:
+ for fd in fds:
+ os.close(fd)
+ out, err = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys;'
+ 'sys.stdout.write("apple");'
+ 'sys.stdout.flush();'
+ 'sys.stderr.write("orange")'],
+ stdin=stdin,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE).communicate()
+ err = strip_python_stderr(err)
+ self.assertEqual((out, err), ('apple', 'orange'))
+ finally:
+ for b, a in zip(newfds, fds):
+ os.dup2(b, a)
+ for b in newfds:
+ os.close(b)
+
+ def test_close_fd_0(self):
+ self.check_close_std_fds([0])
+
+ def test_close_fd_1(self):
+ self.check_close_std_fds([1])
+
+ def test_close_fd_2(self):
+ self.check_close_std_fds([2])
+
+ def test_close_fds_0_1(self):
+ self.check_close_std_fds([0, 1])
+
+ def test_close_fds_0_2(self):
+ self.check_close_std_fds([0, 2])
+
+ def test_close_fds_1_2(self):
+ self.check_close_std_fds([1, 2])
+
+ def test_close_fds_0_1_2(self):
+ # Issue #10806: test that subprocess pipes still work properly with
+ # all standard fds closed.
+ self.check_close_std_fds([0, 1, 2])
+
+ def check_swap_fds(self, stdin_no, stdout_no, stderr_no):
+ # open up some temporary files
+ temps = [mkstemp() for i in range(3)]
+ temp_fds = [fd for fd, fname in temps]
+ try:
+ # unlink the files -- we won't need to reopen them
+ for fd, fname in temps:
+ os.unlink(fname)
+
+ # save a copy of the standard file descriptors
+ saved_fds = [os.dup(fd) for fd in range(3)]
+ try:
+ # duplicate the temp files over the standard fd's 0, 1, 2
+ for fd, temp_fd in enumerate(temp_fds):
+ os.dup2(temp_fd, fd)
+
+ # write some data to what will become stdin, and rewind
+ os.write(stdin_no, "STDIN")
+ os.lseek(stdin_no, 0, 0)
+
+ # now use those files in the given order, so that subprocess
+ # has to rearrange them in the child
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys; got = sys.stdin.read();'
+ 'sys.stdout.write("got %s"%got); sys.stderr.write("err")'],
+ stdin=stdin_no,
+ stdout=stdout_no,
+ stderr=stderr_no)
+ p.wait()
+
+ for fd in temp_fds:
+ os.lseek(fd, 0, 0)
+
+ out = os.read(stdout_no, 1024)
+ err = os.read(stderr_no, 1024)
+ finally:
+ for std, saved in enumerate(saved_fds):
+ os.dup2(saved, std)
+ os.close(saved)
+
+ self.assertEqual(out, "got STDIN")
+ self.assertStderrEqual(err, "err")
+
+ finally:
+ for fd in temp_fds:
+ os.close(fd)
+
+ # When duplicating fds, if one of the target fds is 0, 1 or 2, it is
+ # possible for it to be overwritten (#12607). The test below covers
+ # every ordering of the three standard fds.
+ def test_swap_fds(self):
+ self.check_swap_fds(0, 1, 2)
+ self.check_swap_fds(0, 2, 1)
+ self.check_swap_fds(1, 0, 2)
+ self.check_swap_fds(1, 2, 0)
+ self.check_swap_fds(2, 0, 1)
+ self.check_swap_fds(2, 1, 0)
+
+ def test_small_errpipe_write_fd(self):
+ """Issue #15798: Popen should work when stdio fds are available."""
+ new_stdin = os.dup(0)
+ new_stdout = os.dup(1)
+ try:
+ os.close(0)
+ os.close(1)
+
+ subprocess.Popen([
+ sys.executable, "-c", yenv + "pass"]).wait()
+ finally:
+ # Restore original stdin and stdout
+ os.dup2(new_stdin, 0)
+ os.dup2(new_stdout, 1)
+ os.close(new_stdin)
+ os.close(new_stdout)
+
+ def test_remapping_std_fds(self):
+ # open up some temporary files
+ temps = [mkstemp() for i in range(3)]
+ try:
+ temp_fds = [fd for fd, fname in temps]
+
+ # unlink the files -- we won't need to reopen them
+ for fd, fname in temps:
+ os.unlink(fname)
+
+ # write some data to what will become stdin, and rewind
+ os.write(temp_fds[1], "STDIN")
+ os.lseek(temp_fds[1], 0, 0)
+
+ # move the standard file descriptors out of the way
+ saved_fds = [os.dup(fd) for fd in range(3)]
+ try:
+ # duplicate the file objects over the standard fd's
+ for fd, temp_fd in enumerate(temp_fds):
+ os.dup2(temp_fd, fd)
+
+ # now use those files in the "wrong" order, so that subprocess
+ # has to rearrange them in the child
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys; got = sys.stdin.read();'
+ 'sys.stdout.write("got %s"%got); sys.stderr.write("err")'],
+ stdin=temp_fds[1],
+ stdout=temp_fds[2],
+ stderr=temp_fds[0])
+ p.wait()
+ finally:
+ # restore the original fd's underneath sys.stdin, etc.
+ for std, saved in enumerate(saved_fds):
+ os.dup2(saved, std)
+ os.close(saved)
+
+ for fd in temp_fds:
+ os.lseek(fd, 0, 0)
+
+ out = os.read(temp_fds[2], 1024)
+ err = os.read(temp_fds[0], 1024)
+ self.assertEqual(out, "got STDIN")
+ self.assertStderrEqual(err, "err")
+
+ finally:
+ for fd in temp_fds:
+ os.close(fd)
+
+ # NOTE: test_surrogates_error_message makes no sense on python 2.x. omitted.
+ # NOTE: test_undecodable_env makes no sense on python 2.x. omitted.
+ # NOTE: test_bytes_program makes no sense on python 2.x. omitted.
+
+ if sys.version_info[:2] >= (2,7):
+ # This test is disabled on 2.6 and earlier as it fails on Travis CI
+ # regardless of LANG=C being set, and it is not worth the time to
+ # figure out why in such a legacy environment.
+ # https://travis-ci.org/google/python-subprocess32/jobs/290065729
+ def test_fs_encode_unicode_error(self):
+ fs_encoding = sys.getfilesystemencoding()
+ if fs_encoding.upper() not in ("ANSI_X3.4-1968", "ASCII"):
+ self.skipTest(
+ "Requires a restictive sys.filesystemencoding(), "
+ "not %s. Run python with LANG=C" % fs_encoding)
+ highbit_executable_name = os.path.join(
+ test_support.findfile("testdata"), u"Does\\Not\uDCff\\Exist")
+ try:
+ subprocess.call([highbit_executable_name])
+ except UnicodeEncodeError:
+ return
+ except RuntimeError, e:
+ # The ProcessTestCasePOSIXPurePython version ends up here. It
+ # can't re-construct the unicode error from the child because it
+ # doesn't have all the arguments. BFD. One doesn't use
+ # subprocess32 for the old pure python implementation...
+ if "UnicodeEncodeError" not in str(e):
+ self.fail("Expected a RuntimeError whining about how a "
+ "UnicodeEncodeError from the child could not "
+ "be reraised. Not: %s" % e)
+ return
+ self.fail("Expected a UnicodeEncodeError to be raised.")
+
+ def test_pipe_cloexec(self):
+ sleeper = test_support.findfile("testdata/input_reader.py")
+ fd_status = test_support.findfile("testdata/fd_status.py")
+
+ p1 = subprocess.Popen([sys.executable, sleeper],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE, close_fds=False)
+
+ self.addCleanup(p1.communicate, '')
+
+ p2 = subprocess.Popen([sys.executable, fd_status],
+ stdout=subprocess.PIPE, close_fds=False)
+
+ output, error = p2.communicate()
+ result_fds = set(map(int, output.split(',')))
+ unwanted_fds = set([p1.stdin.fileno(), p1.stdout.fileno(),
+ p1.stderr.fileno()])
+
+ self.assertFalse(result_fds & unwanted_fds,
+ "Expected no fds from %r to be open in child, "
+ "found %r" %
+ (unwanted_fds, result_fds & unwanted_fds))
+
+ def test_pipe_cloexec_real_tools(self):
+ qcat = test_support.findfile("testdata/qcat.py")
+ qgrep = test_support.findfile("testdata/qgrep.py")
+
+ subdata = 'zxcvbn'
+ data = subdata * 4 + '\n'
+
+ p1 = subprocess.Popen([sys.executable, qcat],
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+ close_fds=False)
+
+ p2 = subprocess.Popen([sys.executable, qgrep, subdata],
+ stdin=p1.stdout, stdout=subprocess.PIPE,
+ close_fds=False)
+
+ self.addCleanup(p1.wait)
+ self.addCleanup(p2.wait)
+ def kill_p1():
+ try:
+ p1.terminate()
+ except ProcessLookupError:
+ pass
+ def kill_p2():
+ try:
+ p2.terminate()
+ except ProcessLookupError:
+ pass
+ self.addCleanup(kill_p1)
+ self.addCleanup(kill_p2)
+
+ p1.stdin.write(data)
+ p1.stdin.close()
+
+ readfiles, ignored1, ignored2 = select.select([p2.stdout], [], [], 10)
+
+ self.assertTrue(readfiles, "The child hung")
+ self.assertEqual(p2.stdout.read(), data)
+
+ p1.stdout.close()
+ p2.stdout.close()
+
+ def test_close_fds(self):
+ fd_status = test_support.findfile("testdata/fd_status.py")
+
+ fds = os.pipe()
+ self.addCleanup(os.close, fds[0])
+ self.addCleanup(os.close, fds[1])
+
+ open_fds = set(fds)
+ # add a bunch more fds
+ for _ in range(9):
+ fd = os.open("/dev/null", os.O_RDONLY)
+ self.addCleanup(os.close, fd)
+ open_fds.add(fd)
+
+ p = subprocess.Popen([sys.executable, fd_status],
+ stdout=subprocess.PIPE, close_fds=False)
+ output, ignored = p.communicate()
+ remaining_fds = set(map(int, output.split(',')))
+
+ self.assertEqual(remaining_fds & open_fds, open_fds,
+ "Some fds were closed")
+
+ p = subprocess.Popen([sys.executable, fd_status],
+ stdout=subprocess.PIPE, close_fds=True)
+ output, ignored = p.communicate()
+ remaining_fds = set(map(int, output.split(',')))
+
+ self.assertFalse(remaining_fds & open_fds,
+ "Some fds were left open")
+ self.assertIn(1, remaining_fds, "Subprocess failed")
+
+ # Keep some of the fd's we opened open in the subprocess.
+ # This tests _posixsubprocess.c's proper handling of fds_to_keep.
+ fds_to_keep = set(open_fds.pop() for _ in range(8))
+ p = subprocess.Popen([sys.executable, fd_status],
+ stdout=subprocess.PIPE, close_fds=True,
+ pass_fds=fds_to_keep)
+ output, ignored = p.communicate()
+ remaining_fds = set(map(int, output.split(',')))
+
+ self.assertFalse(remaining_fds & open_fds,
+ "Some fds not in pass_fds were left open")
+ self.assertIn(1, remaining_fds, "Subprocess failed")
+
+
+ def test_close_fds_when_max_fd_is_lowered(self):
+ """Confirm that issue21618 is fixed (may fail under valgrind)."""
+ fd_status = test_support.findfile("testdata/fd_status.py")
+
+ open_fds = set()
+ # Add a bunch more fds to pass down.
+ for _ in range(40):
+ fd = os.open("/dev/null", os.O_RDONLY)
+ open_fds.add(fd)
+
+ # Leave two pairs of low ones available for use by the
+ # internal child error pipe and the stdout pipe.
+ # We also leave 10 more open for use by the Python 2 startup
+ # import machinery which tends to consume several at once.
+ for fd in sorted(open_fds)[:14]:
+ os.close(fd)
+ open_fds.remove(fd)
+
+ for fd in open_fds:
+ self.addCleanup(os.close, fd)
+
+ max_fd_open = max(open_fds)
+
+ import resource
+ rlim_cur, rlim_max = resource.getrlimit(resource.RLIMIT_NOFILE)
+ try:
+ # 29 is lower than the highest fds we are leaving open.
+ resource.setrlimit(resource.RLIMIT_NOFILE, (29, rlim_max))
+ # Launch a new Python interpreter with our low fd rlim_cur that
+ # inherits open fds above that limit. It then uses subprocess
+ # with close_fds=True to get a report of open fds in the child.
+ # An explicit list of fds to check is passed to fd_status.py as
+ # letting fd_status rely on its default logic would miss the
+ # fds above rlim_cur as it normally only checks up to that limit.
+ p = subprocess.Popen(
+ [sys.executable, '-c', yenv +
+ textwrap.dedent("""
+ import subprocess32, sys
+ subprocess32.Popen([sys.executable, %(fd_status)r] +
+ [str(x) for x in range(%(max_fd)d)],
+ close_fds=True).wait()
+ """ % dict(fd_status=fd_status, max_fd=max_fd_open+1))],
+ stdout=subprocess.PIPE, close_fds=False)
+ finally:
+ resource.setrlimit(resource.RLIMIT_NOFILE, (rlim_cur, rlim_max))
+
+ output, unused_stderr = p.communicate()
+ remaining_fds = set(map(int, output.strip().split(',')))
+
+ self.assertFalse(remaining_fds & open_fds,
+ msg="Some fds were left open.")
+
+
+ def test_pass_fds(self):
+ fd_status = test_support.findfile("testdata/fd_status.py")
+
+ open_fds = set()
+
+ for x in range(5):
+ fds = os.pipe()
+ self.addCleanup(os.close, fds[0])
+ self.addCleanup(os.close, fds[1])
+ open_fds.update(fds)
+
+ for fd in open_fds:
+ p = subprocess.Popen([sys.executable, fd_status],
+ stdout=subprocess.PIPE, close_fds=True,
+ pass_fds=(fd, ))
+ output, ignored = p.communicate()
+
+ remaining_fds = set(map(int, output.split(',')))
+ to_be_closed = open_fds - set((fd,))
+
+ self.assertIn(fd, remaining_fds, "fd to be passed not passed")
+ self.assertFalse(remaining_fds & to_be_closed,
+ "fd to be closed passed")
+
+ # Syntax requires Python 2.5, assertWarns requires Python 2.7.
+ #with self.assertWarns(RuntimeWarning) as context:
+ # self.assertFalse(subprocess.call(
+ # [sys.executable, "-c", yenv + "import sys; sys.exit(0)"],
+ # close_fds=False, pass_fds=(fd, )))
+ #self.assertIn('overriding close_fds', str(context.warning))
+
+ def test_stdout_stdin_are_single_inout_fd(self):
+ inout = open(os.devnull, "r+")
+ try:
+ p = subprocess.Popen([sys.executable, "-c", yenv + "import sys; sys.exit(0)"],
+ stdout=inout, stdin=inout)
+ p.wait()
+ finally:
+ inout.close()
+
+ def test_stdout_stderr_are_single_inout_fd(self):
+ inout = open(os.devnull, "r+")
+ try:
+ p = subprocess.Popen([sys.executable, "-c", yenv + "import sys; sys.exit(0)"],
+ stdout=inout, stderr=inout)
+ p.wait()
+ finally:
+ inout.close()
+
+ def test_stderr_stdin_are_single_inout_fd(self):
+ inout = open(os.devnull, "r+")
+ try:
+ p = subprocess.Popen([sys.executable, "-c", yenv + "import sys; sys.exit(0)"],
+ stderr=inout, stdin=inout)
+ p.wait()
+ finally:
+ inout.close()
+
+ def test_wait_when_sigchild_ignored(self):
+ # NOTE: sigchild_ignore.py may not be an effective test on all OSes.
+ sigchild_ignore = test_support.findfile("testdata/sigchild_ignore.py")
+ p = subprocess.Popen([sys.executable, sigchild_ignore],
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ stdout, stderr = p.communicate()
+ self.assertEqual(0, p.returncode, "sigchild_ignore.py exited"
+ " non-zero with this error:\n%s" % stderr)
+
+ def test_select_unbuffered(self):
+ # Issue #11459: bufsize=0 should really set the pipes as
+ # unbuffered (and therefore let select() work properly).
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys;'
+ 'sys.stdout.write("apple")'],
+ stdout=subprocess.PIPE,
+ bufsize=0)
+ f = p.stdout
+ self.addCleanup(f.close)
+ try:
+ self.assertEqual(f.read(4), "appl")
+ self.assertIn(f, select.select([f], [], [], 0.0)[0])
+ finally:
+ p.wait()
+
+ def test_zombie_fast_process_del(self):
+ # Issue #12650: on Unix, if Popen.__del__() was called before the
+ # process exited, it wouldn't be added to subprocess._active, and would
+ # remain a zombie.
+ # spawn a Popen, and delete its reference before it exits
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import sys, time;'
+ 'time.sleep(0.2)'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ self.addCleanup(p.stdout.close)
+ self.addCleanup(p.stderr.close)
+ ident = id(p)
+ pid = p.pid
+ del p
+ # check that p is in the active processes list
+ self.assertIn(ident, [id(o) for o in subprocess._active])
+
+ def test_leak_fast_process_del_killed(self):
+ # Issue #12650: on Unix, if Popen.__del__() was called before the
+ # process exited, and the process got killed by a signal, it would never
+ # be removed from subprocess._active, which triggered a FD and memory
+ # leak.
+ # spawn a Popen, delete its reference and kill it
+ p = subprocess.Popen([sys.executable, "-c", yenv +
+ 'import time;'
+ 'time.sleep(3)'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ self.addCleanup(p.stdout.close)
+ self.addCleanup(p.stderr.close)
+ ident = id(p)
+ pid = p.pid
+ del p
+ os.kill(pid, signal.SIGKILL)
+ # check that p is in the active processes list
+ self.assertIn(ident, [id(o) for o in subprocess._active])
+
+ # let some time for the process to exit, and create a new Popen: this
+ # should trigger the wait() of p
+ time.sleep(0.2)
+ try:
+ proc = subprocess.Popen(['nonexisting_i_hope'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ proc.__exit__(None, None, None)
+ except EnvironmentError:
+ pass
+ else:
+ self.fail("EnvironmentError not raised.")
+ # p should have been wait()ed on, and removed from the _active list
+ self.assertRaises(OSError, os.waitpid, pid, 0)
+ self.assertNotIn(ident, [id(o) for o in subprocess._active])
+
+ def test_close_fds_after_preexec(self):
+ fd_status = test_support.findfile("testdata/fd_status.py")
+
+ # this FD is used as dup2() target by preexec_fn, and should be closed
+ # in the child process
+ fd = os.dup(1)
+ self.addCleanup(os.close, fd)
+
+ p = subprocess.Popen([sys.executable, fd_status],
+ stdout=subprocess.PIPE, close_fds=True,
+ preexec_fn=lambda: os.dup2(1, fd))
+ output, ignored = p.communicate()
+
+ remaining_fds = set(map(int, output.split(',')))
+
+ self.assertNotIn(fd, remaining_fds)
+
+ def test_child_terminated_in_stopped_state(self):
+ """Test wait() behavior when waitpid returns WIFSTOPPED; issue29335."""
+ if not ctypes:
+ sys.stderr.write('ctypes module required.\n')
+ return
+ if not sys.executable:
+ sys.stderr.write('Test requires sys.executable.\n')
+ return
+ PTRACE_TRACEME = 0 # From glibc and MacOS (PT_TRACE_ME).
+ libc_name = ctypes.util.find_library('c')
+ libc = ctypes.CDLL(libc_name)
+ if not hasattr(libc, 'ptrace'):
+ sys.stderr.write('ptrace() required.\n')
+ return
+ test_ptrace = subprocess.Popen(
+ [sys.executable, '-c', yenv + """if True:
+ import ctypes
+ libc = ctypes.CDLL({libc_name!r})
+ libc.ptrace({PTRACE_TRACEME}, 0, 0)
+ """.format(libc_name=libc_name, PTRACE_TRACEME=PTRACE_TRACEME)
+ ])
+ if test_ptrace.wait() != 0:
+ sys.stderr.write('ptrace() failed - unable to test.\n')
+ return
+ child = subprocess.Popen(
+ [sys.executable, '-c', yenv + """if True:
+ import ctypes
+ libc = ctypes.CDLL({libc_name!r})
+ libc.ptrace({PTRACE_TRACEME}, 0, 0)
+ libc.printf(ctypes.c_char_p(0xdeadbeef)) # Crash the process.
+ """.format(libc_name=libc_name, PTRACE_TRACEME=PTRACE_TRACEME)
+ ])
+ try:
+ returncode = child.wait()
+ except Exception, e:
+ child.kill() # Clean up the hung stopped process.
+ raise e
+ self.assertNotEqual(0, returncode)
+ self.assert_(returncode < 0, msg=repr(returncode)) # signal death, likely SIGSEGV.
+
+
+if mswindows:
+ class POSIXProcessTestCase(unittest.TestCase): pass
+
+
+#@unittest.skipUnless(mswindows, "Windows specific tests")
+class Win32ProcessTestCase(BaseTestCase):
+
+ def test_startupinfo(self):
+ # startupinfo argument
+ # We use hardcoded constants because we do not want to
+ # depend on win32all.
+ STARTF_USESHOWWINDOW = 1
+ SW_MAXIMIZE = 3
+ startupinfo = subprocess.STARTUPINFO()
+ startupinfo.dwFlags = STARTF_USESHOWWINDOW
+ startupinfo.wShowWindow = SW_MAXIMIZE
+ # Since Python is a console process, it won't be affected
+ # by wShowWindow, but the argument should be silently
+ # ignored.
+ subprocess.call([sys.executable, "-c", yenv + "import sys; sys.exit(0)"],
+ startupinfo=startupinfo)
+
+ def test_creationflags(self):
+ # creationflags argument
+ CREATE_NEW_CONSOLE = 16
+ sys.stderr.write(" a DOS box should flash briefly ...\n")
+ subprocess.call(sys.executable +
+ ' -c "import time; time.sleep(0.25)"',
+ creationflags=CREATE_NEW_CONSOLE)
+
+ def test_invalid_args(self):
+ # invalid arguments should raise ValueError
+ self.assertRaises(ValueError, subprocess.call,
+ [sys.executable, "-c", yenv +
+ "import sys; sys.exit(47)"],
+ preexec_fn=lambda: 1)
+ self.assertRaises(ValueError, subprocess.call,
+ [sys.executable, "-c", yenv +
+ "import sys; sys.exit(47)"],
+ stdout=subprocess.PIPE,
+ close_fds=True)
+
+ def test_close_fds(self):
+ # close file descriptors
+ rc = subprocess.call([sys.executable, "-c", yenv +
+ "import sys; sys.exit(47)"],
+ close_fds=True)
+ self.assertEqual(rc, 47)
+
+ def test_shell_sequence(self):
+ # Run command through the shell (sequence)
+ newenv = os.environ.copy()
+ newenv["FRUIT"] = "physalis"
+ p = subprocess.Popen(["set"], shell=1,
+ stdout=subprocess.PIPE,
+ env=newenv)
+ self.assertIn("physalis", p.stdout.read())
+
+ def test_shell_string(self):
+ # Run command through the shell (string)
+ newenv = os.environ.copy()
+ newenv["FRUIT"] = "physalis"
+ p = subprocess.Popen("set", shell=1,
+ stdout=subprocess.PIPE,
+ env=newenv)
+ self.assertIn("physalis", p.stdout.read())
+
+ def test_call_string(self):
+ # call() function with string argument on Windows
+ rc = subprocess.call(sys.executable +
+ ' -c "import sys; sys.exit(47)"')
+ self.assertEqual(rc, 47)
+
+ def _kill_process(self, method, *args):
+ # Some win32 buildbots raise EOFError if stdin is inherited
+ p = subprocess.Popen([sys.executable, "-c", yenv + "input()"],
+ stdin=subprocess.PIPE, stderr=subprocess.PIPE)
+
+ # Let the process initialize (Issue #3137)
+ time.sleep(0.1)
+ # The process should not terminate prematurely
+ self.assert_(p.poll() is None)
+ # Retry if the process does not receive the signal.
+ count, maxcount = 0, 3
+ while count < maxcount and p.poll() is None:
+ getattr(p, method)(*args)
+ time.sleep(0.1)
+ count += 1
+
+ returncode = p.poll()
+ self.assert_(returncode is not None, "the subprocess did not terminate")
+ if count > 1:
+ print >>sys.stderr, ("p.{}{} succeeded after "
+ "{} attempts".format(method, args, count))
+ _, stderr = p.communicate()
+ self.assertStderrEqual(stderr, '')
+ self.assertEqual(p.wait(), returncode)
+ self.assertNotEqual(returncode, 0)
+
+ def test_send_signal(self):
+ self._kill_process('send_signal', signal.SIGTERM)
+
+ def test_kill(self):
+ self._kill_process('kill')
+
+ def test_terminate(self):
+ self._kill_process('terminate')
+
+
+if not mswindows:
+ class Win32ProcessTestCase(unittest.TestCase): pass
+
+
+#@unittest.skipUnless(getattr(subprocess, '_has_poll', False),
+# "poll system call not supported")
+class ProcessTestCaseNoPoll(ProcessTestCase):
+ def setUp(self):
+ subprocess._has_poll = False
+ ProcessTestCase.setUp(self)
+
+ def tearDown(self):
+ subprocess._has_poll = True
+ ProcessTestCase.tearDown(self)
+
+
+if not getattr(subprocess, '_has_poll', False):
+ class ProcessTestCaseNoPoll(unittest.TestCase): pass
+
+
+#@unittest.skipUnless(getattr(subprocess, '_posixsubprocess', False),
+# "_posixsubprocess extension module not found.")
+class ProcessTestCasePOSIXPurePython(ProcessTestCase, POSIXProcessTestCase):
+ def setUp(self):
+ subprocess._posixsubprocess = None
+ ProcessTestCase.setUp(self)
+ POSIXProcessTestCase.setUp(self)
+
+ def tearDown(self):
+ subprocess._posixsubprocess = sys.modules['_posixsubprocess32']
+ POSIXProcessTestCase.tearDown(self)
+ ProcessTestCase.tearDown(self)
+
+
+class POSIXSubprocessModuleTestCase(unittest.TestCase):
+ def test_fork_exec_sorted_fd_sanity_check(self):
+ # Issue #23564: sanity check the fork_exec() fds_to_keep sanity check.
+ _posixsubprocess = subprocess._posixsubprocess
+ gc_enabled = gc.isenabled()
+ try:
+ gc.enable()
+
+ for fds_to_keep in (
+ (-1, 2, 3, 4, 5), # Negative number.
+ ('str', 4), # Not an int.
+ (18, 23, 42, 2**63), # Out of range.
+ (5, 4), # Not sorted.
+ (6, 7, 7, 8), # Duplicate.
+ ):
+ try:
+ _posixsubprocess.fork_exec(
+ ["false"], ["false"],
+ True, fds_to_keep, None, ["env"],
+ -1, -1, -1, -1,
+ 1, 2, 3, 4,
+ True, True, None)
+ except ValueError, exception:
+ self.assertTrue('fds_to_keep' in str(exception),
+ msg=str(exception))
+ else:
+ self.fail("ValueError not raised, fds_to_keep=%s" %
+ (fds_to_keep,))
+ finally:
+ if not gc_enabled:
+ gc.disable()
+
+ def test_cloexec_pass_fds(self):
+ if not os.path.exists('/dev/null') or not os.path.isdir('/dev/fd'):
+ print("Skipped - This test requires /dev/null and /dev/fd/*.")
+ return
+ null_reader_proc = subprocess.Popen(
+ ["cat"],
+ stdin=open('/dev/null', 'rb'),
+ stdout=subprocess.PIPE)
+ try:
+ data = null_reader_proc.stdout
+ fd_name = '/dev/fd/%d' % data.fileno()
+ fd_reader_proc = subprocess.Popen(
+ ["cat", fd_name],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT, # Capture any error from cat.
+ pass_fds=(data.fileno(),))
+ try:
+ fddata = fd_reader_proc.stdout
+ self.assertEqual('', fddata.read())
+ finally:
+ fd_reader_proc.wait()
+ finally:
+ null_reader_proc.wait()
+
+
+if not getattr(subprocess, '_posixsubprocess', False):
+ print >>sys.stderr, "_posixsubprocess extension module not found."
+ class ProcessTestCasePOSIXPurePython(unittest.TestCase): pass
+ class POSIXSubprocessModuleTestCase(unittest.TestCase): pass
+
+
+class HelperFunctionTests(unittest.TestCase):
+ #@unittest.skipIf(mswindows, "errno and EINTR make no sense on windows")
+ def test_eintr_retry_call(self):
+ record_calls = []
+ def fake_os_func(*args):
+ record_calls.append(args)
+ if len(record_calls) == 2:
+ raise OSError(errno.EINTR, "fake interrupted system call")
+ return tuple(reversed(args))
+
+ self.assertEqual((999, 256),
+ subprocess._eintr_retry_call(fake_os_func, 256, 999))
+ self.assertEqual([(256, 999)], record_calls)
+ # This time there will be an EINTR so it will loop once.
+ self.assertEqual((666,),
+ subprocess._eintr_retry_call(fake_os_func, 666))
+ self.assertEqual([(256, 999), (666,), (666,)], record_calls)
+
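+ # On Windows the method is deleted below, so unittest never collects
+ # this POSIX-only test (errno/EINTR make no sense there).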
+ if mswindows:
+ del test_eintr_retry_call
+
+ if not hasattr(unittest.TestCase, 'assertSequenceEqual'):
+ def assertSequenceEqual(self, seq1, seq2):
+ self.assertEqual(list(seq1), list(seq2))
+
+ def test_get_exec_path(self):
+ defpath_list = os.defpath.split(os.pathsep)
+ test_path = ['/monty', '/python', '', '/flying/circus']
+ test_env = {'PATH': os.pathsep.join(test_path)}
+
+ get_exec_path = subprocess._get_exec_path
+ saved_environ = os.environ
+ try:
+ os.environ = dict(test_env)
+ # Test that defaulting to os.environ works.
+ self.assertSequenceEqual(test_path, get_exec_path())
+ self.assertSequenceEqual(test_path, get_exec_path(env=None))
+ finally:
+ os.environ = saved_environ
+
+ # No PATH environment variable
+ self.assertSequenceEqual(defpath_list, get_exec_path({}))
+ # Empty PATH environment variable
+ self.assertSequenceEqual(('',), get_exec_path({'PATH':''}))
+ # Supplied PATH environment variable
+ self.assertSequenceEqual(test_path, get_exec_path(test_env))
+
+ def test_args_from_interpreter_flags(self):
+ if sys.version_info[:2] < (2,6):
+ print "Skipped - only useful on 2.6 and higher."
+ return
+ # Mostly just to call it for code coverage.
+ args_list = subprocess32._args_from_interpreter_flags()
+ self.assertTrue(isinstance(args_list, list), msg=repr(args_list))
+
+ def test_timeout_expired_unpickling(self):
+ """https://github.com/google/python-subprocess32/issues/57"""
+ t = subprocess32.TimeoutExpired(['command', 'arg1'], 5,
+ output='stdout!', stderr='err')
+ t_pickled = pickle.dumps(t)
+ t2 = pickle.loads(t_pickled)
+ self.assertEqual(t.cmd, t2.cmd)
+ self.assertEqual(t.timeout, t2.timeout)
+ self.assertEqual(t.output, t2.output)
+ self.assertEqual(t.stderr, t2.stderr)
+
+ def test_called_process_error_unpickling(self):
+ """https://github.com/google/python-subprocess32/issues/57"""
+ e = subprocess32.CalledProcessError(
+ 2, ['command', 'arg1'], output='stdout!', stderr='err')
+ e_pickled = pickle.dumps(e)
+ e2 = pickle.loads(e_pickled)
+ self.assertEqual(e.returncode, e2.returncode)
+ self.assertEqual(e.cmd, e2.cmd)
+ self.assertEqual(e.output, e2.output)
+ self.assertEqual(e.stderr, e2.stderr)
+
+
+def reap_children():
+ """Use this function at the end of test_main() whenever sub-processes
+ are started. This will help ensure that no extra children (zombies)
+ stick around to hog resources and create problems when looking
+ for refleaks.
+ """
+
+ # Reap all our dead child processes so we don't leave zombies around.
+ # These hog resources and might be causing some of the buildbots to die.
+ if hasattr(os, 'waitpid'):
+ any_process = -1
+ while True:
+ try:
+ # This will raise an exception on Windows. That's ok.
+ pid, status = os.waitpid(any_process, os.WNOHANG)
+ if pid == 0:
+ break
+ except:
+ break
+
+
+
+class ContextManagerTests(BaseTestCase):
+
+ def test_pipe(self):
+ proc = subprocess.Popen([sys.executable, "-c", yenv +
+ "import sys;"
+ "sys.stdout.write('stdout');"
+ "sys.stderr.write('stderr');"],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ try:
+ self.assertEqual(proc.stdout.read(), "stdout")
+ self.assertStderrEqual(proc.stderr.read(), "stderr")
+ finally:
+ proc.__exit__(None, None, None)
+
+ self.assertTrue(proc.stdout.closed)
+ self.assertTrue(proc.stderr.closed)
+
+ def test_returncode(self):
+ proc = subprocess.Popen([sys.executable, "-c", yenv +
+ "import sys; sys.exit(100)"])
+ proc.__exit__(None, None, None)
+ # __exit__ calls wait(), so the returncode should be set
+ self.assertEqual(proc.returncode, 100)
+
+ def test_communicate_stdin(self):
+ proc = subprocess.Popen([sys.executable, "-c", yenv +
+ "import sys;"
+ "sys.exit(sys.stdin.read() == 'context')"],
+ stdin=subprocess.PIPE)
+ try:
+ proc.communicate("context")
+ self.assertEqual(proc.returncode, 1)
+ finally:
+ proc.__exit__(None, None, None)
+
+ def test_invalid_args(self):
+ try:
+ proc = subprocess.Popen(['nonexisting_i_hope'],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ proc.__exit__(None, None, None)
+ except EnvironmentError, exception:
+ # ignore errors that indicate the command was not found
+ if exception.errno not in (errno.ENOENT, errno.EACCES):
+ raise
+ else:
+ self.fail("Expected an EnvironmentError exception.")
+
+
+if sys.version_info[:2] <= (2,4):
+ # The test suite hangs during the pure python test on 2.4. No idea why.
+ # That is not the implementation anyone is using this module for anyway.
+ class ProcessTestCasePOSIXPurePython(unittest.TestCase): pass
+
+
+def main():
+ unit_tests = (ProcessTestCase,
+ POSIXProcessTestCase,
+ POSIXSubprocessModuleTestCase,
+ Win32ProcessTestCase,
+ ProcessTestCasePOSIXPurePython,
+ ProcessTestCaseNoPoll,
+ HelperFunctionTests,
+ ContextManagerTests,
+ RunFuncTestCase,
+ )
+
+ test_support.run_unittest(*unit_tests)
+ reap_children()
+
+if __name__ == "__main__":
+ main()
diff --git a/contrib/deprecated/python/subprocess32/testdata/fd_status.py b/contrib/deprecated/python/subprocess32/testdata/fd_status.py
new file mode 100644
index 0000000000..67fb41c0af
--- /dev/null
+++ b/contrib/deprecated/python/subprocess32/testdata/fd_status.py
@@ -0,0 +1,34 @@
+"""When called as a script, print a comma-separated list of the open
+file descriptors on stdout.
+
+Usage:
+fd_status.py: check all file descriptors
+fd_status.py fd1 fd2 ...: check only specified file descriptors
+"""
+
+import errno
+import os
+import stat
+import sys
+
+if __name__ == "__main__":
+ fds = []
+ if len(sys.argv) == 1:
+ try:
+ _MAXFD = os.sysconf("SC_OPEN_MAX")
+ except:
+ _MAXFD = 256
+ test_fds = range(0, _MAXFD)
+ else:
+ test_fds = map(int, sys.argv[1:])
+ for fd in test_fds:
+ try:
+ st = os.fstat(fd)
+ except OSError, e:
+ if e.errno == errno.EBADF:
+ continue
+ raise
+ # Ignore Solaris door files
+ if not hasattr(stat, 'S_ISDOOR') or not stat.S_ISDOOR(st.st_mode):
+ fds.append(fd)
+ print ','.join(map(str, fds))
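+# Example: run directly from an interactive shell, this typically prints
+# "0,1,2", i.e. just the inherited standard streams.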
diff --git a/contrib/deprecated/python/subprocess32/testdata/input_reader.py b/contrib/deprecated/python/subprocess32/testdata/input_reader.py
new file mode 100644
index 0000000000..1dc3191ad1
--- /dev/null
+++ b/contrib/deprecated/python/subprocess32/testdata/input_reader.py
@@ -0,0 +1,7 @@
+"""When called as a script, consumes the input"""
+
+import sys
+
+if __name__ == "__main__":
+ for line in sys.stdin:
+ pass
diff --git a/contrib/deprecated/python/subprocess32/testdata/qcat.py b/contrib/deprecated/python/subprocess32/testdata/qcat.py
new file mode 100644
index 0000000000..fe6f9db25c
--- /dev/null
+++ b/contrib/deprecated/python/subprocess32/testdata/qcat.py
@@ -0,0 +1,7 @@
+"""When ran as a script, simulates cat with no arguments."""
+
+import sys
+
+if __name__ == "__main__":
+ for line in sys.stdin:
+ sys.stdout.write(line)
diff --git a/contrib/deprecated/python/subprocess32/testdata/qgrep.py b/contrib/deprecated/python/subprocess32/testdata/qgrep.py
new file mode 100644
index 0000000000..69906379a9
--- /dev/null
+++ b/contrib/deprecated/python/subprocess32/testdata/qgrep.py
@@ -0,0 +1,10 @@
+"""When called with a single argument, simulated fgrep with a single
+argument and no options."""
+
+import sys
+
+if __name__ == "__main__":
+ pattern = sys.argv[1]
+ for line in sys.stdin:
+ if pattern in line:
+ sys.stdout.write(line)
diff --git a/contrib/deprecated/python/subprocess32/testdata/sigchild_ignore.py b/contrib/deprecated/python/subprocess32/testdata/sigchild_ignore.py
new file mode 100644
index 0000000000..ba5ccf2cf0
--- /dev/null
+++ b/contrib/deprecated/python/subprocess32/testdata/sigchild_ignore.py
@@ -0,0 +1,18 @@
+import signal, subprocess32, sys, time
+# On Linux this causes os.waitpid to fail with OSError as the OS has already
+# reaped our child process. We are testing that wait() does not pass that
+# OSError on to the caller and cause us to exit with an error.
+sig_child = getattr(signal, 'SIGCLD', None)
+if sig_child is None:
+ sig_child = getattr(signal, 'SIGCHLD')
+signal.signal(sig_child, signal.SIG_IGN)
+subprocess32.Popen([sys.executable, '-c', 'print("albatross")']).wait()
+# Also ensure poll() handles an errno.ECHILD appropriately.
+p = subprocess32.Popen([sys.executable, '-c', 'print("albatross")'])
+num_polls = 0
+while p.poll() is None:
+ # Waiting for the process to finish.
+ time.sleep(0.01) # Avoid being a CPU busy loop.
+ num_polls += 1
+ if num_polls > 3000:
+ raise RuntimeError('poll should have returned 0 within 30 seconds')
diff --git a/contrib/deprecated/python/subprocess32/testdata/ya.make b/contrib/deprecated/python/subprocess32/testdata/ya.make
new file mode 100644
index 0000000000..4611e078bb
--- /dev/null
+++ b/contrib/deprecated/python/subprocess32/testdata/ya.make
@@ -0,0 +1,19 @@
+PY2TEST()
+
+SIZE(MEDIUM)
+
+NO_LINT()
+
+SRCDIR(
+ contrib/deprecated/python/subprocess32
+)
+
+TEST_SRCS(
+ test_subprocess32.py
+)
+
+TEST_CWD(
+ contrib/deprecated/python/subprocess32
+)
+
+END()
diff --git a/contrib/deprecated/python/subprocess32/ya.make b/contrib/deprecated/python/subprocess32/ya.make
new file mode 100644
index 0000000000..9613acd8c0
--- /dev/null
+++ b/contrib/deprecated/python/subprocess32/ya.make
@@ -0,0 +1,35 @@
+PY2_LIBRARY() # Backport from Python 3.
+
+LICENSE(PSF-2.0)
+
+VERSION(3.5.4)
+
+COPY_FILE(subprocess32.py subprocess.py)
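+# Together with the PY_SRCS entry in the non-Windows branch below, this copy
+# also makes the module importable under the stdlib name "subprocess".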
+
+PY_SRCS(
+ TOP_LEVEL
+ subprocess32.py
+)
+
+IF (NOT OS_WINDOWS)
+ NO_COMPILER_WARNINGS()
+
+ SRCS(
+ _posixsubprocess.c
+ )
+
+ PY_REGISTER(_posixsubprocess32)
+
+ PY_SRCS(
+ TOP_LEVEL
+ subprocess.py
+ )
+ENDIF ()
+
+NO_LINT()
+
+END()
+
+RECURSE_FOR_TESTS(
+ testdata
+)
diff --git a/contrib/deprecated/python/typing/.dist-info/METADATA b/contrib/deprecated/python/typing/.dist-info/METADATA
new file mode 100644
index 0000000000..30047d3e60
--- /dev/null
+++ b/contrib/deprecated/python/typing/.dist-info/METADATA
@@ -0,0 +1,50 @@
+Metadata-Version: 2.1
+Name: typing
+Version: 3.10.0.0
+Summary: Type Hints for Python
+Home-page: https://docs.python.org/3/library/typing.html
+Author: Guido van Rossum, Jukka Lehtosalo, Łukasz Langa, Ivan Levkivskyi
+Author-email: jukka.lehtosalo@iki.fi
+License: PSF
+Project-URL: Source, https://github.com/python/typing
+Keywords: typing function annotations type hints hinting checking checker typehints typehinting typechecking backport
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Topic :: Software Development
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <3.5
+
+Typing -- Type Hints for Python
+
+This is a backport of the standard library typing module to Python
+versions older than 3.5. (See note below for newer versions.)
+
+Typing defines a standard notation for Python function and variable
+type annotations. The notation can be used for documenting code in a
+concise, standard format, and it has been designed to also be used by
+static and runtime type checkers, static analyzers, IDEs and other
+tools.
+
+NOTE: in Python 3.5 and later, the typing module lives in the stdlib,
+and installing this package has NO EFFECT, because stdlib takes higher
+precedence than the installation directory. To get a newer version of
+the typing module in Python 3.5 or later, you have to upgrade to a
+newer Python (bugfix) version. For example, typing in Python 3.6.0 is
+missing the definition of 'Type' -- upgrading to 3.6.2 will fix this.
+
+Also note that most improvements to the typing module in Python 3.7
+will not be included in this package, since Python 3.7 has some
+built-in support that is not present in older versions (See PEP 560.)
+
+For package maintainers, it is preferred to use
+``typing;python_version<"3.5"`` if your package requires it to support
+earlier Python versions. This will avoid shadowing the stdlib typing
+module when your package is installed via ``pip install -t .`` on
+Python 3.5 or later.
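+
+For example, a package's setup() call might include the following
+(an illustrative sketch, not taken from this distribution)::
+
+ install_requires=['typing;python_version<"3.5"']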
+
+
diff --git a/contrib/deprecated/python/typing/.dist-info/top_level.txt b/contrib/deprecated/python/typing/.dist-info/top_level.txt
new file mode 100644
index 0000000000..c997f364b4
--- /dev/null
+++ b/contrib/deprecated/python/typing/.dist-info/top_level.txt
@@ -0,0 +1 @@
+typing
diff --git a/contrib/deprecated/python/typing/LICENSE b/contrib/deprecated/python/typing/LICENSE
new file mode 100644
index 0000000000..583f9f6e61
--- /dev/null
+++ b/contrib/deprecated/python/typing/LICENSE
@@ -0,0 +1,254 @@
+A. HISTORY OF THE SOFTWARE
+==========================
+
+Python was created in the early 1990s by Guido van Rossum at Stichting
+Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands
+as a successor of a language called ABC. Guido remains Python's
+principal author, although it includes many contributions from others.
+
+In 1995, Guido continued his work on Python at the Corporation for
+National Research Initiatives (CNRI, see http://www.cnri.reston.va.us)
+in Reston, Virginia where he released several versions of the
+software.
+
+In May 2000, Guido and the Python core development team moved to
+BeOpen.com to form the BeOpen PythonLabs team. In October of the same
+year, the PythonLabs team moved to Digital Creations (now Zope
+Corporation, see http://www.zope.com). In 2001, the Python Software
+Foundation (PSF, see http://www.python.org/psf/) was formed, a
+non-profit organization created specifically to own Python-related
+Intellectual Property. Zope Corporation is a sponsoring member of
+the PSF.
+
+All Python releases are Open Source (see http://www.opensource.org for
+the Open Source Definition). Historically, most, but not all, Python
+releases have also been GPL-compatible; the table below summarizes
+the various releases.
+
+ Release Derived Year Owner GPL-
+ from compatible? (1)
+
+ 0.9.0 thru 1.2 1991-1995 CWI yes
+ 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes
+ 1.6 1.5.2 2000 CNRI no
+ 2.0 1.6 2000 BeOpen.com no
+ 1.6.1 1.6 2001 CNRI yes (2)
+ 2.1 2.0+1.6.1 2001 PSF no
+ 2.0.1 2.0+1.6.1 2001 PSF yes
+ 2.1.1 2.1+2.0.1 2001 PSF yes
+ 2.1.2 2.1.1 2002 PSF yes
+ 2.1.3 2.1.2 2002 PSF yes
+ 2.2 and above 2.1.1 2001-now PSF yes
+
+Footnotes:
+
+(1) GPL-compatible doesn't mean that we're distributing Python under
+ the GPL. All Python licenses, unlike the GPL, let you distribute
+ a modified version without making your changes open source. The
+ GPL-compatible licenses make it possible to combine Python with
+ other software that is released under the GPL; the others don't.
+
+(2) According to Richard Stallman, 1.6.1 is not GPL-compatible,
+ because its license has a choice of law clause. According to
+ CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1
+ is "not incompatible" with the GPL.
+
+Thanks to the many outside volunteers who have worked under Guido's
+direction to make these releases possible.
+
+
+B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON
+===============================================================
+
+PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
+--------------------------------------------
+
+1. This LICENSE AGREEMENT is between the Python Software Foundation
+("PSF"), and the Individual or Organization ("Licensee") accessing and
+otherwise using this software ("Python") in source or binary form and
+its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, PSF hereby
+grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
+analyze, test, perform and/or display publicly, prepare derivative works,
+distribute, and otherwise use Python alone or in any derivative version,
+provided, however, that PSF's License Agreement and PSF's notice of copyright,
+i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
+2011, 2012, 2013, 2014 Python Software Foundation; All Rights Reserved" are
+retained in Python alone or in any derivative version prepared by Licensee.
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python.
+
+4. PSF is making Python available to Licensee on an "AS IS"
+basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. Nothing in this License Agreement shall be deemed to create any
+relationship of agency, partnership, or joint venture between PSF and
+Licensee. This License Agreement does not grant permission to use PSF
+trademarks or trade name in a trademark sense to endorse or promote
+products or services of Licensee, or any third party.
+
+8. By copying, installing or otherwise using Python, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0
+-------------------------------------------
+
+BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1
+
+1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an
+office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the
+Individual or Organization ("Licensee") accessing and otherwise using
+this software in source or binary form and its associated
+documentation ("the Software").
+
+2. Subject to the terms and conditions of this BeOpen Python License
+Agreement, BeOpen hereby grants Licensee a non-exclusive,
+royalty-free, world-wide license to reproduce, analyze, test, perform
+and/or display publicly, prepare derivative works, distribute, and
+otherwise use the Software alone or in any derivative version,
+provided, however, that the BeOpen Python License is retained in the
+Software, alone or in any derivative version prepared by Licensee.
+
+3. BeOpen is making the Software available to Licensee on an "AS IS"
+basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE
+SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS
+AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY
+DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+5. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+6. This License Agreement shall be governed by and interpreted in all
+respects by the law of the State of California, excluding conflict of
+law provisions. Nothing in this License Agreement shall be deemed to
+create any relationship of agency, partnership, or joint venture
+between BeOpen and Licensee. This License Agreement does not grant
+permission to use BeOpen trademarks or trade names in a trademark
+sense to endorse or promote products or services of Licensee, or any
+third party. As an exception, the "BeOpen Python" logos available at
+http://www.pythonlabs.com/logos.html may be used according to the
+permissions granted on that web page.
+
+7. By copying, installing or otherwise using the software, Licensee
+agrees to be bound by the terms and conditions of this License
+Agreement.
+
+
+CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1
+---------------------------------------
+
+1. This LICENSE AGREEMENT is between the Corporation for National
+Research Initiatives, having an office at 1895 Preston White Drive,
+Reston, VA 20191 ("CNRI"), and the Individual or Organization
+("Licensee") accessing and otherwise using Python 1.6.1 software in
+source or binary form and its associated documentation.
+
+2. Subject to the terms and conditions of this License Agreement, CNRI
+hereby grants Licensee a nonexclusive, royalty-free, world-wide
+license to reproduce, analyze, test, perform and/or display publicly,
+prepare derivative works, distribute, and otherwise use Python 1.6.1
+alone or in any derivative version, provided, however, that CNRI's
+License Agreement and CNRI's notice of copyright, i.e., "Copyright (c)
+1995-2001 Corporation for National Research Initiatives; All Rights
+Reserved" are retained in Python 1.6.1 alone or in any derivative
+version prepared by Licensee. Alternately, in lieu of CNRI's License
+Agreement, Licensee may substitute the following text (omitting the
+quotes): "Python 1.6.1 is made available subject to the terms and
+conditions in CNRI's License Agreement. This Agreement together with
+Python 1.6.1 may be located on the Internet using the following
+unique, persistent identifier (known as a handle): 1895.22/1013. This
+Agreement may also be obtained from a proxy server on the Internet
+using the following URL: http://hdl.handle.net/1895.22/1013".
+
+3. In the event Licensee prepares a derivative work that is based on
+or incorporates Python 1.6.1 or any part thereof, and wants to make
+the derivative work available to others as provided herein, then
+Licensee hereby agrees to include in any such work a brief summary of
+the changes made to Python 1.6.1.
+
+4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS"
+basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR
+IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND
+DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS
+FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT
+INFRINGE ANY THIRD PARTY RIGHTS.
+
+5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON
+1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS
+A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1,
+OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF.
+
+6. This License Agreement will automatically terminate upon a material
+breach of its terms and conditions.
+
+7. This License Agreement shall be governed by the federal
+intellectual property law of the United States, including without
+limitation the federal copyright law, and, to the extent such
+U.S. federal law does not apply, by the law of the Commonwealth of
+Virginia, excluding Virginia's conflict of law provisions.
+Notwithstanding the foregoing, with regard to derivative works based
+on Python 1.6.1 that incorporate non-separable material that was
+previously distributed under the GNU General Public License (GPL), the
+law of the Commonwealth of Virginia shall govern this License
+Agreement only as to issues arising under or with respect to
+Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this
+License Agreement shall be deemed to create any relationship of
+agency, partnership, or joint venture between CNRI and Licensee. This
+License Agreement does not grant permission to use CNRI trademarks or
+trade name in a trademark sense to endorse or promote products or
+services of Licensee, or any third party.
+
+8. By clicking on the "ACCEPT" button where indicated, or by copying,
+installing or otherwise using Python 1.6.1, Licensee agrees to be
+bound by the terms and conditions of this License Agreement.
+
+ ACCEPT
+
+
+CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2
+--------------------------------------------------
+
+Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam,
+The Netherlands. All rights reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee is hereby granted,
+provided that the above copyright notice appear in all copies and that
+both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of Stichting Mathematisch
+Centrum or CWI not be used in advertising or publicity pertaining to
+distribution of the software without specific, written prior
+permission.
+
+STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE
+FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/contrib/deprecated/python/typing/test/mod_generics_cache.py b/contrib/deprecated/python/typing/test/mod_generics_cache.py
new file mode 100644
index 0000000000..d9a60b4b28
--- /dev/null
+++ b/contrib/deprecated/python/typing/test/mod_generics_cache.py
@@ -0,0 +1,14 @@
+"""Module for testing the behavior of generics across different modules."""
+
+from typing import TypeVar, Generic
+
+T = TypeVar('T')
+
+
+class A(Generic[T]):
+ pass
+
+
+class B(Generic[T]):
+ class A(Generic[T]):
+ pass
diff --git a/contrib/deprecated/python/typing/test/test_typing.py b/contrib/deprecated/python/typing/test/test_typing.py
new file mode 100644
index 0000000000..2f260bac42
--- /dev/null
+++ b/contrib/deprecated/python/typing/test/test_typing.py
@@ -0,0 +1,2706 @@
+from __future__ import absolute_import, unicode_literals
+
+import collections
+import contextlib
+import os
+import pickle
+import re
+import subprocess
+import sys
+import abc
+import types
+from unittest import TestCase, main, SkipTest
+from copy import copy, deepcopy
+
+from typing import Any, NoReturn
+from typing import TypeVar, AnyStr
+from typing import T, KT, VT # Not in __all__.
+from typing import Union, Optional
+from typing import Tuple, List, MutableMapping
+from typing import Callable
+from typing import Generic, ClassVar, GenericMeta, Final, Literal
+from typing import cast
+from typing import Type, Protocol, runtime_checkable
+from typing import NewType
+from typing import NamedTuple, TypedDict
+from typing import Pattern, Match
+import typing
+import weakref
+
+
+class BaseTestCase(TestCase):
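+    """Shared helpers for this suite: issubclass-style assertions that
+    unittest lacks, plus clear_caches() to flush typing's internal caches
+    (via typing._cleanups) for cache-sensitive tests."""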
+
+ def assertIsSubclass(self, cls, class_or_tuple, msg=None):
+ if not issubclass(cls, class_or_tuple):
+ message = '%r is not a subclass of %r' % (cls, class_or_tuple)
+ if msg is not None:
+ message += ' : %s' % msg
+ raise self.failureException(message)
+
+ def assertNotIsSubclass(self, cls, class_or_tuple, msg=None):
+ if issubclass(cls, class_or_tuple):
+ message = '%r is a subclass of %r' % (cls, class_or_tuple)
+ if msg is not None:
+ message += ' : %s' % msg
+ raise self.failureException(message)
+
+ def clear_caches(self):
+ for f in typing._cleanups:
+ f()
+
+
+class Employee(object):
+ pass
+
+
+class Manager(Employee):
+ pass
+
+
+class Founder(Employee):
+ pass
+
+
+class ManagingFounder(Manager, Founder):
+ pass
+
+
+class AnyTests(BaseTestCase):
+
+ def test_any_instance_type_error(self):
+ with self.assertRaises(TypeError):
+ isinstance(42, Any)
+
+ def test_any_subclass_type_error(self):
+ with self.assertRaises(TypeError):
+ issubclass(Employee, Any)
+ with self.assertRaises(TypeError):
+ issubclass(Any, Employee)
+
+ def test_repr(self):
+ self.assertEqual(repr(Any), 'typing.Any')
+
+ def test_errors(self):
+ with self.assertRaises(TypeError):
+ issubclass(42, Any)
+ with self.assertRaises(TypeError):
+ Any[int] # Any is not a generic type.
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError):
+ class A(Any):
+ pass
+ with self.assertRaises(TypeError):
+ class A(type(Any)):
+ pass
+
+ def test_cannot_instantiate(self):
+ with self.assertRaises(TypeError):
+ Any()
+ with self.assertRaises(TypeError):
+ type(Any)()
+
+ def test_any_is_subclass(self):
+ # These expressions must simply not fail.
+ typing.Match[Any]
+ typing.Pattern[Any]
+ typing.IO[Any]
+
+
+class NoReturnTests(BaseTestCase):
+
+ def test_noreturn_instance_type_error(self):
+ with self.assertRaises(TypeError):
+ isinstance(42, NoReturn)
+
+ def test_noreturn_subclass_type_error(self):
+ with self.assertRaises(TypeError):
+ issubclass(Employee, NoReturn)
+ with self.assertRaises(TypeError):
+ issubclass(NoReturn, Employee)
+
+ def test_repr(self):
+ self.assertEqual(repr(NoReturn), 'typing.NoReturn')
+
+ def test_not_generic(self):
+ with self.assertRaises(TypeError):
+ NoReturn[int]
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError):
+ class A(NoReturn):
+ pass
+ with self.assertRaises(TypeError):
+ class A(type(NoReturn)):
+ pass
+
+ def test_cannot_instantiate(self):
+ with self.assertRaises(TypeError):
+ NoReturn()
+ with self.assertRaises(TypeError):
+ type(NoReturn)()
+
+
+class TypeVarTests(BaseTestCase):
+
+ def test_basic_plain(self):
+ T = TypeVar('T')
+ # T equals itself.
+ self.assertEqual(T, T)
+ # T is an instance of TypeVar
+ self.assertIsInstance(T, TypeVar)
+
+ def test_typevar_instance_type_error(self):
+ T = TypeVar('T')
+ with self.assertRaises(TypeError):
+ isinstance(42, T)
+
+ def test_typevar_subclass_type_error(self):
+ T = TypeVar('T')
+ with self.assertRaises(TypeError):
+ issubclass(int, T)
+ with self.assertRaises(TypeError):
+ issubclass(T, int)
+
+ def test_constrained_error(self):
+ with self.assertRaises(TypeError):
+ X = TypeVar('X', int)
+ X
+
+ def test_union_unique(self):
+ X = TypeVar('X')
+ Y = TypeVar('Y')
+ self.assertNotEqual(X, Y)
+ self.assertEqual(Union[X], X)
+ self.assertNotEqual(Union[X], Union[X, Y])
+ self.assertEqual(Union[X, X], X)
+ self.assertNotEqual(Union[X, int], Union[X])
+ self.assertNotEqual(Union[X, int], Union[int])
+ self.assertEqual(Union[X, int].__args__, (X, int))
+ self.assertEqual(Union[X, int].__parameters__, (X,))
+ self.assertIs(Union[X, int].__origin__, Union)
+
+ def test_union_constrained(self):
+ A = TypeVar('A', str, bytes)
+ self.assertNotEqual(Union[A, str], Union[A])
+
+ def test_repr(self):
+ self.assertEqual(repr(T), '~T')
+ self.assertEqual(repr(KT), '~KT')
+ self.assertEqual(repr(VT), '~VT')
+ self.assertEqual(repr(AnyStr), '~AnyStr')
+ T_co = TypeVar('T_co', covariant=True)
+ self.assertEqual(repr(T_co), '+T_co')
+ T_contra = TypeVar('T_contra', contravariant=True)
+ self.assertEqual(repr(T_contra), '-T_contra')
+
+ def test_no_redefinition(self):
+ self.assertNotEqual(TypeVar('T'), TypeVar('T'))
+ self.assertNotEqual(TypeVar('T', int, str), TypeVar('T', int, str))
+
+ def test_cannot_subclass_vars(self):
+ with self.assertRaises(TypeError):
+ class V(TypeVar('T')):
+ pass
+
+ def test_cannot_subclass_var_itself(self):
+ with self.assertRaises(TypeError):
+ class V(TypeVar):
+ pass
+
+ def test_cannot_instantiate_vars(self):
+ with self.assertRaises(TypeError):
+ TypeVar('A')()
+
+ def test_bound_errors(self):
+ with self.assertRaises(TypeError):
+ TypeVar('X', bound=42)
+ with self.assertRaises(TypeError):
+ TypeVar('X', str, float, bound=Employee)
+
+ def test_no_bivariant(self):
+ with self.assertRaises(ValueError):
+ TypeVar('T', covariant=True, contravariant=True)
+
+
+class UnionTests(BaseTestCase):
+
+ def test_basics(self):
+ u = Union[int, float]
+ self.assertNotEqual(u, Union)
+
+ def test_subclass_error(self):
+ with self.assertRaises(TypeError):
+ issubclass(int, Union)
+ with self.assertRaises(TypeError):
+ issubclass(Union, int)
+ with self.assertRaises(TypeError):
+ issubclass(int, Union[int, str])
+ with self.assertRaises(TypeError):
+ issubclass(Union[int, str], int)
+
+ def test_union_any(self):
+ u = Union[Any]
+ self.assertEqual(u, Any)
+ u1 = Union[int, Any]
+ u2 = Union[Any, int]
+ u3 = Union[Any, object]
+ self.assertEqual(u1, u2)
+ self.assertNotEqual(u1, Any)
+ self.assertNotEqual(u2, Any)
+ self.assertNotEqual(u3, Any)
+
+ def test_union_object(self):
+ u = Union[object]
+ self.assertEqual(u, object)
+ u = Union[int, object]
+ self.assertEqual(u, object)
+ u = Union[object, int]
+ self.assertEqual(u, object)
+
+ def test_unordered(self):
+ u1 = Union[int, float]
+ u2 = Union[float, int]
+ self.assertEqual(u1, u2)
+
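+    # Union normalizes its arguments: one-element unions collapse to the
+    # element, members subsumed by a base class disappear, and nested
+    # unions flatten, as the next three tests show.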
+ def test_single_class_disappears(self):
+ t = Union[Employee]
+ self.assertIs(t, Employee)
+
+ def test_base_class_disappears(self):
+ u = Union[Employee, Manager, int]
+ self.assertEqual(u, Union[int, Employee])
+ u = Union[Manager, int, Employee]
+ self.assertEqual(u, Union[int, Employee])
+ u = Union[Employee, Manager]
+ self.assertIs(u, Employee)
+
+ def test_union_union(self):
+ u = Union[int, float]
+ v = Union[u, Employee]
+ self.assertEqual(v, Union[int, float, Employee])
+
+ def test_repr(self):
+ self.assertEqual(repr(Union), 'typing.Union')
+ u = Union[Employee, int]
+ self.assertEqual(repr(u), 'typing.Union[%s.Employee, int]' % __name__)
+ u = Union[int, Employee]
+ self.assertEqual(repr(u), 'typing.Union[int, %s.Employee]' % __name__)
+ T = TypeVar('T')
+ u = Union[T, int][int]
+ self.assertEqual(repr(u), repr(int))
+ u = Union[List[int], int]
+ self.assertEqual(repr(u), 'typing.Union[typing.List[int], int]')
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError):
+ class C(Union):
+ pass
+ with self.assertRaises(TypeError):
+ class C(type(Union)):
+ pass
+ with self.assertRaises(TypeError):
+ class C(Union[int, str]):
+ pass
+
+ def test_cannot_instantiate(self):
+ with self.assertRaises(TypeError):
+ Union()
+ u = Union[int, float]
+ with self.assertRaises(TypeError):
+ u()
+ with self.assertRaises(TypeError):
+ type(u)()
+
+ def test_union_generalization(self):
+ self.assertFalse(Union[str, typing.Iterable[int]] == str)
+ self.assertFalse(Union[str, typing.Iterable[int]] == typing.Iterable[int])
+ self.assertTrue(Union[str, typing.Iterable] == typing.Iterable)
+
+ def test_union_compare_other(self):
+ self.assertNotEqual(Union, object)
+ self.assertNotEqual(Union, Any)
+ self.assertNotEqual(ClassVar, Union)
+ self.assertNotEqual(Optional, Union)
+ self.assertNotEqual([None], Optional)
+ self.assertNotEqual(Optional, typing.Mapping)
+ self.assertNotEqual(Optional[typing.MutableMapping], Union)
+
+ def test_optional(self):
+ o = Optional[int]
+ u = Union[int, None]
+ self.assertEqual(o, u)
+
+ def test_empty(self):
+ with self.assertRaises(TypeError):
+ Union[()]
+
+ def test_union_instance_type_error(self):
+ with self.assertRaises(TypeError):
+ isinstance(42, Union[int, str])
+
+ def test_no_eval_union(self):
+ u = Union[int, str]
+ self.assertIs(u._eval_type({}, {}), u)
+
+ def test_function_repr_union(self):
+ def fun(): pass
+ self.assertEqual(repr(Union[fun, int]), 'typing.Union[fun, int]')
+
+ def test_union_str_pattern(self):
+ # Shouldn't crash; see http://bugs.python.org/issue25390
+ A = Union[str, Pattern]
+ A
+
+ def test_etree(self):
+ # See https://github.com/python/typing/issues/229
+ # (Only relevant for Python 2.)
+ try:
+ from xml.etree.cElementTree import Element
+ except ImportError:
+ raise SkipTest("cElementTree not found")
+ Union[Element, str] # Shouldn't crash
+
+ def Elem(*args):
+ return Element(*args)
+
+ Union[Elem, str] # Nor should this
+
+
+class TupleTests(BaseTestCase):
+
+ def test_basics(self):
+ with self.assertRaises(TypeError):
+ issubclass(Tuple, Tuple[int, str])
+ with self.assertRaises(TypeError):
+ issubclass(tuple, Tuple[int, str])
+
+ class TP(tuple): pass
+ self.assertTrue(issubclass(tuple, Tuple))
+ self.assertTrue(issubclass(TP, Tuple))
+
+ def test_equality(self):
+ self.assertEqual(Tuple[int], Tuple[int])
+ self.assertEqual(Tuple[int, ...], Tuple[int, ...])
+ self.assertNotEqual(Tuple[int], Tuple[int, int])
+ self.assertNotEqual(Tuple[int], Tuple[int, ...])
+
+ def test_tuple_subclass(self):
+ class MyTuple(tuple):
+ pass
+ self.assertTrue(issubclass(MyTuple, Tuple))
+
+ def test_tuple_instance_type_error(self):
+ with self.assertRaises(TypeError):
+ isinstance((0, 0), Tuple[int, int])
+        self.assertIsInstance((0, 0), Tuple)
+
+ def test_repr(self):
+ self.assertEqual(repr(Tuple), 'typing.Tuple')
+ self.assertEqual(repr(Tuple[()]), 'typing.Tuple[()]')
+ self.assertEqual(repr(Tuple[int, float]), 'typing.Tuple[int, float]')
+ self.assertEqual(repr(Tuple[int, ...]), 'typing.Tuple[int, ...]')
+
+ def test_errors(self):
+ with self.assertRaises(TypeError):
+ issubclass(42, Tuple)
+ with self.assertRaises(TypeError):
+ issubclass(42, Tuple[int])
+
+
+class CallableTests(BaseTestCase):
+
+ def test_self_subclass(self):
+ with self.assertRaises(TypeError):
+ self.assertTrue(issubclass(type(lambda x: x), Callable[[int], int]))
+ self.assertTrue(issubclass(type(lambda x: x), Callable))
+
+ def test_eq_hash(self):
+ self.assertEqual(Callable[[int], int], Callable[[int], int])
+ self.assertEqual(len({Callable[[int], int], Callable[[int], int]}), 1)
+ self.assertNotEqual(Callable[[int], int], Callable[[int], str])
+ self.assertNotEqual(Callable[[int], int], Callable[[str], int])
+ self.assertNotEqual(Callable[[int], int], Callable[[int, int], int])
+ self.assertNotEqual(Callable[[int], int], Callable[[], int])
+ self.assertNotEqual(Callable[[int], int], Callable)
+
+ def test_cannot_instantiate(self):
+ with self.assertRaises(TypeError):
+ Callable()
+ with self.assertRaises(TypeError):
+ type(Callable)()
+ c = Callable[[int], str]
+ with self.assertRaises(TypeError):
+ c()
+ with self.assertRaises(TypeError):
+ type(c)()
+
+ def test_callable_wrong_forms(self):
+ with self.assertRaises(TypeError):
+ Callable[(), int]
+ with self.assertRaises(TypeError):
+ Callable[[()], int]
+ with self.assertRaises(TypeError):
+ Callable[[int, 1], 2]
+ with self.assertRaises(TypeError):
+ Callable[int]
+
+ def test_callable_instance_works(self):
+ def f():
+ pass
+ self.assertIsInstance(f, Callable)
+ self.assertNotIsInstance(None, Callable)
+
+ def test_callable_instance_type_error(self):
+ def f():
+ pass
+ with self.assertRaises(TypeError):
+ self.assertIsInstance(f, Callable[[], None])
+ with self.assertRaises(TypeError):
+ self.assertIsInstance(f, Callable[[], Any])
+ with self.assertRaises(TypeError):
+ self.assertNotIsInstance(None, Callable[[], None])
+ with self.assertRaises(TypeError):
+ self.assertNotIsInstance(None, Callable[[], Any])
+
+ def test_repr(self):
+ ct0 = Callable[[], bool]
+ self.assertEqual(repr(ct0), 'typing.Callable[[], bool]')
+ ct2 = Callable[[str, float], int]
+ self.assertEqual(repr(ct2), 'typing.Callable[[str, float], int]')
+ ctv = Callable[..., str]
+ self.assertEqual(repr(ctv), 'typing.Callable[..., str]')
+
+ def test_ellipsis_in_generic(self):
+ # Shouldn't crash; see https://github.com/python/typing/issues/259
+ typing.List[Callable[..., str]]
+
+
+XK = TypeVar('XK', unicode, bytes)
+XV = TypeVar('XV')
+
+
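+# Fixtures for GenericTests: a user-defined generic mapping whose key
+# TypeVar XK is constrained to the Python 2 text types (unicode, bytes)
+# and whose value TypeVar XV is unconstrained.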
+class SimpleMapping(Generic[XK, XV]):
+
+ def __getitem__(self, key):
+ pass
+
+ def __setitem__(self, key, value):
+ pass
+
+ def get(self, key, default=None):
+ pass
+
+
+class MySimpleMapping(SimpleMapping[XK, XV]):
+
+ def __init__(self):
+ self.store = {}
+
+ def __getitem__(self, key):
+ return self.store[key]
+
+ def __setitem__(self, key, value):
+ self.store[key] = value
+
+ def get(self, key, default=None):
+ try:
+ return self.store[key]
+ except KeyError:
+ return default
+
+
+class ProtocolTests(BaseTestCase):
+
+ def test_basic_protocol(self):
+ @runtime_checkable
+ class P(Protocol):
+ def meth(self):
+ pass
+ class C(object): pass
+ class D(object):
+ def meth(self):
+ pass
+ def f():
+ pass
+ self.assertIsSubclass(D, P)
+ self.assertIsInstance(D(), P)
+ self.assertNotIsSubclass(C, P)
+ self.assertNotIsInstance(C(), P)
+ self.assertNotIsSubclass(types.FunctionType, P)
+ self.assertNotIsInstance(f, P)
+
+ def test_everything_implements_empty_protocol(self):
+ @runtime_checkable
+ class Empty(Protocol): pass
+ class C(object): pass
+ def f():
+ pass
+ for thing in (object, type, tuple, C, types.FunctionType):
+ self.assertIsSubclass(thing, Empty)
+ for thing in (object(), 1, (), typing, f):
+ self.assertIsInstance(thing, Empty)
+
+ def test_function_implements_protocol(self):
+ @runtime_checkable
+ class Function(Protocol):
+ def __call__(self, *args, **kwargs):
+ pass
+ def f():
+ pass
+ self.assertIsInstance(f, Function)
+
+ def test_no_inheritance_from_nominal(self):
+ class C(object): pass
+ class BP(Protocol): pass
+ with self.assertRaises(TypeError):
+ class P(C, Protocol):
+ pass
+ with self.assertRaises(TypeError):
+ class P(Protocol, C):
+ pass
+ with self.assertRaises(TypeError):
+ class P(BP, C, Protocol):
+ pass
+ class D(BP, C): pass
+ class E(C, BP): pass
+ self.assertNotIsInstance(D(), E)
+ self.assertNotIsInstance(E(), D)
+
+ def test_no_instantiation(self):
+ class P(Protocol): pass
+ with self.assertRaises(TypeError):
+ P()
+ class C(P): pass
+ self.assertIsInstance(C(), C)
+ T = typing.TypeVar('T')
+ class PG(Protocol[T]): pass
+ with self.assertRaises(TypeError):
+ PG()
+ with self.assertRaises(TypeError):
+ PG[int]()
+ with self.assertRaises(TypeError):
+ PG[T]()
+ class CG(PG[T]): pass
+ self.assertIsInstance(CG[int](), CG)
+
+ def test_cannot_instantiate_abstract(self):
+ @runtime_checkable
+ class P(Protocol):
+ @abc.abstractmethod
+ def ameth(self):
+ raise NotImplementedError
+ class B(P):
+ pass
+ class C(B):
+ def ameth(self):
+ return 26
+ with self.assertRaises(TypeError):
+ B()
+ self.assertIsInstance(C(), P)
+
+ def test_subprotocols_extending(self):
+ class P1(Protocol):
+ def meth1(self):
+ pass
+ @runtime_checkable
+ class P2(P1, Protocol):
+ def meth2(self):
+ pass
+ class C(object):
+ def meth1(self):
+ pass
+ def meth2(self):
+ pass
+ class C1(object):
+ def meth1(self):
+ pass
+ class C2(object):
+ def meth2(self):
+ pass
+ self.assertNotIsInstance(C1(), P2)
+ self.assertNotIsInstance(C2(), P2)
+ self.assertNotIsSubclass(C1, P2)
+ self.assertNotIsSubclass(C2, P2)
+ self.assertIsInstance(C(), P2)
+ self.assertIsSubclass(C, P2)
+
+ def test_subprotocols_merging(self):
+ class P1(Protocol):
+ def meth1(self):
+ pass
+ class P2(Protocol):
+ def meth2(self):
+ pass
+ @runtime_checkable
+ class P(P1, P2, Protocol):
+ pass
+ class C(object):
+ def meth1(self):
+ pass
+ def meth2(self):
+ pass
+ class C1(object):
+ def meth1(self):
+ pass
+ class C2(object):
+ def meth2(self):
+ pass
+ self.assertNotIsInstance(C1(), P)
+ self.assertNotIsInstance(C2(), P)
+ self.assertNotIsSubclass(C1, P)
+ self.assertNotIsSubclass(C2, P)
+ self.assertIsInstance(C(), P)
+ self.assertIsSubclass(C, P)
+
+ def test_protocols_issubclass(self):
+ T = typing.TypeVar('T')
+ @runtime_checkable
+ class P(Protocol):
+ def x(self): pass
+ @runtime_checkable
+ class PG(Protocol[T]):
+ def x(self): pass
+ class BadP(Protocol):
+ def x(self): pass
+ class BadPG(Protocol[T]):
+ def x(self): pass
+ class C(object):
+ def x(self): pass
+ self.assertIsSubclass(C, P)
+ self.assertIsSubclass(C, PG)
+ self.assertIsSubclass(BadP, PG)
+ self.assertIsSubclass(PG[int], PG)
+ self.assertIsSubclass(BadPG[int], P)
+ self.assertIsSubclass(BadPG[T], PG)
+ with self.assertRaises(TypeError):
+ issubclass(C, PG[T])
+ with self.assertRaises(TypeError):
+ issubclass(C, PG[C])
+ with self.assertRaises(TypeError):
+ issubclass(C, BadP)
+ with self.assertRaises(TypeError):
+ issubclass(C, BadPG)
+ with self.assertRaises(TypeError):
+ issubclass(P, PG[T])
+ with self.assertRaises(TypeError):
+ issubclass(PG, PG[int])
+
+ def test_protocols_issubclass_non_callable(self):
+ class C(object):
+ x = 1
+ @runtime_checkable
+ class PNonCall(Protocol):
+ x = 1
+ with self.assertRaises(TypeError):
+ issubclass(C, PNonCall)
+ self.assertIsInstance(C(), PNonCall)
+ PNonCall.register(C)
+ with self.assertRaises(TypeError):
+ issubclass(C, PNonCall)
+ self.assertIsInstance(C(), PNonCall)
+ # check that non-protocol subclasses are not affected
+ class D(PNonCall): pass
+ self.assertNotIsSubclass(C, D)
+ self.assertNotIsInstance(C(), D)
+ D.register(C)
+ self.assertIsSubclass(C, D)
+ self.assertIsInstance(C(), D)
+ with self.assertRaises(TypeError):
+ issubclass(D, PNonCall)
+
+ def test_protocols_isinstance(self):
+ T = typing.TypeVar('T')
+ @runtime_checkable
+ class P(Protocol):
+ def meth(x): pass
+ @runtime_checkable
+ class PG(Protocol[T]):
+ def meth(x): pass
+ class BadP(Protocol):
+ def meth(x): pass
+ class BadPG(Protocol[T]):
+ def meth(x): pass
+ class C(object):
+ def meth(x): pass
+ self.assertIsInstance(C(), P)
+ self.assertIsInstance(C(), PG)
+ with self.assertRaises(TypeError):
+ isinstance(C(), PG[T])
+ with self.assertRaises(TypeError):
+ isinstance(C(), PG[C])
+ with self.assertRaises(TypeError):
+ isinstance(C(), BadP)
+ with self.assertRaises(TypeError):
+ isinstance(C(), BadPG)
+
+ def test_protocols_isinstance_init(self):
+ T = typing.TypeVar('T')
+ @runtime_checkable
+ class P(Protocol):
+ x = 1
+ @runtime_checkable
+ class PG(Protocol[T]):
+ x = 1
+ class C(object):
+ def __init__(self, x):
+ self.x = x
+ self.assertIsInstance(C(1), P)
+ self.assertIsInstance(C(1), PG)
+
+ def test_protocol_checks_after_subscript(self):
+ class P(Protocol[T]): pass
+ class C(P[T]): pass
+ class Old1: pass
+ class New1(object): pass
+ class Old2: pass
+ class New2(object): pass
+ CA = C[Any] # noqa
+
+ self.assertNotIsInstance(Old1(), C)
+ self.assertNotIsInstance(New1(), C)
+ self.assertNotIsSubclass(Old2, C)
+ self.assertNotIsSubclass(New2, C)
+
+ class D1(C[Any]): pass
+ class D2(C[Any]): pass
+ CI = C[int] # noqa
+
+ self.assertIsInstance(D1(), C)
+ self.assertIsSubclass(D2, C)
+
+ def test_protocols_support_register(self):
+ @runtime_checkable
+ class P(Protocol):
+ x = 1
+ class PM(Protocol):
+ def meth(self): pass
+ class D(PM): pass
+ class C(object): pass
+ D.register(C)
+ P.register(C)
+ self.assertIsInstance(C(), P)
+ self.assertIsInstance(C(), D)
+
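+    # A None attribute still satisfies a non-callable protocol member, but
+    # since None is not callable it blocks a method member; the next two
+    # tests check both directions.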
+ def test_none_on_non_callable_doesnt_block_implementation(self):
+ @runtime_checkable
+ class P(Protocol):
+ x = 1
+ class A(object):
+ x = 1
+ class B(A):
+ x = None
+ class C(object):
+ def __init__(self):
+ self.x = None
+ self.assertIsInstance(B(), P)
+ self.assertIsInstance(C(), P)
+
+ def test_none_on_callable_blocks_implementation(self):
+ @runtime_checkable
+ class P(Protocol):
+ def x(self): pass
+ class A(object):
+ def x(self): pass
+ class B(A):
+ x = None
+ class C(object):
+ def __init__(self):
+ self.x = None
+ self.assertNotIsInstance(B(), P)
+ self.assertNotIsInstance(C(), P)
+
+ def test_non_protocol_subclasses(self):
+ class P(Protocol):
+ x = 1
+ @runtime_checkable
+ class PR(Protocol):
+ def meth(self): pass
+ class NonP(P):
+ x = 1
+ class NonPR(PR): pass
+ class C(object):
+ x = 1
+ class D(object):
+ def meth(self): pass
+ self.assertNotIsInstance(C(), NonP)
+ self.assertNotIsInstance(D(), NonPR)
+ self.assertNotIsSubclass(C, NonP)
+ self.assertNotIsSubclass(D, NonPR)
+ self.assertIsInstance(NonPR(), PR)
+ self.assertIsSubclass(NonPR, PR)
+
+ def test_custom_subclasshook(self):
+ class P(Protocol):
+ x = 1
+ class OKClass(object): pass
+ class BadClass(object):
+ x = 1
+ class C(P):
+ @classmethod
+ def __subclasshook__(cls, other):
+ return other.__name__.startswith("OK")
+ self.assertIsInstance(OKClass(), C)
+ self.assertNotIsInstance(BadClass(), C)
+ self.assertIsSubclass(OKClass, C)
+ self.assertNotIsSubclass(BadClass, C)
+
+ def test_issubclass_fails_correctly(self):
+ @runtime_checkable
+ class P(Protocol):
+ x = 1
+ class C: pass
+ with self.assertRaises(TypeError):
+ issubclass(C(), P)
+
+ def test_defining_generic_protocols(self):
+ T = typing.TypeVar('T')
+ S = typing.TypeVar('S')
+ @runtime_checkable
+ class PR(Protocol[T, S]):
+ def meth(self): pass
+ class P(PR[int, T], Protocol[T]):
+ y = 1
+ self.assertIsSubclass(PR[int, T], PR)
+ self.assertIsSubclass(P[str], PR)
+ with self.assertRaises(TypeError):
+ PR[int]
+ with self.assertRaises(TypeError):
+ P[int, str]
+ with self.assertRaises(TypeError):
+ PR[int, 1]
+ with self.assertRaises(TypeError):
+ PR[int, ClassVar]
+ class C(PR[int, T]): pass
+ self.assertIsInstance(C[str](), C)
+
+ def test_defining_generic_protocols_old_style(self):
+ T = typing.TypeVar('T')
+ S = typing.TypeVar('S')
+ @runtime_checkable
+ class PR(Protocol, typing.Generic[T, S]):
+ def meth(self): pass
+ class P(PR[int, str], Protocol):
+ y = 1
+ self.assertIsSubclass(PR[int, str], PR)
+ self.assertIsSubclass(P, PR)
+ with self.assertRaises(TypeError):
+ PR[int]
+ with self.assertRaises(TypeError):
+ PR[int, 1]
+ class P1(Protocol, typing.Generic[T]):
+ def bar(self, x): pass
+ class P2(typing.Generic[T], Protocol):
+ def bar(self, x): pass
+ @runtime_checkable
+ class PSub(P1[str], Protocol):
+ x = 1
+ class Test(object):
+ x = 1
+ def bar(self, x):
+ return x
+ self.assertIsInstance(Test(), PSub)
+ with self.assertRaises(TypeError):
+ PR[int, ClassVar]
+
+ def test_init_called(self):
+ T = typing.TypeVar('T')
+ class P(Protocol[T]): pass
+ class C(P[T]):
+ def __init__(self):
+ self.test = 'OK'
+ self.assertEqual(C[int]().test, 'OK')
+
+ def test_protocols_bad_subscripts(self):
+ T = typing.TypeVar('T')
+ S = typing.TypeVar('S')
+ with self.assertRaises(TypeError):
+ class P(Protocol[T, T]): pass
+ with self.assertRaises(TypeError):
+ class P(Protocol[int]): pass
+ with self.assertRaises(TypeError):
+ class P(Protocol[T], Protocol[S]): pass
+ with self.assertRaises(TypeError):
+ class P(Protocol[T], typing.Mapping[T, S]): pass
+
+ def test_generic_protocols_repr(self):
+ T = typing.TypeVar('T')
+ S = typing.TypeVar('S')
+ class P(Protocol[T, S]): pass
+ self.assertTrue(repr(P).endswith('P'))
+ self.assertTrue(repr(P[T, S]).endswith('P[~T, ~S]'))
+ self.assertTrue(repr(P[int, str]).endswith('P[int, str]'))
+
+ def test_generic_protocols_eq(self):
+ T = typing.TypeVar('T')
+ S = typing.TypeVar('S')
+ class P(Protocol[T, S]): pass
+ self.assertEqual(P, P)
+ self.assertEqual(P[int, T], P[int, T])
+ self.assertEqual(P[T, T][typing.Tuple[T, S]][int, str],
+ P[typing.Tuple[int, str], typing.Tuple[int, str]])
+
+ def test_generic_protocols_special_from_generic(self):
+ T = typing.TypeVar('T')
+ class P(Protocol[T]): pass
+ self.assertEqual(P.__parameters__, (T,))
+ self.assertIs(P.__args__, None)
+ self.assertIs(P.__origin__, None)
+ self.assertEqual(P[int].__parameters__, ())
+ self.assertEqual(P[int].__args__, (int,))
+ self.assertIs(P[int].__origin__, P)
+
+ def test_generic_protocols_special_from_protocol(self):
+ @runtime_checkable
+ class PR(Protocol):
+ x = 1
+ class P(Protocol):
+ def meth(self):
+ pass
+ T = typing.TypeVar('T')
+ class PG(Protocol[T]):
+ x = 1
+ def meth(self):
+ pass
+ self.assertTrue(P._is_protocol)
+ self.assertTrue(PR._is_protocol)
+ self.assertTrue(PG._is_protocol)
+ with self.assertRaises(AttributeError):
+ self.assertFalse(P._is_runtime_protocol)
+ self.assertTrue(PR._is_runtime_protocol)
+ self.assertTrue(PG[int]._is_protocol)
+ self.assertEqual(P._get_protocol_attrs(), {'meth'})
+ self.assertEqual(PR._get_protocol_attrs(), {'x'})
+ self.assertEqual(frozenset(PG._get_protocol_attrs()),
+ frozenset({'x', 'meth'}))
+ self.assertEqual(frozenset(PG[int]._get_protocol_attrs()),
+ frozenset({'x', 'meth'}))
+
+ def test_no_runtime_deco_on_nominal(self):
+ with self.assertRaises(TypeError):
+ @runtime_checkable
+ class C(object): pass
+ class Proto(Protocol):
+ x = 1
+ with self.assertRaises(TypeError):
+ @runtime_checkable
+ class Concrete(Proto):
+ pass
+
+ def test_none_treated_correctly(self):
+ @runtime_checkable
+ class P(Protocol):
+ x = None # type: int
+ class B(object): pass
+ self.assertNotIsInstance(B(), P)
+ class C(object):
+ x = 1
+ class D(object):
+ x = None
+ self.assertIsInstance(C(), P)
+ self.assertIsInstance(D(), P)
+ class CI(object):
+ def __init__(self):
+ self.x = 1
+ class DI(object):
+ def __init__(self):
+ self.x = None
+        self.assertIsInstance(CI(), P)
+        self.assertIsInstance(DI(), P)
+
+ def test_protocols_in_unions(self):
+ class P(Protocol):
+ x = None # type: int
+ Alias = typing.Union[typing.Iterable, P]
+ Alias2 = typing.Union[P, typing.Iterable]
+ self.assertEqual(Alias, Alias2)
+
+ def test_protocols_pickleable(self):
+ global P, CP # pickle wants to reference the class by name
+ T = typing.TypeVar('T')
+
+ @runtime_checkable
+ class P(Protocol[T]):
+ x = 1
+ class CP(P[int]):
+ pass
+
+ c = CP()
+ c.foo = 42
+ c.bar = 'abc'
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ z = pickle.dumps(c, proto)
+ x = pickle.loads(z)
+ self.assertEqual(x.foo, 42)
+ self.assertEqual(x.bar, 'abc')
+ self.assertEqual(x.x, 1)
+ self.assertEqual(x.__dict__, {'foo': 42, 'bar': 'abc'})
+ s = pickle.dumps(P)
+ D = pickle.loads(s)
+ class E(object):
+ x = 1
+ self.assertIsInstance(E(), D)
+
+ def test_supports_int(self):
+ self.assertIsSubclass(int, typing.SupportsInt)
+ self.assertNotIsSubclass(str, typing.SupportsInt)
+
+ def test_supports_float(self):
+ self.assertIsSubclass(float, typing.SupportsFloat)
+ self.assertNotIsSubclass(str, typing.SupportsFloat)
+
+ def test_supports_complex(self):
+
+ # Note: complex itself doesn't have __complex__.
+ class C(object):
+ def __complex__(self):
+ return 0j
+
+ self.assertIsSubclass(C, typing.SupportsComplex)
+ self.assertNotIsSubclass(str, typing.SupportsComplex)
+
+ def test_supports_abs(self):
+ self.assertIsSubclass(float, typing.SupportsAbs)
+ self.assertIsSubclass(int, typing.SupportsAbs)
+ self.assertNotIsSubclass(str, typing.SupportsAbs)
+
+ def test_reversible(self):
+ self.assertIsSubclass(list, typing.Reversible)
+ self.assertNotIsSubclass(int, typing.Reversible)
+
+ def test_supports_index(self):
+ self.assertIsSubclass(int, typing.SupportsIndex)
+ self.assertNotIsSubclass(str, typing.SupportsIndex)
+
+ def test_protocol_instance_works(self):
+ self.assertIsInstance(0, typing.SupportsAbs)
+ self.assertNotIsInstance('no', typing.SupportsAbs)
+ class C1(typing.SupportsInt):
+ def __int__(self):
+ return 42
+ class C2(C1):
+ pass
+ c = C2()
+ self.assertIsInstance(c, C1)
+
+ def test_collections_protocols_allowed(self):
+ @runtime_checkable
+ class Custom(collections.Iterable, Protocol):
+ def close(self): pass
+
+ class A(object): pass
+ class B(object):
+ def __iter__(self):
+ return []
+ def close(self):
+ return 0
+
+ self.assertIsSubclass(B, Custom)
+ self.assertNotIsSubclass(A, Custom)
+
+
+class GenericTests(BaseTestCase):
+
+ def test_basics(self):
+ X = SimpleMapping[str, Any]
+ self.assertEqual(X.__parameters__, ())
+ with self.assertRaises(TypeError):
+ X[unicode]
+ with self.assertRaises(TypeError):
+ X[unicode, unicode]
+ Y = SimpleMapping[XK, unicode]
+ self.assertEqual(Y.__parameters__, (XK,))
+ Y[unicode]
+ with self.assertRaises(TypeError):
+ Y[unicode, unicode]
+ self.assertIsSubclass(SimpleMapping[str, int], SimpleMapping)
+
+ def test_generic_errors(self):
+ T = TypeVar('T')
+ S = TypeVar('S')
+ with self.assertRaises(TypeError):
+ Generic[T]()
+ with self.assertRaises(TypeError):
+ Generic[T][T]
+ with self.assertRaises(TypeError):
+ Generic[T][S]
+ with self.assertRaises(TypeError):
+ isinstance([], List[int])
+ with self.assertRaises(TypeError):
+ issubclass(list, List[int])
+ with self.assertRaises(TypeError):
+ class NewGeneric(Generic): pass
+ with self.assertRaises(TypeError):
+ class MyGeneric(Generic[T], Generic[S]): pass
+ with self.assertRaises(TypeError):
+ class MyGeneric(List[T], Generic[S]): pass
+
+ def test_init(self):
+ T = TypeVar('T')
+ S = TypeVar('S')
+ with self.assertRaises(TypeError):
+ Generic[T, T]
+ with self.assertRaises(TypeError):
+ Generic[T, S, T]
+
+ def test_repr(self):
+ self.assertEqual(repr(SimpleMapping),
+ __name__ + '.' + 'SimpleMapping')
+ self.assertEqual(repr(MySimpleMapping),
+ __name__ + '.' + 'MySimpleMapping')
+
+ def test_chain_repr(self):
+ T = TypeVar('T')
+ S = TypeVar('S')
+
+ class C(Generic[T]):
+ pass
+
+ X = C[Tuple[S, T]]
+ self.assertEqual(X, C[Tuple[S, T]])
+ self.assertNotEqual(X, C[Tuple[T, S]])
+
+ Y = X[T, int]
+ self.assertEqual(Y, X[T, int])
+ self.assertNotEqual(Y, X[S, int])
+ self.assertNotEqual(Y, X[T, str])
+
+ Z = Y[str]
+ self.assertEqual(Z, Y[str])
+ self.assertNotEqual(Z, Y[int])
+ self.assertNotEqual(Z, Y[T])
+
+ self.assertTrue(str(Z).endswith(
+ '.C[typing.Tuple[str, int]]'))
+
+ def test_new_repr(self):
+ T = TypeVar('T')
+ U = TypeVar('U', covariant=True)
+ S = TypeVar('S')
+
+ self.assertEqual(repr(List), 'typing.List')
+ self.assertEqual(repr(List[T]), 'typing.List[~T]')
+ self.assertEqual(repr(List[U]), 'typing.List[+U]')
+ self.assertEqual(repr(List[S][T][int]), 'typing.List[int]')
+ self.assertEqual(repr(List[int]), 'typing.List[int]')
+
+ def test_new_repr_complex(self):
+ T = TypeVar('T')
+ TS = TypeVar('TS')
+
+ self.assertEqual(repr(typing.Mapping[T, TS][TS, T]), 'typing.Mapping[~TS, ~T]')
+ self.assertEqual(repr(List[Tuple[T, TS]][int, T]),
+ 'typing.List[typing.Tuple[int, ~T]]')
+ self.assertEqual(
+ repr(List[Tuple[T, T]][List[int]]),
+ 'typing.List[typing.Tuple[typing.List[int], typing.List[int]]]'
+ )
+
+ def test_new_repr_bare(self):
+ T = TypeVar('T')
+ self.assertEqual(repr(Generic[T]), 'typing.Generic[~T]')
+ self.assertEqual(repr(typing.Protocol[T]), 'typing.Protocol[~T]')
+ class C(typing.Dict[Any, Any]): pass
+ # this line should just work
+ repr(C.__mro__)
+
+ def test_dict(self):
+ T = TypeVar('T')
+
+ class B(Generic[T]):
+ pass
+
+ b = B()
+ b.foo = 42
+ self.assertEqual(b.__dict__, {'foo': 42})
+
+ class C(B[int]):
+ pass
+
+ c = C()
+ c.bar = 'abc'
+ self.assertEqual(c.__dict__, {'bar': 'abc'})
+
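+    # Subscripted aliases like C[int] proxy attribute access to the origin
+    # class C, so reads and writes through any alias are shared, e.g.:
+    #     C[int].x = 'changed'  =>  C.x == C[str].x == 'changed'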
+ def test_subscripted_generics_as_proxies(self):
+ T = TypeVar('T')
+ class C(Generic[T]):
+ x = 'def'
+ self.assertEqual(C[int].x, 'def')
+ self.assertEqual(C[C[int]].x, 'def')
+ C[C[int]].x = 'changed'
+ self.assertEqual(C.x, 'changed')
+ self.assertEqual(C[str].x, 'changed')
+ C[List[str]].z = 'new'
+ self.assertEqual(C.z, 'new')
+ self.assertEqual(C[Tuple[int]].z, 'new')
+
+ self.assertEqual(C().x, 'changed')
+ self.assertEqual(C[Tuple[str]]().z, 'new')
+
+ class D(C[T]):
+ pass
+ self.assertEqual(D[int].x, 'changed')
+ self.assertEqual(D.z, 'new')
+ D.z = 'from derived z'
+ D[int].x = 'from derived x'
+ self.assertEqual(C.x, 'changed')
+ self.assertEqual(C[int].z, 'new')
+ self.assertEqual(D.x, 'from derived x')
+ self.assertEqual(D[str].z, 'from derived z')
+
+ def test_abc_registry_kept(self):
+ T = TypeVar('T')
+ class C(Generic[T]): pass
+ C.register(int)
+ self.assertIsInstance(1, C)
+ C[int]
+ self.assertIsInstance(1, C)
+
+ def test_false_subclasses(self):
+ class MyMapping(MutableMapping[str, str]): pass
+ self.assertNotIsInstance({}, MyMapping)
+ self.assertNotIsSubclass(dict, MyMapping)
+
+ def test_abc_bases(self):
+ class MM(MutableMapping[str, str]):
+ def __getitem__(self, k):
+ return None
+ def __setitem__(self, k, v):
+ pass
+ def __delitem__(self, k):
+ pass
+ def __iter__(self):
+ return iter(())
+ def __len__(self):
+ return 0
+ # this should just work
+ MM().update()
+ self.assertIsInstance(MM(), collections.MutableMapping)
+ self.assertIsInstance(MM(), MutableMapping)
+ self.assertNotIsInstance(MM(), List)
+ self.assertNotIsInstance({}, MM)
+
+ def test_multiple_bases(self):
+ class MM1(MutableMapping[str, str], collections.MutableMapping):
+ pass
+ with self.assertRaises(TypeError):
+ # consistent MRO not possible
+ class MM2(collections.MutableMapping, MutableMapping[str, str]):
+ pass
+
+ def test_orig_bases(self):
+ T = TypeVar('T')
+ class C(typing.Dict[str, T]): pass
+ self.assertEqual(C.__orig_bases__, (typing.Dict[str, T],))
+
+ def test_naive_runtime_checks(self):
+ def naive_dict_check(obj, tp):
+ # Check if a dictionary conforms to Dict type
+ if len(tp.__parameters__) > 0:
+ raise NotImplementedError
+ if tp.__args__:
+ KT, VT = tp.__args__
+ return all(
+ isinstance(k, KT) and isinstance(v, VT)
+ for k, v in obj.items()
+ )
+ self.assertTrue(naive_dict_check({'x': 1}, typing.Dict[typing.Text, int]))
+ self.assertFalse(naive_dict_check({1: 'x'}, typing.Dict[typing.Text, int]))
+ with self.assertRaises(NotImplementedError):
+ naive_dict_check({1: 'x'}, typing.Dict[typing.Text, T])
+
+ def naive_generic_check(obj, tp):
+ # Check if an instance conforms to the generic class
+ if not hasattr(obj, '__orig_class__'):
+ raise NotImplementedError
+ return obj.__orig_class__ == tp
+ class Node(Generic[T]): pass
+ self.assertTrue(naive_generic_check(Node[int](), Node[int]))
+ self.assertFalse(naive_generic_check(Node[str](), Node[int]))
+ self.assertFalse(naive_generic_check(Node[str](), List))
+ with self.assertRaises(NotImplementedError):
+ naive_generic_check([1, 2, 3], Node[int])
+
+ def naive_list_base_check(obj, tp):
+ # Check if list conforms to a List subclass
+ return all(isinstance(x, tp.__orig_bases__[0].__args__[0])
+ for x in obj)
+ class C(List[int]): pass
+ self.assertTrue(naive_list_base_check([1, 2, 3], C))
+ self.assertFalse(naive_list_base_check(['a', 'b'], C))
+
+ def test_multi_subscr_base(self):
+ T = TypeVar('T')
+ U = TypeVar('U')
+ V = TypeVar('V')
+ class C(List[T][U][V]): pass
+ class D(C, List[T][U][V]): pass
+ self.assertEqual(C.__parameters__, (V,))
+ self.assertEqual(D.__parameters__, (V,))
+ self.assertEqual(C[int].__parameters__, ())
+ self.assertEqual(D[int].__parameters__, ())
+ self.assertEqual(C[int].__args__, (int,))
+ self.assertEqual(D[int].__args__, (int,))
+ self.assertEqual(C.__bases__, (List,))
+ self.assertEqual(D.__bases__, (C, List))
+ self.assertEqual(C.__orig_bases__, (List[T][U][V],))
+ self.assertEqual(D.__orig_bases__, (C, List[T][U][V]))
+
+ def test_subscript_meta(self):
+ T = TypeVar('T')
+ self.assertEqual(Type[GenericMeta], Type[GenericMeta])
+ self.assertEqual(Union[T, int][GenericMeta], Union[GenericMeta, int])
+ self.assertEqual(Callable[..., GenericMeta].__args__, (Ellipsis, GenericMeta))
+
+ def test_generic_hashes(self):
+ import mod_generics_cache
+ class A(Generic[T]):
+ __module__ = 'test_typing'
+
+ class B(Generic[T]):
+ class A(Generic[T]):
+ pass
+
+ self.assertEqual(A, A)
+ self.assertEqual(mod_generics_cache.A[str], mod_generics_cache.A[str])
+ self.assertEqual(B.A, B.A)
+ self.assertEqual(mod_generics_cache.B.A[B.A[str]],
+ mod_generics_cache.B.A[B.A[str]])
+
+ self.assertNotEqual(A, B.A)
+ self.assertNotEqual(A, mod_generics_cache.A)
+ self.assertNotEqual(A, mod_generics_cache.B.A)
+ self.assertNotEqual(B.A, mod_generics_cache.A)
+ self.assertNotEqual(B.A, mod_generics_cache.B.A)
+
+ self.assertNotEqual(A[str], B.A[str])
+ self.assertNotEqual(A[List[Any]], B.A[List[Any]])
+ self.assertNotEqual(A[str], mod_generics_cache.A[str])
+ self.assertNotEqual(A[str], mod_generics_cache.B.A[str])
+ self.assertNotEqual(B.A[int], mod_generics_cache.A[int])
+ self.assertNotEqual(B.A[List[Any]], mod_generics_cache.B.A[List[Any]])
+
+ self.assertNotEqual(Tuple[A[str]], Tuple[B.A[str]])
+ self.assertNotEqual(Tuple[A[List[Any]]], Tuple[B.A[List[Any]]])
+ self.assertNotEqual(Union[str, A[str]], Union[str, mod_generics_cache.A[str]])
+ self.assertNotEqual(Union[A[str], A[str]],
+ Union[A[str], mod_generics_cache.A[str]])
+ self.assertNotEqual(typing.FrozenSet[A[str]],
+ typing.FrozenSet[mod_generics_cache.B.A[str]])
+
+ self.assertTrue(repr(Tuple[A[str]]).endswith('test_typing.A[str]]'))
+ self.assertTrue(repr(Tuple[mod_generics_cache.A[str]])
+ .endswith('mod_generics_cache.A[str]]'))
+
+ def test_extended_generic_rules_eq(self):
+ T = TypeVar('T')
+ U = TypeVar('U')
+ self.assertEqual(Tuple[T, T][int], Tuple[int, int])
+ self.assertEqual(typing.Iterable[Tuple[T, T]][T], typing.Iterable[Tuple[T, T]])
+ with self.assertRaises(TypeError):
+ Tuple[T, int][()]
+ with self.assertRaises(TypeError):
+ Tuple[T, U][T, ...]
+
+ self.assertEqual(Union[T, int][int], int)
+ self.assertEqual(Union[T, U][int, Union[int, str]], Union[int, str])
+ class Base(object): pass
+ class Derived(Base): pass
+ self.assertEqual(Union[T, Base][Derived], Base)
+ with self.assertRaises(TypeError):
+ Union[T, int][1]
+
+ self.assertEqual(Callable[[T], T][KT], Callable[[KT], KT])
+ self.assertEqual(Callable[..., List[T]][int], Callable[..., List[int]])
+ with self.assertRaises(TypeError):
+ Callable[[T], U][..., int]
+ with self.assertRaises(TypeError):
+ Callable[[T], U][[], int]
+
+ def test_extended_generic_rules_repr(self):
+ T = TypeVar('T')
+ self.assertEqual(repr(Union[Tuple, Callable]).replace('typing.', ''),
+ 'Union[Tuple, Callable]')
+ self.assertEqual(repr(Union[Tuple, Tuple[int]]).replace('typing.', ''),
+ 'Tuple')
+ self.assertEqual(repr(Callable[..., Optional[T]][int]).replace('typing.', ''),
+ 'Callable[..., Union[int, NoneType]]')
+ self.assertEqual(repr(Callable[[], List[T]][int]).replace('typing.', ''),
+ 'Callable[[], List[int]]')
+
+    def test_generic_forward_ref(self):
+ LLT = List[List['CC']]
+ class CC: pass
+ self.assertEqual(typing._eval_type(LLT, globals(), locals()), List[List[CC]])
+ T = TypeVar('T')
+ AT = Tuple[T, ...]
+ self.assertIs(typing._eval_type(AT, globals(), locals()), AT)
+ CT = Callable[..., List[T]]
+ self.assertIs(typing._eval_type(CT, globals(), locals()), CT)
+
+ def test_extended_generic_rules_subclassing(self):
+ class T1(Tuple[T, KT]): pass
+ class T2(Tuple[T, ...]): pass
+ class C1(Callable[[T], T]): pass
+ class C2(Callable[..., int]):
+ def __call__(self):
+ return None
+
+ self.assertEqual(T1.__parameters__, (T, KT))
+ self.assertEqual(T1[int, str].__args__, (int, str))
+ self.assertEqual(T1[int, T].__origin__, T1)
+
+ self.assertEqual(T2.__parameters__, (T,))
+ with self.assertRaises(TypeError):
+ T1[int]
+ with self.assertRaises(TypeError):
+ T2[int, str]
+
+ self.assertEqual(repr(C1[int]).split('.')[-1], 'C1[int]')
+ self.assertEqual(C2.__parameters__, ())
+ self.assertIsInstance(C2(), collections.Callable)
+ self.assertIsSubclass(C2, collections.Callable)
+ self.assertIsSubclass(C1, collections.Callable)
+ self.assertIsInstance(T1(), tuple)
+ self.assertIsSubclass(T2, tuple)
+ self.assertIsSubclass(Tuple[int, ...], typing.Sequence)
+ self.assertIsSubclass(Tuple[int, ...], typing.Iterable)
+
+ def test_fail_with_bare_union(self):
+ with self.assertRaises(TypeError):
+ List[Union]
+ with self.assertRaises(TypeError):
+ Tuple[Optional]
+ with self.assertRaises(TypeError):
+ ClassVar[ClassVar]
+ with self.assertRaises(TypeError):
+ List[ClassVar[int]]
+
+ def test_fail_with_bare_generic(self):
+ T = TypeVar('T')
+ with self.assertRaises(TypeError):
+ List[Generic]
+ with self.assertRaises(TypeError):
+ Tuple[Generic[T]]
+ with self.assertRaises(TypeError):
+ List[typing.Protocol]
+ with self.assertRaises(TypeError):
+ isinstance(1, Generic)
+
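+    # Type arguments are erased at instantiation: MyTup[int]() is a plain
+    # MyTup at runtime, with the subscripted alias preserved only on
+    # __orig_class__.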
+ def test_type_erasure_special(self):
+ T = TypeVar('T')
+ # this is the only test that checks type caching
+ self.clear_caches()
+ class MyTup(Tuple[T, T]): pass
+ self.assertIs(MyTup[int]().__class__, MyTup)
+ self.assertIs(MyTup[int]().__orig_class__, MyTup[int])
+ class MyCall(Callable[..., T]):
+ def __call__(self): return None
+ self.assertIs(MyCall[T]().__class__, MyCall)
+ self.assertIs(MyCall[T]().__orig_class__, MyCall[T])
+ class MyDict(typing.Dict[T, T]): pass
+ self.assertIs(MyDict[int]().__class__, MyDict)
+ self.assertIs(MyDict[int]().__orig_class__, MyDict[int])
+ class MyDef(typing.DefaultDict[str, T]): pass
+ self.assertIs(MyDef[int]().__class__, MyDef)
+ self.assertIs(MyDef[int]().__orig_class__, MyDef[int])
+
+ def test_all_repr_eq_any(self):
+ objs = (getattr(typing, el) for el in typing.__all__)
+ for obj in objs:
+ self.assertNotEqual(repr(obj), '')
+ self.assertEqual(obj, obj)
+ if getattr(obj, '__parameters__', None) and len(obj.__parameters__) == 1:
+ self.assertEqual(obj[Any].__args__, (Any,))
+ if isinstance(obj, type):
+ for base in obj.__mro__:
+ self.assertNotEqual(repr(base), '')
+ self.assertEqual(base, base)
+
+ def test_pickle(self):
+ global C # pickle wants to reference the class by name
+ T = TypeVar('T')
+
+ class B(Generic[T]):
+ pass
+
+ class C(B[int]):
+ pass
+
+ c = C()
+ c.foo = 42
+ c.bar = 'abc'
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ z = pickle.dumps(c, proto)
+ x = pickle.loads(z)
+ self.assertEqual(x.foo, 42)
+ self.assertEqual(x.bar, 'abc')
+ self.assertEqual(x.__dict__, {'foo': 42, 'bar': 'abc'})
+ simples = [Any, Union, Tuple, Callable, ClassVar, List, typing.Iterable]
+ for s in simples:
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ z = pickle.dumps(s, proto)
+ x = pickle.loads(z)
+ self.assertEqual(s, x)
+
+ def test_copy_and_deepcopy(self):
+ T = TypeVar('T')
+ class Node(Generic[T]): pass
+ things = [
+ Any,
+ Callable[..., T],
+ Callable[[int], int],
+ ClassVar[List[T]],
+ ClassVar[int],
+ List['T'],
+ Node[Any],
+ Node[T],
+ Node[int],
+ Tuple['T', 'T'],
+ Tuple[Any, Any],
+ Tuple[T, int],
+ Union['T', int],
+ Union[T, int],
+ typing.Dict[T, Any],
+ typing.Dict[int, str],
+ typing.Iterable[Any],
+ typing.Iterable[T],
+ typing.Iterable[int],
+ typing.Mapping['T', int]
+ ]
+ for t in things:
+ self.assertEqual(t, deepcopy(t))
+ self.assertEqual(t, copy(t))
+
+ def test_copy_generic_instances(self):
+ T = TypeVar('T')
+ class C(Generic[T]):
+ def __init__(self, attr):
+ self.attr = attr
+
+ c = C(42)
+ self.assertEqual(copy(c).attr, 42)
+ self.assertEqual(deepcopy(c).attr, 42)
+ self.assertIsNot(copy(c), c)
+ self.assertIsNot(deepcopy(c), c)
+ c.attr = 1
+ self.assertEqual(copy(c).attr, 1)
+ self.assertEqual(deepcopy(c).attr, 1)
+ ci = C[int](42)
+ self.assertEqual(copy(ci).attr, 42)
+ self.assertEqual(deepcopy(ci).attr, 42)
+ self.assertIsNot(copy(ci), ci)
+ self.assertIsNot(deepcopy(ci), ci)
+ ci.attr = 1
+ self.assertEqual(copy(ci).attr, 1)
+ self.assertEqual(deepcopy(ci).attr, 1)
+ self.assertEqual(ci.__orig_class__, C[int])
+
+ def test_weakref_all(self):
+ T = TypeVar('T')
+ things = [Any, Union[T, int], Callable[..., T], Tuple[Any, Any],
+ Optional[List[int]], typing.Mapping[int, str],
+ typing.re.Match[bytes], typing.Iterable['whatever']]
+ for t in things:
+ self.assertEqual(weakref.ref(t)(), t)
+
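+    # __slots__ declared on a generic class are shared with its subscripted
+    # aliases, so slot enforcement behaves identically on C() and C[int]().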
+ def test_parameterized_slots(self):
+ T = TypeVar('T')
+ class C(Generic[T]):
+ __slots__ = ('potato',)
+
+ c = C()
+ c_int = C[int]()
+ self.assertEqual(C.__slots__, C[str].__slots__)
+
+ c.potato = 0
+ c_int.potato = 0
+ with self.assertRaises(AttributeError):
+ c.tomato = 0
+ with self.assertRaises(AttributeError):
+ c_int.tomato = 0
+
+ self.assertEqual(typing._eval_type(C['C'], globals(), locals()), C[C])
+ self.assertEqual(typing._eval_type(C['C'], globals(), locals()).__slots__,
+ C.__slots__)
+ self.assertEqual(copy(C[int]), deepcopy(C[int]))
+
+ def test_parameterized_slots_dict(self):
+ T = TypeVar('T')
+ class D(Generic[T]):
+ __slots__ = {'banana': 42}
+
+ d = D()
+ d_int = D[int]()
+ self.assertEqual(D.__slots__, D[str].__slots__)
+
+ d.banana = 'yes'
+ d_int.banana = 'yes'
+ with self.assertRaises(AttributeError):
+ d.foobar = 'no'
+ with self.assertRaises(AttributeError):
+ d_int.foobar = 'no'
+
+ def test_errors(self):
+ with self.assertRaises(TypeError):
+ B = SimpleMapping[XK, Any]
+
+ class C(Generic[B]):
+ pass
+
+ def test_repr_2(self):
+ PY32 = sys.version_info[:2] < (3, 3)
+
+ class C(Generic[T]):
+ pass
+
+ self.assertEqual(C.__module__, __name__)
+ if not PY32:
+ self.assertEqual(C.__qualname__,
+ 'GenericTests.test_repr_2.<locals>.C')
+ self.assertEqual(repr(C).split('.')[-1], 'C')
+ X = C[int]
+ self.assertEqual(X.__module__, __name__)
+ if not PY32:
+ self.assertTrue(X.__qualname__.endswith('.<locals>.C'))
+ self.assertEqual(repr(X).split('.')[-1], 'C[int]')
+
+ class Y(C[int]):
+ pass
+
+ self.assertEqual(Y.__module__, __name__)
+ if not PY32:
+ self.assertEqual(Y.__qualname__,
+ 'GenericTests.test_repr_2.<locals>.Y')
+ self.assertEqual(repr(Y).split('.')[-1], 'Y')
+
+ def test_eq_1(self):
+ self.assertEqual(Generic, Generic)
+ self.assertEqual(Generic[T], Generic[T])
+ self.assertNotEqual(Generic[KT], Generic[VT])
+
+ def test_eq_2(self):
+
+ class A(Generic[T]):
+ pass
+
+ class B(Generic[T]):
+ pass
+
+ self.assertEqual(A, A)
+ self.assertNotEqual(A, B)
+ self.assertEqual(A[T], A[T])
+ self.assertNotEqual(A[T], B[T])
+
+ def test_multiple_inheritance(self):
+
+ class A(Generic[T, VT]):
+ pass
+
+ class B(Generic[KT, T]):
+ pass
+
+ class C(A[T, VT], Generic[VT, T, KT], B[KT, T]):
+ pass
+
+ self.assertEqual(C.__parameters__, (VT, T, KT))
+
+ def test_nested(self):
+
+ G = Generic
+
+ class Visitor(G[T]):
+
+ a = None
+
+ def set(self, a):
+ self.a = a
+
+ def get(self):
+ return self.a
+
+ def visit(self):
+ return self.a
+
+ V = Visitor[typing.List[int]]
+
+ class IntListVisitor(V):
+
+ def append(self, x):
+ self.a.append(x)
+
+ a = IntListVisitor()
+ a.set([])
+ a.append(1)
+ a.append(42)
+ self.assertEqual(a.get(), [1, 42])
+
+ def test_type_erasure(self):
+ T = TypeVar('T')
+
+ class Node(Generic[T]):
+ def __init__(self, label,
+ left=None,
+ right=None):
+ self.label = label # type: T
+ self.left = left # type: Optional[Node[T]]
+ self.right = right # type: Optional[Node[T]]
+
+ def foo(x):
+ a = Node(x)
+ b = Node[T](x)
+ c = Node[Any](x)
+ self.assertIs(type(a), Node)
+ self.assertIs(type(b), Node)
+ self.assertIs(type(c), Node)
+ self.assertEqual(a.label, x)
+ self.assertEqual(b.label, x)
+ self.assertEqual(c.label, x)
+
+ foo(42)
+
+ def test_implicit_any(self):
+ T = TypeVar('T')
+
+ class C(Generic[T]):
+ pass
+
+ class D(C):
+ pass
+
+ self.assertEqual(D.__parameters__, ())
+
+ with self.assertRaises(Exception):
+ D[int]
+ with self.assertRaises(Exception):
+ D[Any]
+ with self.assertRaises(Exception):
+ D[T]
+
+ def test_new_with_args(self):
+
+ class A(Generic[T]):
+ pass
+
+ class B(object):
+ def __new__(cls, arg):
+ # call object.__new__
+ obj = super(B, cls).__new__(cls)
+ obj.arg = arg
+ return obj
+
+ # mro: C, A, Generic, B, object
+ class C(A, B):
+ pass
+
+ c = C('foo')
+ self.assertEqual(c.arg, 'foo')
+
+ def test_new_with_args2(self):
+
+ class A(object):
+ def __init__(self, arg):
+ self.from_a = arg
+ # call object
+ super(A, self).__init__()
+
+ # mro: C, Generic, A, object
+ class C(Generic[T], A):
+ def __init__(self, arg):
+ self.from_c = arg
+ # call Generic
+ super(C, self).__init__(arg)
+
+ c = C('foo')
+ self.assertEqual(c.from_a, 'foo')
+ self.assertEqual(c.from_c, 'foo')
+
+ def test_new_no_args(self):
+
+ class A(Generic[T]):
+ pass
+
+ with self.assertRaises(TypeError):
+ A('foo')
+
+ class B(object):
+ def __new__(cls):
+ # call object
+ obj = super(B, cls).__new__(cls)
+ obj.from_b = 'b'
+ return obj
+
+ # mro: C, A, Generic, B, object
+ class C(A, B):
+ def __init__(self, arg):
+ self.arg = arg
+
+ def __new__(cls, arg):
+ # call A
+ obj = super(C, cls).__new__(cls)
+ obj.from_c = 'c'
+ return obj
+
+ c = C('foo')
+ self.assertEqual(c.arg, 'foo')
+ self.assertEqual(c.from_b, 'b')
+ self.assertEqual(c.from_c, 'c')
+
+
+class ClassVarTests(BaseTestCase):
+
+ def test_basics(self):
+ with self.assertRaises(TypeError):
+ ClassVar[1]
+ with self.assertRaises(TypeError):
+ ClassVar[int, str]
+ with self.assertRaises(TypeError):
+ ClassVar[int][str]
+
+ def test_repr(self):
+ self.assertEqual(repr(ClassVar), 'typing.ClassVar')
+ cv = ClassVar[int]
+ self.assertEqual(repr(cv), 'typing.ClassVar[int]')
+ cv = ClassVar[Employee]
+ self.assertEqual(repr(cv), 'typing.ClassVar[%s.Employee]' % __name__)
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError):
+ class C(type(ClassVar)):
+ pass
+ with self.assertRaises(TypeError):
+ class C(type(ClassVar[int])):
+ pass
+
+ def test_cannot_init(self):
+ with self.assertRaises(TypeError):
+ ClassVar()
+ with self.assertRaises(TypeError):
+ type(ClassVar)()
+ with self.assertRaises(TypeError):
+ type(ClassVar[Optional[int]])()
+
+ def test_no_isinstance(self):
+ with self.assertRaises(TypeError):
+ isinstance(1, ClassVar[int])
+ with self.assertRaises(TypeError):
+ issubclass(int, ClassVar)
+
+
+class FinalTests(BaseTestCase):
+
+ def test_basics(self):
+ with self.assertRaises(TypeError):
+ Final[1]
+ with self.assertRaises(TypeError):
+ Final[int, str]
+ with self.assertRaises(TypeError):
+ Final[int][str]
+
+ def test_repr(self):
+ self.assertEqual(repr(Final), 'typing.Final')
+ cv = Final[int]
+ self.assertEqual(repr(cv), 'typing.Final[int]')
+ cv = Final[Employee]
+ self.assertEqual(repr(cv), 'typing.Final[%s.Employee]' % __name__)
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError):
+ class C(type(Final)):
+ pass
+ with self.assertRaises(TypeError):
+ class C(type(Final[int])):
+ pass
+
+ def test_cannot_init(self):
+ with self.assertRaises(TypeError):
+ Final()
+ with self.assertRaises(TypeError):
+ type(Final)()
+ with self.assertRaises(TypeError):
+ type(Final[typing.Optional[int]])()
+
+ def test_no_isinstance(self):
+ with self.assertRaises(TypeError):
+ isinstance(1, Final[int])
+ with self.assertRaises(TypeError):
+ issubclass(int, Final)
+
+
+class LiteralTests(BaseTestCase):
+ def test_basics(self):
+ Literal[1]
+ Literal[1, 2, 3]
+ Literal["x", "y", "z"]
+ Literal[None]
+
+ def test_illegal_parameters_do_not_raise_runtime_errors(self):
+ # Type checkers should reject these types, but we do not
+        # raise errors at runtime to maintain maximum flexibility
+ Literal[int]
+ Literal[Literal[1, 2], Literal[4, 5]]
+ Literal[3j + 2, ..., ()]
+ Literal[b"foo", u"bar"]
+ Literal[{"foo": 3, "bar": 4}]
+ Literal[T]
+
+ def test_literals_inside_other_types(self):
+ typing.List[Literal[1, 2, 3]]
+ typing.List[Literal[("foo", "bar", "baz")]]
+
+ def test_repr(self):
+ self.assertEqual(repr(Literal[1]), "typing.Literal[1]")
+ self.assertEqual(repr(Literal[1, True, "foo"]), "typing.Literal[1, True, u'foo']")
+ self.assertEqual(repr(Literal[int]), "typing.Literal[int]")
+ self.assertEqual(repr(Literal), "typing.Literal")
+ self.assertEqual(repr(Literal[None]), "typing.Literal[None]")
+
+ def test_cannot_init(self):
+ with self.assertRaises(TypeError):
+ Literal()
+ with self.assertRaises(TypeError):
+ Literal[1]()
+ with self.assertRaises(TypeError):
+ type(Literal)()
+ with self.assertRaises(TypeError):
+ type(Literal[1])()
+
+ def test_no_isinstance_or_issubclass(self):
+ with self.assertRaises(TypeError):
+ isinstance(1, Literal[1])
+ with self.assertRaises(TypeError):
+ isinstance(int, Literal[1])
+ with self.assertRaises(TypeError):
+ issubclass(1, Literal[1])
+ with self.assertRaises(TypeError):
+ issubclass(int, Literal[1])
+
+ def test_no_subclassing(self):
+ with self.assertRaises(TypeError):
+ class Foo(Literal[1]): pass
+ with self.assertRaises(TypeError):
+ class Bar(Literal): pass
+
+ def test_no_multiple_subscripts(self):
+ with self.assertRaises(TypeError):
+ Literal[1][1]
+
+
+class CastTests(BaseTestCase):
+
+ def test_basics(self):
+ self.assertEqual(cast(int, 42), 42)
+ self.assertEqual(cast(float, 42), 42)
+ self.assertIs(type(cast(float, 42)), int)
+ self.assertEqual(cast(Any, 42), 42)
+ self.assertEqual(cast(list, 42), 42)
+ self.assertEqual(cast(Union[str, float], 42), 42)
+ self.assertEqual(cast(AnyStr, 42), 42)
+ self.assertEqual(cast(None, 42), 42)
+
+ def test_errors(self):
+ # Bogus calls are not expected to fail.
+ cast(42, 42)
+ cast('hello', 42)
+
+
+class ForwardRefTests(BaseTestCase):
+
+ def test_forwardref_instance_type_error(self):
+ fr = typing._ForwardRef('int')
+ with self.assertRaises(TypeError):
+ isinstance(42, fr)
+
+ def test_syntax_error(self):
+
+ with self.assertRaises(SyntaxError):
+ Generic['/T']
+
+ def test_forwardref_subclass_type_error(self):
+ fr = typing._ForwardRef('int')
+ with self.assertRaises(TypeError):
+ issubclass(int, fr)
+
+ def test_forward_equality(self):
+ fr = typing._ForwardRef('int')
+ self.assertEqual(fr, typing._ForwardRef('int'))
+ self.assertNotEqual(List['int'], List[int])
+
+ def test_forward_repr(self):
+ self.assertEqual(repr(List['int']), "typing.List[_ForwardRef(%r)]" % 'int')
+
+
+class OverloadTests(BaseTestCase):
+
+ def test_overload_fails(self):
+ from typing import overload
+
+ with self.assertRaises(RuntimeError):
+
+ @overload
+ def blah():
+ pass
+
+ blah()
+
+ def test_overload_succeeds(self):
+ from typing import overload
+
+ @overload
+ def blah():
+ pass
+
+ def blah():
+ pass
+
+ blah()
+
+
+class CollectionsAbcTests(BaseTestCase):
+
+ def test_hashable(self):
+ self.assertIsInstance(42, typing.Hashable)
+ self.assertNotIsInstance([], typing.Hashable)
+
+ def test_iterable(self):
+ self.assertIsInstance([], typing.Iterable)
+ # Due to ABC caching, the second time takes a separate code
+ # path and could fail. So call this a few times.
+ self.assertIsInstance([], typing.Iterable)
+ self.assertIsInstance([], typing.Iterable)
+ self.assertNotIsInstance(42, typing.Iterable)
+ # Just in case, also test issubclass() a few times.
+ self.assertIsSubclass(list, typing.Iterable)
+ self.assertIsSubclass(list, typing.Iterable)
+
+ def test_iterator(self):
+ it = iter([])
+ self.assertIsInstance(it, typing.Iterator)
+ self.assertNotIsInstance(42, typing.Iterator)
+
+ def test_sized(self):
+ self.assertIsInstance([], typing.Sized)
+ self.assertNotIsInstance(42, typing.Sized)
+
+ def test_container(self):
+ self.assertIsInstance([], typing.Container)
+ self.assertNotIsInstance(42, typing.Container)
+
+ def test_abstractset(self):
+ self.assertIsInstance(set(), typing.AbstractSet)
+ self.assertNotIsInstance(42, typing.AbstractSet)
+
+ def test_mutableset(self):
+ self.assertIsInstance(set(), typing.MutableSet)
+ self.assertNotIsInstance(frozenset(), typing.MutableSet)
+
+ def test_mapping(self):
+ self.assertIsInstance({}, typing.Mapping)
+ self.assertNotIsInstance(42, typing.Mapping)
+
+ def test_mutablemapping(self):
+ self.assertIsInstance({}, typing.MutableMapping)
+ self.assertNotIsInstance(42, typing.MutableMapping)
+
+ def test_sequence(self):
+ self.assertIsInstance([], typing.Sequence)
+ self.assertNotIsInstance(42, typing.Sequence)
+
+ def test_mutablesequence(self):
+ self.assertIsInstance([], typing.MutableSequence)
+ self.assertNotIsInstance((), typing.MutableSequence)
+
+ def test_bytestring(self):
+ self.assertIsInstance(b'', typing.ByteString)
+ self.assertIsInstance(bytearray(b''), typing.ByteString)
+
+ def test_list(self):
+ self.assertIsSubclass(list, typing.List)
+
+ def test_deque(self):
+ self.assertIsSubclass(collections.deque, typing.Deque)
+ class MyDeque(typing.Deque[int]): pass
+ self.assertIsInstance(MyDeque(), collections.deque)
+
+ def test_counter(self):
+ self.assertIsSubclass(collections.Counter, typing.Counter)
+
+ def test_set(self):
+ self.assertIsSubclass(set, typing.Set)
+ self.assertNotIsSubclass(frozenset, typing.Set)
+
+ def test_frozenset(self):
+ self.assertIsSubclass(frozenset, typing.FrozenSet)
+ self.assertNotIsSubclass(set, typing.FrozenSet)
+
+ def test_dict(self):
+ self.assertIsSubclass(dict, typing.Dict)
+
+ def test_no_list_instantiation(self):
+ with self.assertRaises(TypeError):
+ typing.List()
+ with self.assertRaises(TypeError):
+ typing.List[T]()
+ with self.assertRaises(TypeError):
+ typing.List[int]()
+
+ def test_list_subclass(self):
+
+ class MyList(typing.List[int]):
+ pass
+
+ a = MyList()
+ self.assertIsInstance(a, MyList)
+ self.assertIsInstance(a, typing.Sequence)
+
+ self.assertIsSubclass(MyList, list)
+ self.assertNotIsSubclass(list, MyList)
+
+ def test_no_dict_instantiation(self):
+ with self.assertRaises(TypeError):
+ typing.Dict()
+ with self.assertRaises(TypeError):
+ typing.Dict[KT, VT]()
+ with self.assertRaises(TypeError):
+ typing.Dict[str, int]()
+
+ def test_dict_subclass(self):
+
+ class MyDict(typing.Dict[str, int]):
+ pass
+
+ d = MyDict()
+ self.assertIsInstance(d, MyDict)
+ self.assertIsInstance(d, typing.MutableMapping)
+
+ self.assertIsSubclass(MyDict, dict)
+ self.assertNotIsSubclass(dict, MyDict)
+
+ def test_defaultdict_instantiation(self):
+ self.assertIs(type(typing.DefaultDict()), collections.defaultdict)
+ self.assertIs(type(typing.DefaultDict[KT, VT]()), collections.defaultdict)
+ self.assertIs(type(typing.DefaultDict[str, int]()), collections.defaultdict)
+
+ def test_defaultdict_subclass(self):
+
+ class MyDefDict(typing.DefaultDict[str, int]):
+ pass
+
+ dd = MyDefDict()
+ self.assertIsInstance(dd, MyDefDict)
+
+ self.assertIsSubclass(MyDefDict, collections.defaultdict)
+ self.assertNotIsSubclass(collections.defaultdict, MyDefDict)
+
+ def test_deque_instantiation(self):
+ self.assertIs(type(typing.Deque()), collections.deque)
+ self.assertIs(type(typing.Deque[T]()), collections.deque)
+ self.assertIs(type(typing.Deque[int]()), collections.deque)
+ class D(typing.Deque[T]): pass
+ self.assertIs(type(D[int]()), D)
+
+ def test_counter_instantiation(self):
+ self.assertIs(type(typing.Counter()), collections.Counter)
+ self.assertIs(type(typing.Counter[T]()), collections.Counter)
+ self.assertIs(type(typing.Counter[int]()), collections.Counter)
+ class C(typing.Counter[T]): pass
+ self.assertIs(type(C[int]()), C)
+
+ def test_counter_subclass_instantiation(self):
+
+ class MyCounter(typing.Counter[int]):
+ pass
+
+ d = MyCounter()
+ self.assertIsInstance(d, MyCounter)
+ self.assertIsInstance(d, typing.Counter)
+ self.assertIsInstance(d, collections.Counter)
+
+ def test_no_set_instantiation(self):
+ with self.assertRaises(TypeError):
+ typing.Set()
+ with self.assertRaises(TypeError):
+ typing.Set[T]()
+ with self.assertRaises(TypeError):
+ typing.Set[int]()
+
+ def test_set_subclass_instantiation(self):
+
+ class MySet(typing.Set[int]):
+ pass
+
+ d = MySet()
+ self.assertIsInstance(d, MySet)
+
+ def test_no_frozenset_instantiation(self):
+ with self.assertRaises(TypeError):
+ typing.FrozenSet()
+ with self.assertRaises(TypeError):
+ typing.FrozenSet[T]()
+ with self.assertRaises(TypeError):
+ typing.FrozenSet[int]()
+
+ def test_frozenset_subclass_instantiation(self):
+
+ class MyFrozenSet(typing.FrozenSet[int]):
+ pass
+
+ d = MyFrozenSet()
+ self.assertIsInstance(d, MyFrozenSet)
+
+ def test_no_tuple_instantiation(self):
+ with self.assertRaises(TypeError):
+ Tuple()
+ with self.assertRaises(TypeError):
+ Tuple[T]()
+ with self.assertRaises(TypeError):
+ Tuple[int]()
+
+ def test_generator(self):
+ def foo():
+ yield 42
+ g = foo()
+ self.assertIsSubclass(type(g), typing.Generator)
+
+ def test_no_generator_instantiation(self):
+ with self.assertRaises(TypeError):
+ typing.Generator()
+ with self.assertRaises(TypeError):
+ typing.Generator[T, T, T]()
+ with self.assertRaises(TypeError):
+ typing.Generator[int, int, int]()
+
+ def test_subclassing(self):
+
+ class MMA(typing.MutableMapping):
+ pass
+
+ with self.assertRaises(TypeError): # It's abstract
+ MMA()
+
+ class MMC(MMA):
+ def __getitem__(self, k):
+ return None
+ def __setitem__(self, k, v):
+ pass
+ def __delitem__(self, k):
+ pass
+ def __iter__(self):
+ return iter(())
+ def __len__(self):
+ return 0
+
+ self.assertEqual(len(MMC()), 0)
+ assert callable(MMC.update)
+ self.assertIsInstance(MMC(), typing.Mapping)
+
+ class MMB(typing.MutableMapping[KT, VT]):
+ def __getitem__(self, k):
+ return None
+ def __setitem__(self, k, v):
+ pass
+ def __delitem__(self, k):
+ pass
+ def __iter__(self):
+ return iter(())
+ def __len__(self):
+ return 0
+
+ self.assertEqual(len(MMB()), 0)
+ self.assertEqual(len(MMB[str, str]()), 0)
+ self.assertEqual(len(MMB[KT, VT]()), 0)
+
+ self.assertNotIsSubclass(dict, MMA)
+ self.assertNotIsSubclass(dict, MMB)
+
+ self.assertIsSubclass(MMA, typing.Mapping)
+ self.assertIsSubclass(MMB, typing.Mapping)
+ self.assertIsSubclass(MMC, typing.Mapping)
+
+ self.assertIsInstance(MMB[KT, VT](), typing.Mapping)
+ self.assertIsInstance(MMB[KT, VT](), collections.Mapping)
+
+ self.assertIsSubclass(MMA, collections.Mapping)
+ self.assertIsSubclass(MMB, collections.Mapping)
+ self.assertIsSubclass(MMC, collections.Mapping)
+
+ self.assertIsSubclass(MMB[str, str], typing.Mapping)
+ self.assertIsSubclass(MMC, MMA)
+
+ class It(typing.Iterable): pass
+ self.assertNotIsSubclass(list, It)
+
+ class G(typing.Generator[int, int, int]): pass
+ def g(): yield 0
+ self.assertIsSubclass(G, typing.Generator)
+ self.assertIsSubclass(G, typing.Iterable)
+ if hasattr(collections, 'Generator'):
+ self.assertIsSubclass(G, collections.Generator)
+ self.assertIsSubclass(G, collections.Iterable)
+ self.assertNotIsSubclass(type(g), G)
+
+ def test_subclassing_subclasshook(self):
+
+ class Base(typing.Iterable):
+ @classmethod
+ def __subclasshook__(cls, other):
+ if other.__name__ == 'Foo':
+ return True
+ else:
+ return False
+
+ class C(Base): pass
+ class Foo: pass
+ class Bar: pass
+ self.assertIsSubclass(Foo, Base)
+ self.assertIsSubclass(Foo, C)
+ self.assertNotIsSubclass(Bar, C)
+
+ def test_subclassing_register(self):
+
+ class A(typing.Container): pass
+ class B(A): pass
+
+ class C: pass
+ A.register(C)
+ self.assertIsSubclass(C, A)
+ self.assertNotIsSubclass(C, B)
+
+ class D: pass
+ B.register(D)
+ self.assertIsSubclass(D, A)
+ self.assertIsSubclass(D, B)
+
+ class M(): pass
+ collections.MutableMapping.register(M)
+ self.assertIsSubclass(M, typing.Mapping)
+
+ def test_collections_as_base(self):
+
+ class M(collections.Mapping): pass
+ self.assertIsSubclass(M, typing.Mapping)
+ self.assertIsSubclass(M, typing.Iterable)
+
+ class S(collections.MutableSequence): pass
+ self.assertIsSubclass(S, typing.MutableSequence)
+ self.assertIsSubclass(S, typing.Iterable)
+
+ class It(collections.Iterable): pass
+ self.assertIsSubclass(It, typing.Iterable)
+
+ class A(collections.Mapping): pass
+ class B: pass
+ A.register(B)
+ self.assertIsSubclass(B, typing.Mapping)
+
+
+class OtherABCTests(BaseTestCase):
+
+ def test_contextmanager(self):
+ @contextlib.contextmanager
+ def manager():
+ yield 42
+
+ cm = manager()
+ self.assertIsInstance(cm, typing.ContextManager)
+ self.assertNotIsInstance(42, typing.ContextManager)
+
+
+class TypeTests(BaseTestCase):
+
+ def test_type_basic(self):
+
+ class User(object): pass
+ class BasicUser(User): pass
+ class ProUser(User): pass
+
+ def new_user(user_class):
+ # type: (Type[User]) -> User
+ return user_class()
+
+ new_user(BasicUser)
+
+ def test_type_typevar(self):
+
+ class User(object): pass
+ class BasicUser(User): pass
+ class ProUser(User): pass
+
+ global U
+ U = TypeVar('U', bound=User)
+
+ def new_user(user_class):
+ # type: (Type[U]) -> U
+ return user_class()
+
+ new_user(BasicUser)
+
+ def test_type_optional(self):
+ A = Optional[Type[BaseException]] # noqa
+
+ def foo(a):
+ # type: (A) -> Optional[BaseException]
+ if a is None:
+ return None
+ else:
+ return a()
+
+ assert isinstance(foo(KeyboardInterrupt), KeyboardInterrupt)
+ assert foo(None) is None
+
+
+class NewTypeTests(BaseTestCase):
+
+ def test_basic(self):
+ UserId = NewType('UserId', int)
+ UserName = NewType('UserName', str)
+ self.assertIsInstance(UserId(5), int)
+ self.assertIsInstance(UserName('Joe'), type('Joe'))
+ self.assertEqual(UserId(5) + 1, 6)
+
+ def test_errors(self):
+ UserId = NewType('UserId', int)
+ UserName = NewType('UserName', str)
+ with self.assertRaises(TypeError):
+ issubclass(UserId, int)
+ with self.assertRaises(TypeError):
+ class D(UserName):
+ pass
+
+
+class NamedTupleTests(BaseTestCase):
+
+ def test_basics(self):
+ Emp = NamedTuple('Emp', [('name', str), ('id', int)])
+ self.assertIsSubclass(Emp, tuple)
+ joe = Emp('Joe', 42)
+ jim = Emp(name='Jim', id=1)
+ self.assertIsInstance(joe, Emp)
+ self.assertIsInstance(joe, tuple)
+ self.assertEqual(joe.name, 'Joe')
+ self.assertEqual(joe.id, 42)
+ self.assertEqual(jim.name, 'Jim')
+ self.assertEqual(jim.id, 1)
+ self.assertEqual(Emp.__name__, 'Emp')
+ self.assertEqual(Emp._fields, ('name', 'id'))
+ self.assertEqual(Emp._field_types, dict(name=str, id=int))
+
+ def test_pickle(self):
+ global Emp # pickle wants to reference the class by name
+ Emp = NamedTuple('Emp', [('name', str), ('id', int)])
+ jane = Emp('jane', 37)
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ z = pickle.dumps(jane, proto)
+ jane2 = pickle.loads(z)
+ self.assertEqual(jane2, jane)
+
+
+class TypedDictTests(BaseTestCase):
+
+ def test_basics_iterable_syntax(self):
+ Emp = TypedDict(b'Emp', {'name': str, 'id': int})
+ self.assertIsSubclass(Emp, dict)
+ self.assertIsSubclass(Emp, typing.MutableMapping)
+ if sys.version_info[0] >= 3:
+ import collections.abc
+ self.assertNotIsSubclass(Emp, collections.abc.Sequence)
+ jim = Emp(name='Jim', id=1)
+ self.assertIs(type(jim), dict)
+ self.assertEqual(jim['name'], 'Jim')
+ self.assertEqual(jim['id'], 1)
+ self.assertEqual(Emp.__name__, 'Emp')
+ self.assertEqual(Emp.__module__, __name__)
+ self.assertEqual(Emp.__bases__, (dict,))
+ self.assertEqual(Emp.__annotations__, {'name': str, 'id': int})
+ self.assertEqual(Emp.__total__, True)
+
+ def test_basics_keywords_syntax(self):
+ Emp = TypedDict(b'Emp', name=str, id=int)
+ self.assertIsSubclass(Emp, dict)
+ self.assertIsSubclass(Emp, typing.MutableMapping)
+ if sys.version_info[0] >= 3:
+ import collections.abc
+ self.assertNotIsSubclass(Emp, collections.abc.Sequence)
+ jim = Emp(name='Jim', id=1)
+ self.assertIs(type(jim), dict)
+ self.assertEqual(jim['name'], 'Jim')
+ self.assertEqual(jim['id'], 1)
+ self.assertEqual(Emp.__name__, 'Emp')
+ self.assertEqual(Emp.__module__, __name__)
+ self.assertEqual(Emp.__bases__, (dict,))
+ self.assertEqual(Emp.__annotations__, {'name': str, 'id': int})
+ self.assertEqual(Emp.__total__, True)
+
+ def test_typeddict_errors(self):
+ Emp = TypedDict(b'Emp', {'name': str, 'id': int})
+ self.assertEqual(TypedDict.__module__, 'typing')
+ jim = Emp(name='Jim', id=1)
+ with self.assertRaises(TypeError):
+ isinstance({}, Emp)
+ with self.assertRaises(TypeError):
+ isinstance(jim, Emp)
+ with self.assertRaises(TypeError):
+ issubclass(dict, Emp)
+ with self.assertRaises(TypeError):
+ TypedDict('Hi', x=1)
+ with self.assertRaises(TypeError):
+ TypedDict('Hi', [('x', int), ('y', 1)])
+ with self.assertRaises(TypeError):
+ TypedDict('Hi', [('x', int)], y=int)
+
+ def test_pickle(self):
+ global EmpD # pickle wants to reference the class by name
+ EmpD = TypedDict(b'EmpD', name=str, id=int)
+ jane = EmpD({'name': 'jane', 'id': 37})
+ for proto in range(pickle.HIGHEST_PROTOCOL + 1):
+ z = pickle.dumps(jane, proto)
+ jane2 = pickle.loads(z)
+ self.assertEqual(jane2, jane)
+ self.assertEqual(jane2, {'name': 'jane', 'id': 37})
+ ZZ = pickle.dumps(EmpD, proto)
+ EmpDnew = pickle.loads(ZZ)
+ self.assertEqual(EmpDnew({'name': 'jane', 'id': 37}), jane)
+
+ def test_optional(self):
+ EmpD = TypedDict(b'EmpD', name=str, id=int)
+
+ self.assertEqual(typing.Optional[EmpD], typing.Union[None, EmpD])
+ self.assertNotEqual(typing.List[EmpD], typing.Tuple[EmpD])
+
+ def test_total(self):
+ D = TypedDict(b'D', {'x': int}, total=False)
+ self.assertEqual(D(), {})
+ self.assertEqual(D(x=1), {'x': 1})
+ self.assertEqual(D.__total__, False)
+
+
+class IOTests(BaseTestCase):
+
+ def test_io_submodule(self):
+ from typing.io import IO, TextIO, BinaryIO, __all__, __name__
+ self.assertIs(IO, typing.IO)
+ self.assertIs(TextIO, typing.TextIO)
+ self.assertIs(BinaryIO, typing.BinaryIO)
+ self.assertEqual(set(__all__), set(['IO', 'TextIO', 'BinaryIO']))
+ self.assertEqual(__name__, 'typing.io')
+
+
+class RETests(BaseTestCase):
+ # Much of this is really testing _TypeAlias.
+
+ def test_basics(self):
+ pat = re.compile('[a-z]+', re.I)
+ self.assertIsSubclass(pat.__class__, Pattern)
+ self.assertIsSubclass(type(pat), Pattern)
+ self.assertIsInstance(pat, Pattern)
+
+ mat = pat.search('12345abcde.....')
+ self.assertIsSubclass(mat.__class__, Match)
+ self.assertIsSubclass(type(mat), Match)
+ self.assertIsInstance(mat, Match)
+
+ # these should just work
+ Pattern[Union[str, bytes]]
+ Match[Union[bytes, str]]
+
+ def test_alias_equality(self):
+ self.assertEqual(Pattern[str], Pattern[str])
+ self.assertNotEqual(Pattern[str], Pattern[bytes])
+ self.assertNotEqual(Pattern[str], Match[str])
+ self.assertNotEqual(Pattern[str], str)
+
+ def test_errors(self):
+ with self.assertRaises(TypeError):
+ # Doesn't fit AnyStr.
+ Pattern[int]
+ with self.assertRaises(TypeError):
+ # Can't change type vars?
+ Match[T]
+ m = Match[Union[str, bytes]]
+ with self.assertRaises(TypeError):
+ # Too complicated?
+ m[str]
+ with self.assertRaises(TypeError):
+ # We don't support isinstance().
+ isinstance(42, Pattern[str])
+ with self.assertRaises(TypeError):
+ # We don't support issubclass().
+ issubclass(Pattern[bytes], Pattern[str])
+
+ def test_repr(self):
+ self.assertEqual(repr(Pattern), 'Pattern[~AnyStr]')
+ self.assertEqual(repr(Pattern[unicode]), 'Pattern[unicode]')
+ self.assertEqual(repr(Pattern[str]), 'Pattern[str]')
+ self.assertEqual(repr(Match), 'Match[~AnyStr]')
+ self.assertEqual(repr(Match[unicode]), 'Match[unicode]')
+ self.assertEqual(repr(Match[str]), 'Match[str]')
+
+ def test_re_submodule(self):
+ from typing.re import Match, Pattern, __all__, __name__
+ self.assertIs(Match, typing.Match)
+ self.assertIs(Pattern, typing.Pattern)
+ self.assertEqual(set(__all__), set(['Match', 'Pattern']))
+ self.assertEqual(__name__, 'typing.re')
+
+ def test_cannot_subclass(self):
+ with self.assertRaises(TypeError) as ex:
+
+ class A(typing.Match):
+ pass
+
+ self.assertEqual(str(ex.exception),
+ "Cannot subclass typing._TypeAlias")
+
+
+class AllTests(BaseTestCase):
+ """Tests for __all__."""
+
+ def test_all(self):
+ from typing import __all__ as a
+ # Just spot-check the first and last of every category.
+ self.assertIn('AbstractSet', a)
+ self.assertIn('ValuesView', a)
+ self.assertIn('cast', a)
+ self.assertIn('overload', a)
+ # Check that io and re are not exported.
+ self.assertNotIn('io', a)
+ self.assertNotIn('re', a)
+ # Spot-check that stdlib modules aren't exported.
+ self.assertNotIn('os', a)
+ self.assertNotIn('sys', a)
+ # Check that Text is defined.
+ self.assertIn('Text', a)
+ # Check previously missing class.
+ self.assertIn('SupportsComplex', a)
+
+ def test_respect_no_type_check(self):
+ @typing.no_type_check
+ class NoTpCheck(object):
+ class Inn(object):
+ def __init__(self, x):
+ # type: (this is not actually a type) -> None # noqa
+ pass
+ self.assertTrue(NoTpCheck.__no_type_check__)
+ self.assertTrue(NoTpCheck.Inn.__init__.__no_type_check__)
+
+ def test_get_type_hints_dummy(self):
+
+ def foo(x):
+ # type: (int) -> int
+ return x + 1
+
+ self.assertIsNone(typing.get_type_hints(foo))
+
+ # def test_typing_compiles_with_opt(self):
+ # file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
+ # 'typing.py')
+ # try:
+ # subprocess.check_output([sys.executable, '-OO', file_path],
+ # stderr=subprocess.STDOUT)
+ # except subprocess.CalledProcessError:
+ # self.fail('Module does not compile with optimize=2 (-OO flag).')
+
+
+if __name__ == '__main__':
+ main()
diff --git a/contrib/deprecated/python/typing/test/ya.make b/contrib/deprecated/python/typing/test/ya.make
new file mode 100644
index 0000000000..9199fe1e54
--- /dev/null
+++ b/contrib/deprecated/python/typing/test/ya.make
@@ -0,0 +1,14 @@
+PY2TEST()
+
+PEERDIR(
+ contrib/deprecated/python/typing
+)
+
+TEST_SRCS(
+ mod_generics_cache.py
+ test_typing.py
+)
+
+NO_LINT()
+
+END()
diff --git a/contrib/deprecated/python/typing/typing.py b/contrib/deprecated/python/typing/typing.py
new file mode 100644
index 0000000000..dd16d9af96
--- /dev/null
+++ b/contrib/deprecated/python/typing/typing.py
@@ -0,0 +1,2550 @@
+from __future__ import absolute_import, unicode_literals
+
+import abc
+from abc import abstractmethod, abstractproperty
+import collections
+import functools
+import re as stdlib_re # Avoid confusion with the re we export.
+import sys
+import types
+import copy
+try:
+ import collections.abc as collections_abc
+except ImportError:
+    import collections as collections_abc  # Fallback for Python 2 (and 3.2).
+
+
+# Please keep __all__ alphabetized within each category.
+__all__ = [
+ # Super-special typing primitives.
+ 'Any',
+ 'Callable',
+ 'ClassVar',
+ 'Final',
+ 'Generic',
+ 'Literal',
+ 'Optional',
+ 'Protocol',
+ 'Tuple',
+ 'Type',
+ 'TypeVar',
+ 'Union',
+
+ # ABCs (from collections.abc).
+ 'AbstractSet', # collections.abc.Set.
+ 'GenericMeta', # subclass of abc.ABCMeta and a metaclass
+ # for 'Generic' and ABCs below.
+ 'ByteString',
+ 'Container',
+ 'ContextManager',
+ 'Hashable',
+ 'ItemsView',
+ 'Iterable',
+ 'Iterator',
+ 'KeysView',
+ 'Mapping',
+ 'MappingView',
+ 'MutableMapping',
+ 'MutableSequence',
+ 'MutableSet',
+ 'Sequence',
+ 'Sized',
+ 'ValuesView',
+
+ # Structural checks, a.k.a. protocols.
+ 'Reversible',
+ 'SupportsAbs',
+ 'SupportsComplex',
+ 'SupportsFloat',
+ 'SupportsIndex',
+ 'SupportsInt',
+
+ # Concrete collection types.
+ 'Counter',
+ 'Deque',
+ 'Dict',
+ 'DefaultDict',
+ 'List',
+ 'Set',
+ 'FrozenSet',
+ 'NamedTuple', # Not really a type.
+ 'TypedDict', # Not really a type.
+ 'Generator',
+
+ # One-off things.
+ 'AnyStr',
+ 'cast',
+ 'final',
+ 'get_type_hints',
+ 'NewType',
+ 'no_type_check',
+ 'no_type_check_decorator',
+ 'NoReturn',
+ 'overload',
+ 'runtime_checkable',
+ 'Text',
+ 'TYPE_CHECKING',
+]
+
+# The pseudo-submodules 're' and 'io' are part of the public
+# namespace, but excluded from __all__ because they might stomp on
+# legitimate imports of those modules.
+
+
+def _qualname(x):
+ if sys.version_info[:2] >= (3, 3):
+ return x.__qualname__
+ else:
+ # Fall back to just name.
+ return x.__name__
+
+
+def _trim_name(nm):
+ whitelist = ('_TypeAlias', '_ForwardRef', '_TypingBase', '_FinalTypingBase')
+ if nm.startswith('_') and nm not in whitelist:
+ nm = nm[1:]
+ return nm
+
+
+class TypingMeta(type):
+ """Metaclass for most types defined in typing module
+ (not a part of public API).
+
+ This also defines a dummy constructor (all the work for most typing
+ constructs is done in __new__) and a nicer repr().
+ """
+
+ _is_protocol = False
+
+ def __new__(cls, name, bases, namespace):
+ return super(TypingMeta, cls).__new__(cls, str(name), bases, namespace)
+
+ @classmethod
+ def assert_no_subclassing(cls, bases):
+ for base in bases:
+ if isinstance(base, cls):
+ raise TypeError("Cannot subclass %s" %
+ (', '.join(map(_type_repr, bases)) or '()'))
+
+ def __init__(self, *args, **kwds):
+ pass
+
+ def _eval_type(self, globalns, localns):
+ """Override this in subclasses to interpret forward references.
+
+ For example, List['C'] is internally stored as
+ List[_ForwardRef('C')], which should evaluate to List[C],
+ where C is an object found in globalns or localns (searching
+ localns first, of course).
+ """
+ return self
+
+ def _get_type_vars(self, tvars):
+ pass
+
+ def __repr__(self):
+ qname = _trim_name(_qualname(self))
+ return '%s.%s' % (self.__module__, qname)
+
+
+class _TypingBase(object):
+ """Internal indicator of special typing constructs."""
+ __metaclass__ = TypingMeta
+ __slots__ = ('__weakref__',)
+
+ def __init__(self, *args, **kwds):
+ pass
+
+ def __new__(cls, *args, **kwds):
+ """Constructor.
+
+ This only exists to give a better error message in case
+ someone tries to subclass a special typing object (not a good idea).
+ """
+ if (len(args) == 3 and
+ isinstance(args[0], str) and
+ isinstance(args[1], tuple)):
+ # Close enough.
+ raise TypeError("Cannot subclass %r" % cls)
+ return super(_TypingBase, cls).__new__(cls)
+
+ # Things that are not classes also need these.
+ def _eval_type(self, globalns, localns):
+ return self
+
+ def _get_type_vars(self, tvars):
+ pass
+
+ def __repr__(self):
+ cls = type(self)
+ qname = _trim_name(_qualname(cls))
+ return '%s.%s' % (cls.__module__, qname)
+
+ def __call__(self, *args, **kwds):
+ raise TypeError("Cannot instantiate %r" % type(self))
+
+
+class _FinalTypingBase(_TypingBase):
+ """Internal mix-in class to prevent instantiation.
+
+    Prevents instantiation unless _root=True is given in the class call.
+ It is used to create pseudo-singleton instances Any, Union, Optional, etc.
+ """
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ self = super(_FinalTypingBase, cls).__new__(cls, *args, **kwds)
+ if '_root' in kwds and kwds['_root'] is True:
+ return self
+ raise TypeError("Cannot instantiate %r" % cls)
+
+ def __reduce__(self):
+ return _trim_name(type(self).__name__)
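+
+# A minimal sketch of how _FinalTypingBase is used: each pseudo-singleton
+# below is created exactly once with _root=True (e.g. Any = _Any(_root=True)),
+# while a plain call such as _Any() raises TypeError, so no second instance
+# can ever be created.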
+
+
+class _ForwardRef(_TypingBase):
+ """Internal wrapper to hold a forward reference."""
+
+ __slots__ = ('__forward_arg__', '__forward_code__',
+ '__forward_evaluated__', '__forward_value__')
+
+ def __init__(self, arg):
+ super(_ForwardRef, self).__init__(arg)
+ if not isinstance(arg, basestring):
+ raise TypeError('Forward reference must be a string -- got %r' % (arg,))
+ try:
+ code = compile(arg, '<string>', 'eval')
+ except SyntaxError:
+ raise SyntaxError('Forward reference must be an expression -- got %r' %
+ (arg,))
+ self.__forward_arg__ = arg
+ self.__forward_code__ = code
+ self.__forward_evaluated__ = False
+ self.__forward_value__ = None
+
+ def _eval_type(self, globalns, localns):
+ if not self.__forward_evaluated__ or localns is not globalns:
+ if globalns is None and localns is None:
+ globalns = localns = {}
+ elif globalns is None:
+ globalns = localns
+ elif localns is None:
+ localns = globalns
+ self.__forward_value__ = _type_check(
+ eval(self.__forward_code__, globalns, localns),
+ "Forward references must evaluate to types.")
+ self.__forward_evaluated__ = True
+ return self.__forward_value__
+
+ def __eq__(self, other):
+ if not isinstance(other, _ForwardRef):
+ return NotImplemented
+ return (self.__forward_arg__ == other.__forward_arg__ and
+ self.__forward_value__ == other.__forward_value__)
+
+ def __hash__(self):
+ return hash((self.__forward_arg__, self.__forward_value__))
+
+ def __instancecheck__(self, obj):
+ raise TypeError("Forward references cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("Forward references cannot be used with issubclass().")
+
+ def __repr__(self):
+ return '_ForwardRef(%r)' % (self.__forward_arg__,)
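+
+# A short sketch of the forward-reference machinery: List['C'] is stored
+# internally as List[_ForwardRef('C')], and _eval_type() resolves the string
+# once namespaces are available, e.g.:
+#
+#     ref = _ForwardRef('int')
+#     assert ref._eval_type({}, {}) is int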
+
+
+class _TypeAlias(_TypingBase):
+ """Internal helper class for defining generic variants of concrete types.
+
+ Note that this is not a type; let's call it a pseudo-type. It cannot
+ be used in instance and subclass checks in parameterized form, i.e.
+ ``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning
+ ``False``.
+ """
+
+ __slots__ = ('name', 'type_var', 'impl_type', 'type_checker')
+
+ def __init__(self, name, type_var, impl_type, type_checker):
+ """Initializer.
+
+ Args:
+ name: The name, e.g. 'Pattern'.
+ type_var: The type parameter, e.g. AnyStr, or the
+ specific type, e.g. str.
+ impl_type: The implementation type.
+            type_checker: Function that takes an impl_type instance
+                and returns a value that should be a type_var instance.
+ """
+ assert isinstance(name, basestring), repr(name)
+ assert isinstance(impl_type, type), repr(impl_type)
+ assert not isinstance(impl_type, TypingMeta), repr(impl_type)
+ assert isinstance(type_var, (type, _TypingBase)), repr(type_var)
+ self.name = name
+ self.type_var = type_var
+ self.impl_type = impl_type
+ self.type_checker = type_checker
+
+ def __repr__(self):
+ return "%s[%s]" % (self.name, _type_repr(self.type_var))
+
+ def __getitem__(self, parameter):
+ if not isinstance(self.type_var, TypeVar):
+ raise TypeError("%s cannot be further parameterized." % self)
+ if self.type_var.__constraints__ and isinstance(parameter, type):
+ if not issubclass(parameter, self.type_var.__constraints__):
+ raise TypeError("%s is not a valid substitution for %s." %
+ (parameter, self.type_var))
+ if isinstance(parameter, TypeVar) and parameter is not self.type_var:
+ raise TypeError("%s cannot be re-parameterized." % self)
+ return self.__class__(self.name, parameter,
+ self.impl_type, self.type_checker)
+
+ def __eq__(self, other):
+ if not isinstance(other, _TypeAlias):
+ return NotImplemented
+ return self.name == other.name and self.type_var == other.type_var
+
+ def __hash__(self):
+ return hash((self.name, self.type_var))
+
+ def __instancecheck__(self, obj):
+ if not isinstance(self.type_var, TypeVar):
+ raise TypeError("Parameterized type aliases cannot be used "
+ "with isinstance().")
+ return isinstance(obj, self.impl_type)
+
+ def __subclasscheck__(self, cls):
+ if not isinstance(self.type_var, TypeVar):
+ raise TypeError("Parameterized type aliases cannot be used "
+ "with issubclass().")
+ return issubclass(cls, self.impl_type)
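+
+# Pattern and Match (defined later in this module) are built as _TypeAlias
+# instances; as the accompanying tests exercise, isinstance(re.compile('x'),
+# Pattern) works, while isinstance(42, Pattern[str]) raises TypeError once
+# the alias is parameterized.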
+
+
+def _get_type_vars(types, tvars):
+ for t in types:
+ if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
+ t._get_type_vars(tvars)
+
+
+def _type_vars(types):
+ tvars = []
+ _get_type_vars(types, tvars)
+ return tuple(tvars)
+
+
+def _eval_type(t, globalns, localns):
+ if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
+ return t._eval_type(globalns, localns)
+ return t
+
+
+def _type_check(arg, msg):
+ """Check that the argument is a type, and return it (internal helper).
+
+ As a special case, accept None and return type(None) instead.
+ Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable.
+
+ The msg argument is a human-readable error message, e.g.
+
+ "Union[arg, ...]: arg should be a type."
+
+ We append the repr() of the actual value (truncated to 100 chars).
+ """
+ if arg is None:
+ return type(None)
+ if isinstance(arg, basestring):
+ arg = _ForwardRef(arg)
+ if (
+ isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or
+ not isinstance(arg, (type, _TypingBase)) and not callable(arg)
+ ):
+ raise TypeError(msg + " Got %.100r." % (arg,))
+ # Bare Union etc. are not valid as type arguments
+ if (
+ type(arg).__name__ in ('_Union', '_Optional') and
+ not getattr(arg, '__origin__', None) or
+ isinstance(arg, TypingMeta) and arg._gorg in (Generic, Protocol)
+ ):
+ raise TypeError("Plain %s is not valid as type argument" % arg)
+ return arg
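+
+# Illustrative behaviour of _type_check() (a sketch, not an exhaustive spec):
+#
+#     _type_check(None, "msg")   # returns type(None)
+#     _type_check('C', "msg")    # returns _ForwardRef('C')
+#     _type_check(42, "msg")     # raises TypeError("msg Got 42.")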
+
+
+def _type_repr(obj):
+ """Return the repr() of an object, special-casing types (internal helper).
+
+ If obj is a type, we return a shorter version than the default
+ type.__repr__, based on the module and qualified name, which is
+ typically enough to uniquely identify a type. For everything
+ else, we fall back on repr(obj).
+ """
+ if isinstance(obj, type) and not isinstance(obj, TypingMeta):
+ if obj.__module__ == '__builtin__':
+ return _qualname(obj)
+ return '%s.%s' % (obj.__module__, _qualname(obj))
+ if obj is Ellipsis:
+ return '...'
+ if isinstance(obj, types.FunctionType):
+ return obj.__name__
+ return repr(obj)
+
+
+class ClassVarMeta(TypingMeta):
+ """Metaclass for _ClassVar"""
+
+ def __new__(cls, name, bases, namespace):
+ cls.assert_no_subclassing(bases)
+ self = super(ClassVarMeta, cls).__new__(cls, name, bases, namespace)
+ return self
+
+
+class _ClassVar(_FinalTypingBase):
+ """Special type construct to mark class variables.
+
+ An annotation wrapped in ClassVar indicates that a given
+ attribute is intended to be used as a class variable and
+ should not be set on instances of that class. Usage::
+
+ class Starship:
+ stats = {} # type: ClassVar[Dict[str, int]] # class variable
+ damage = 10 # type: int # instance variable
+
+    ClassVar accepts only types and cannot be further subscripted.
+
+ Note that ClassVar is not a class itself, and should not
+ be used with isinstance() or issubclass().
+ """
+
+ __metaclass__ = ClassVarMeta
+ __slots__ = ('__type__',)
+
+ def __init__(self, tp=None, _root=False):
+ self.__type__ = tp
+
+ def __getitem__(self, item):
+ cls = type(self)
+ if self.__type__ is None:
+ return cls(_type_check(item,
+ '{} accepts only types.'.format(cls.__name__[1:])),
+ _root=True)
+ raise TypeError('{} cannot be further subscripted'
+ .format(cls.__name__[1:]))
+
+ def _eval_type(self, globalns, localns):
+ return type(self)(_eval_type(self.__type__, globalns, localns),
+ _root=True)
+
+ def __repr__(self):
+ r = super(_ClassVar, self).__repr__()
+ if self.__type__ is not None:
+ r += '[{}]'.format(_type_repr(self.__type__))
+ return r
+
+ def __hash__(self):
+ return hash((type(self).__name__, self.__type__))
+
+ def __eq__(self, other):
+ if not isinstance(other, _ClassVar):
+ return NotImplemented
+ if self.__type__ is not None:
+ return self.__type__ == other.__type__
+ return self is other
+
+
+ClassVar = _ClassVar(_root=True)
+
+
+class _FinalMeta(TypingMeta):
+ """Metaclass for _Final"""
+
+ def __new__(cls, name, bases, namespace):
+ cls.assert_no_subclassing(bases)
+ self = super(_FinalMeta, cls).__new__(cls, name, bases, namespace)
+ return self
+
+
+class _Final(_FinalTypingBase):
+ """A special typing construct to indicate that a name
+ cannot be re-assigned or overridden in a subclass.
+ For example:
+
+ MAX_SIZE: Final = 9000
+ MAX_SIZE += 1 # Error reported by type checker
+
+ class Connection:
+ TIMEOUT: Final[int] = 10
+ class FastConnector(Connection):
+ TIMEOUT = 1 # Error reported by type checker
+
+ There is no runtime checking of these properties.
+ """
+
+ __metaclass__ = _FinalMeta
+ __slots__ = ('__type__',)
+
+ def __init__(self, tp=None, **kwds):
+ self.__type__ = tp
+
+ def __getitem__(self, item):
+ cls = type(self)
+ if self.__type__ is None:
+ return cls(_type_check(item,
+ '{} accepts only single type.'.format(cls.__name__[1:])),
+ _root=True)
+ raise TypeError('{} cannot be further subscripted'
+ .format(cls.__name__[1:]))
+
+ def _eval_type(self, globalns, localns):
+ new_tp = _eval_type(self.__type__, globalns, localns)
+ if new_tp == self.__type__:
+ return self
+ return type(self)(new_tp, _root=True)
+
+ def __repr__(self):
+ r = super(_Final, self).__repr__()
+ if self.__type__ is not None:
+ r += '[{}]'.format(_type_repr(self.__type__))
+ return r
+
+ def __hash__(self):
+ return hash((type(self).__name__, self.__type__))
+
+ def __eq__(self, other):
+ if not isinstance(other, _Final):
+ return NotImplemented
+ if self.__type__ is not None:
+ return self.__type__ == other.__type__
+ return self is other
+
+
+Final = _Final(_root=True)
+
+
+def final(f):
+ """This decorator can be used to indicate to type checkers that
+    the decorated method cannot be overridden, and the decorated class
+ cannot be subclassed. For example:
+
+ class Base:
+ @final
+ def done(self) -> None:
+ ...
+ class Sub(Base):
+ def done(self) -> None: # Error reported by type checker
+ ...
+ @final
+ class Leaf:
+ ...
+ class Other(Leaf): # Error reported by type checker
+ ...
+
+ There is no runtime checking of these properties.
+ """
+ return f
+
+
+class _LiteralMeta(TypingMeta):
+ """Metaclass for _Literal"""
+
+ def __new__(cls, name, bases, namespace):
+ cls.assert_no_subclassing(bases)
+ self = super(_LiteralMeta, cls).__new__(cls, name, bases, namespace)
+ return self
+
+
+class _Literal(_FinalTypingBase):
+ """A type that can be used to indicate to type checkers that the
+    corresponding value is literally equal to the provided
+    parameter. For example:
+
+ var: Literal[4] = 4
+
+ The type checker understands that 'var' is literally equal to the
+ value 4 and no other value.
+
+ Literal[...] cannot be subclassed. There is no runtime checking
+ verifying that the parameter is actually a value instead of a type.
+ """
+
+ __metaclass__ = _LiteralMeta
+ __slots__ = ('__values__',)
+
+ def __init__(self, values=None, **kwds):
+ self.__values__ = values
+
+ def __getitem__(self, item):
+ cls = type(self)
+ if self.__values__ is None:
+ if not isinstance(item, tuple):
+ item = (item,)
+ return cls(values=item,
+ _root=True)
+ raise TypeError('{} cannot be further subscripted'
+ .format(cls.__name__[1:]))
+
+ def _eval_type(self, globalns, localns):
+ return self
+
+ def __repr__(self):
+ r = super(_Literal, self).__repr__()
+ if self.__values__ is not None:
+ r += '[{}]'.format(', '.join(map(_type_repr, self.__values__)))
+ return r
+
+ def __hash__(self):
+ return hash((type(self).__name__, self.__values__))
+
+ def __eq__(self, other):
+ if not isinstance(other, _Literal):
+ return NotImplemented
+ if self.__values__ is not None:
+ return self.__values__ == other.__values__
+ return self is other
+
+
+Literal = _Literal(_root=True)
+
+
+class AnyMeta(TypingMeta):
+ """Metaclass for Any."""
+
+ def __new__(cls, name, bases, namespace):
+ cls.assert_no_subclassing(bases)
+ self = super(AnyMeta, cls).__new__(cls, name, bases, namespace)
+ return self
+
+
+class _Any(_FinalTypingBase):
+ """Special type indicating an unconstrained type.
+
+ - Any is compatible with every type.
+    - Any is assumed to have all methods.
+    - All values are assumed to be instances of Any.
+
+ Note that all the above statements are true from the point of view of
+ static type checkers. At runtime, Any should not be used with instance
+ or class checks.
+ """
+ __metaclass__ = AnyMeta
+ __slots__ = ()
+
+ def __instancecheck__(self, obj):
+ raise TypeError("Any cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("Any cannot be used with issubclass().")
+
+
+Any = _Any(_root=True)
+
+
+class NoReturnMeta(TypingMeta):
+ """Metaclass for NoReturn."""
+
+ def __new__(cls, name, bases, namespace):
+ cls.assert_no_subclassing(bases)
+ self = super(NoReturnMeta, cls).__new__(cls, name, bases, namespace)
+ return self
+
+
+class _NoReturn(_FinalTypingBase):
+ """Special type indicating functions that never return.
+ Example::
+
+ from typing import NoReturn
+
+ def stop() -> NoReturn:
+ raise Exception('no way')
+
+ This type is invalid in other positions, e.g., ``List[NoReturn]``
+ will fail in static type checkers.
+ """
+ __metaclass__ = NoReturnMeta
+ __slots__ = ()
+
+ def __instancecheck__(self, obj):
+ raise TypeError("NoReturn cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("NoReturn cannot be used with issubclass().")
+
+
+NoReturn = _NoReturn(_root=True)
+
+
+class TypeVarMeta(TypingMeta):
+ def __new__(cls, name, bases, namespace):
+ cls.assert_no_subclassing(bases)
+ return super(TypeVarMeta, cls).__new__(cls, name, bases, namespace)
+
+
+class TypeVar(_TypingBase):
+ """Type variable.
+
+ Usage::
+
+ T = TypeVar('T') # Can be anything
+ A = TypeVar('A', str, bytes) # Must be str or bytes
+
+ Type variables exist primarily for the benefit of static type
+ checkers. They serve as the parameters for generic types as well
+ as for generic function definitions. See class Generic for more
+ information on generic types. Generic functions work as follows:
+
+ def repeat(x: T, n: int) -> List[T]:
+ '''Return a list containing n references to x.'''
+ return [x]*n
+
+ def longest(x: A, y: A) -> A:
+ '''Return the longest of two strings.'''
+ return x if len(x) >= len(y) else y
+
+ The latter example's signature is essentially the overloading
+ of (str, str) -> str and (bytes, bytes) -> bytes. Also note
+ that if the arguments are instances of some subclass of str,
+ the return type is still plain str.
+
+ At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError.
+
+ Type variables defined with covariant=True or contravariant=True
+    can be used to declare covariant or contravariant generic types.
+ See PEP 484 for more details. By default generic types are invariant
+ in all type variables.
+
+    Type variables can be introspected, e.g.:
+
+ T.__name__ == 'T'
+ T.__constraints__ == ()
+ T.__covariant__ == False
+        T.__contravariant__ == False
+ A.__constraints__ == (str, bytes)
+ """
+
+ __metaclass__ = TypeVarMeta
+ __slots__ = ('__name__', '__bound__', '__constraints__',
+ '__covariant__', '__contravariant__')
+
+ def __init__(self, name, *constraints, **kwargs):
+ super(TypeVar, self).__init__(name, *constraints, **kwargs)
+ bound = kwargs.get('bound', None)
+ covariant = kwargs.get('covariant', False)
+ contravariant = kwargs.get('contravariant', False)
+ self.__name__ = name
+ if covariant and contravariant:
+ raise ValueError("Bivariant types are not supported.")
+ self.__covariant__ = bool(covariant)
+ self.__contravariant__ = bool(contravariant)
+ if constraints and bound is not None:
+ raise TypeError("Constraints cannot be combined with bound=...")
+ if constraints and len(constraints) == 1:
+ raise TypeError("A single constraint is not allowed")
+ msg = "TypeVar(name, constraint, ...): constraints must be types."
+ self.__constraints__ = tuple(_type_check(t, msg) for t in constraints)
+ if bound:
+ self.__bound__ = _type_check(bound, "Bound must be a type.")
+ else:
+ self.__bound__ = None
+
+ def _get_type_vars(self, tvars):
+ if self not in tvars:
+ tvars.append(self)
+
+ def __repr__(self):
+ if self.__covariant__:
+ prefix = '+'
+ elif self.__contravariant__:
+ prefix = '-'
+ else:
+ prefix = '~'
+ return prefix + self.__name__
+
+ def __instancecheck__(self, instance):
+ raise TypeError("Type variables cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("Type variables cannot be used with issubclass().")
+
+
+# Some unconstrained type variables. These are used by the container types.
+# (These are not for export.)
+T = TypeVar('T') # Any type.
+KT = TypeVar('KT') # Key type.
+VT = TypeVar('VT') # Value type.
+T_co = TypeVar('T_co', covariant=True) # Any type covariant containers.
+V_co = TypeVar('V_co', covariant=True) # Any type covariant containers.
+VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers.
+T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant.
+
+# A useful type variable with constraints. This represents string types.
+# (This one *is* for export!)
+AnyStr = TypeVar('AnyStr', bytes, unicode)
+
+
+def _replace_arg(arg, tvars, args):
+ """An internal helper function: replace arg if it is a type variable
+ found in tvars with corresponding substitution from args or
+ with corresponding substitution sub-tree if arg is a generic type.
+ """
+
+ if tvars is None:
+ tvars = []
+ if hasattr(arg, '_subs_tree') and isinstance(arg, (GenericMeta, _TypingBase)):
+ return arg._subs_tree(tvars, args)
+ if isinstance(arg, TypeVar):
+ for i, tvar in enumerate(tvars):
+ if arg == tvar:
+ return args[i]
+ return arg
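+
+# For example (illustrative only): _replace_arg(T, [T, KT], [int, str])
+# returns int, while an argument that is not a type variable (e.g. str)
+# is returned unchanged.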
+
+
+# Special typing constructs Union, Optional, Generic, Callable and Tuple
+# use three special attributes for internal bookkeeping of generic types:
+# * __parameters__ is a tuple of unique free type parameters of a generic
+# type, for example, Dict[T, T].__parameters__ == (T,);
+# * __origin__ keeps a reference to a type that was subscripted,
+# e.g., Union[T, int].__origin__ == Union;
+# * __args__ is a tuple of all arguments used in subscripting,
+# e.g., Dict[T, int].__args__ == (T, int).
+
+
+def _subs_tree(cls, tvars=None, args=None):
+ """An internal helper function: calculate substitution tree
+ for generic cls after replacing its type parameters with
+ substitutions in tvars -> args (if any).
+    Repeat the process while following the chain of __origin__'s.
+
+ Return a list of arguments with all possible substitutions
+ performed. Arguments that are generic classes themselves are represented
+ as tuples (so that no new classes are created by this function).
+ For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)]
+ """
+
+ if cls.__origin__ is None:
+ return cls
+    # Make a chain of origins (i.e. cls -> cls.__origin__).
+ current = cls.__origin__
+ orig_chain = []
+ while current.__origin__ is not None:
+ orig_chain.append(current)
+ current = current.__origin__
+ # Replace type variables in __args__ if asked ...
+ tree_args = []
+ for arg in cls.__args__:
+ tree_args.append(_replace_arg(arg, tvars, args))
+ # ... then continue replacing down the origin chain.
+ for ocls in orig_chain:
+ new_tree_args = []
+ for arg in ocls.__args__:
+ new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args))
+ tree_args = new_tree_args
+ return tree_args
+
+
+def _remove_dups_flatten(parameters):
+ """An internal helper for Union creation and substitution: flatten Union's
+ among parameters, then remove duplicates and strict subclasses.
+ """
+
+ # Flatten out Union[Union[...], ...].
+ params = []
+ for p in parameters:
+ if isinstance(p, _Union) and p.__origin__ is Union:
+ params.extend(p.__args__)
+ elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union:
+ params.extend(p[1:])
+ else:
+ params.append(p)
+ # Weed out strict duplicates, preserving the first of each occurrence.
+ all_params = set(params)
+ if len(all_params) < len(params):
+ new_params = []
+ for t in params:
+ if t in all_params:
+ new_params.append(t)
+ all_params.remove(t)
+ params = new_params
+ assert not all_params, all_params
+ # Weed out subclasses.
+ # E.g. Union[int, Employee, Manager] == Union[int, Employee].
+ # If object is present it will be sole survivor among proper classes.
+ # Never discard type variables.
+ # (In particular, Union[str, AnyStr] != AnyStr.)
+ all_params = set(params)
+ for t1 in params:
+ if not isinstance(t1, type):
+ continue
+ if any(isinstance(t2, type) and issubclass(t1, t2)
+ for t2 in all_params - {t1}
+ if not (isinstance(t2, GenericMeta) and
+ t2.__origin__ is not None)):
+ all_params.remove(t1)
+ return tuple(t for t in params if t in all_params)
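+
+# For example (a sketch): _remove_dups_flatten((Union[int, str], int)) first
+# flattens to (int, str, int) and then deduplicates to (int, str); a strict
+# subclass appearing next to its base (e.g. bool next to int) is likewise
+# dropped.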
+
+
+def _check_generic(cls, parameters):
+ # Check correct count for parameters of a generic cls (internal helper).
+ if not cls.__parameters__:
+ raise TypeError("%s is not a generic class" % repr(cls))
+ alen = len(parameters)
+ elen = len(cls.__parameters__)
+ if alen != elen:
+ raise TypeError("Too %s parameters for %s; actual %s, expected %s" %
+ ("many" if alen > elen else "few", repr(cls), alen, elen))
+
+
+_cleanups = []
+
+
+def _tp_cache(func):
+ maxsize = 128
+ cache = {}
+ _cleanups.append(cache.clear)
+
+ @functools.wraps(func)
+ def inner(*args):
+ key = args
+ try:
+ return cache[key]
+ except TypeError:
+ # Assume it's an unhashable argument.
+ return func(*args)
+ except KeyError:
+ value = func(*args)
+ if len(cache) >= maxsize:
+ # If the cache grows too much, just start over.
+ cache.clear()
+ cache[key] = value
+ return value
+
+ return inner
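+
+# Because the __getitem__ implementations below are wrapped in _tp_cache,
+# repeated subscriptions with hashable parameters return the same object,
+# e.g.:
+#
+#     assert Union[int, str] is Union[int, str]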
+
+
+class UnionMeta(TypingMeta):
+ """Metaclass for Union."""
+
+ def __new__(cls, name, bases, namespace):
+ cls.assert_no_subclassing(bases)
+ return super(UnionMeta, cls).__new__(cls, name, bases, namespace)
+
+
+class _Union(_FinalTypingBase):
+ """Union type; Union[X, Y] means either X or Y.
+
+ To define a union, use e.g. Union[int, str]. Details:
+
+ - The arguments must be types and there must be at least one.
+
+ - None as an argument is a special case and is replaced by
+ type(None).
+
+ - Unions of unions are flattened, e.g.::
+
+ Union[Union[int, str], float] == Union[int, str, float]
+
+ - Unions of a single argument vanish, e.g.::
+
+ Union[int] == int # The constructor actually returns int
+
+ - Redundant arguments are skipped, e.g.::
+
+ Union[int, str, int] == Union[int, str]
+
+ - When comparing unions, the argument order is ignored, e.g.::
+
+ Union[int, str] == Union[str, int]
+
+ - When two arguments have a subclass relationship, the least
+ derived argument is kept, e.g.::
+
+ class Employee: pass
+ class Manager(Employee): pass
+ Union[int, Employee, Manager] == Union[int, Employee]
+ Union[Manager, int, Employee] == Union[int, Employee]
+ Union[Employee, Manager] == Employee
+
+    - Similarly for object::
+
+ Union[int, object] == object
+
+ - You cannot subclass or instantiate a union.
+
+ - You can use Optional[X] as a shorthand for Union[X, None].
+ """
+
+ __metaclass__ = UnionMeta
+ __slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__')
+
+ def __new__(cls, parameters=None, origin=None, *args, **kwds):
+ self = super(_Union, cls).__new__(cls, parameters, origin, *args, **kwds)
+ if origin is None:
+ self.__parameters__ = None
+ self.__args__ = None
+ self.__origin__ = None
+ self.__tree_hash__ = hash(frozenset(('Union',)))
+ return self
+ if not isinstance(parameters, tuple):
+ raise TypeError("Expected parameters=<tuple>")
+ if origin is Union:
+ parameters = _remove_dups_flatten(parameters)
+ # It's not a union if there's only one type left.
+ if len(parameters) == 1:
+ return parameters[0]
+ self.__parameters__ = _type_vars(parameters)
+ self.__args__ = parameters
+ self.__origin__ = origin
+ # Pre-calculate the __hash__ on instantiation.
+ # This improves speed for complex substitutions.
+ subs_tree = self._subs_tree()
+ if isinstance(subs_tree, tuple):
+ self.__tree_hash__ = hash(frozenset(subs_tree))
+ else:
+ self.__tree_hash__ = hash(subs_tree)
+ return self
+
+ def _eval_type(self, globalns, localns):
+ if self.__args__ is None:
+ return self
+ ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__)
+ ev_origin = _eval_type(self.__origin__, globalns, localns)
+ if ev_args == self.__args__ and ev_origin == self.__origin__:
+ # Everything is already evaluated.
+ return self
+ return self.__class__(ev_args, ev_origin, _root=True)
+
+ def _get_type_vars(self, tvars):
+ if self.__origin__ and self.__parameters__:
+ _get_type_vars(self.__parameters__, tvars)
+
+ def __repr__(self):
+ if self.__origin__ is None:
+ return super(_Union, self).__repr__()
+ tree = self._subs_tree()
+ if not isinstance(tree, tuple):
+ return repr(tree)
+ return tree[0]._tree_repr(tree)
+
+ def _tree_repr(self, tree):
+ arg_list = []
+ for arg in tree[1:]:
+ if not isinstance(arg, tuple):
+ arg_list.append(_type_repr(arg))
+ else:
+ arg_list.append(arg[0]._tree_repr(arg))
+ return super(_Union, self).__repr__() + '[%s]' % ', '.join(arg_list)
+
+ @_tp_cache
+ def __getitem__(self, parameters):
+ if parameters == ():
+ raise TypeError("Cannot take a Union of no types.")
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+ if self.__origin__ is None:
+ msg = "Union[arg, ...]: each arg must be a type."
+ else:
+ msg = "Parameters to generic types must be types."
+ parameters = tuple(_type_check(p, msg) for p in parameters)
+ if self is not Union:
+ _check_generic(self, parameters)
+ return self.__class__(parameters, origin=self, _root=True)
+
+ def _subs_tree(self, tvars=None, args=None):
+ if self is Union:
+ return Union # Nothing to substitute
+ tree_args = _subs_tree(self, tvars, args)
+ tree_args = _remove_dups_flatten(tree_args)
+ if len(tree_args) == 1:
+ return tree_args[0] # Union of a single type is that type
+ return (Union,) + tree_args
+
+ def __eq__(self, other):
+ if isinstance(other, _Union):
+ return self.__tree_hash__ == other.__tree_hash__
+ elif self is not Union:
+ return self._subs_tree() == other
+ else:
+ return self is other
+
+ def __hash__(self):
+ return self.__tree_hash__
+
+ def __instancecheck__(self, obj):
+ raise TypeError("Unions cannot be used with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("Unions cannot be used with issubclass().")
+
+
+Union = _Union(_root=True)
+
+
+class OptionalMeta(TypingMeta):
+ """Metaclass for Optional."""
+
+ def __new__(cls, name, bases, namespace):
+ cls.assert_no_subclassing(bases)
+ return super(OptionalMeta, cls).__new__(cls, name, bases, namespace)
+
+
+class _Optional(_FinalTypingBase):
+ """Optional type.
+
+ Optional[X] is equivalent to Union[X, None].
+ """
+
+ __metaclass__ = OptionalMeta
+ __slots__ = ()
+
+ @_tp_cache
+ def __getitem__(self, arg):
+ arg = _type_check(arg, "Optional[t] requires a single type.")
+ return Union[arg, type(None)]
+
+
+Optional = _Optional(_root=True)
+
+
+def _next_in_mro(cls):
+ """Helper for Generic.__new__.
+
+ Returns the class after the last occurrence of Generic or
+ Generic[...] in cls.__mro__.
+ """
+ next_in_mro = object
+ # Look for the last occurrence of Generic or Generic[...].
+ for i, c in enumerate(cls.__mro__[:-1]):
+ if isinstance(c, GenericMeta) and c._gorg is Generic:
+ next_in_mro = cls.__mro__[i + 1]
+ return next_in_mro
+
+
+def _make_subclasshook(cls):
+ """Construct a __subclasshook__ callable that incorporates
+ the associated __extra__ class in subclass checks performed
+ against cls.
+ """
+ if isinstance(cls.__extra__, abc.ABCMeta):
+ # The logic mirrors that of ABCMeta.__subclasscheck__.
+ # Registered classes need not be checked here because
+ # cls and its extra share the same _abc_registry.
+ def __extrahook__(cls, subclass):
+ res = cls.__extra__.__subclasshook__(subclass)
+ if res is not NotImplemented:
+ return res
+ if cls.__extra__ in getattr(subclass, '__mro__', ()):
+ return True
+ for scls in cls.__extra__.__subclasses__():
+ if isinstance(scls, GenericMeta):
+ continue
+ if issubclass(subclass, scls):
+ return True
+ return NotImplemented
+ else:
+ # For non-ABC extras we'll just call issubclass().
+ def __extrahook__(cls, subclass):
+ if cls.__extra__ and issubclass(subclass, cls.__extra__):
+ return True
+ return NotImplemented
+ return classmethod(__extrahook__)
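+
+# This hook is what lets unparameterized typing collections interoperate with
+# their collections counterparts at runtime (mirroring the checks in
+# test_typing.py), e.g.:
+#
+#     assert isinstance([], Iterable)
+#     assert issubclass(dict, Mapping)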
+
+
+class GenericMeta(TypingMeta, abc.ABCMeta):
+ """Metaclass for generic types.
+
+ This is a metaclass for typing.Generic and generic ABCs defined in
+ typing module. User defined subclasses of GenericMeta can override
+ __new__ and invoke super().__new__. Note that GenericMeta.__new__
+ has strict rules on what is allowed in its bases argument:
+ * plain Generic is disallowed in bases;
+ * Generic[...] should appear in bases at most once;
+ * if Generic[...] is present, then it should list all type variables
+ that appear in other bases.
+    In addition, the type of all generic bases is erased, e.g., C[int] is
+ stripped to plain C.
+ """
+
+ def __new__(cls, name, bases, namespace,
+ tvars=None, args=None, origin=None, extra=None, orig_bases=None):
+ """Create a new generic class. GenericMeta.__new__ accepts
+ keyword arguments that are used for internal bookkeeping, therefore
+ an override should pass unused keyword arguments to super().
+ """
+ if tvars is not None:
+ # Called from __getitem__() below.
+ assert origin is not None
+ assert all(isinstance(t, TypeVar) for t in tvars), tvars
+ else:
+ # Called from class statement.
+ assert tvars is None, tvars
+ assert args is None, args
+ assert origin is None, origin
+
+ # Get the full set of tvars from the bases.
+ tvars = _type_vars(bases)
+ # Look for Generic[T1, ..., Tn].
+ # If found, tvars must be a subset of it.
+ # If not found, tvars is it.
+ # Also check for and reject plain Generic,
+ # and reject multiple Generic[...].
+ gvars = None
+ for base in bases:
+ if base is Generic:
+ raise TypeError("Cannot inherit from plain Generic")
+ if (isinstance(base, GenericMeta) and
+ base.__origin__ in (Generic, Protocol)):
+ if gvars is not None:
+ raise TypeError(
+ "Cannot inherit from Generic[...] or"
+ " Protocol[...] multiple times.")
+ gvars = base.__parameters__
+ if gvars is None:
+ gvars = tvars
+ else:
+ tvarset = set(tvars)
+ gvarset = set(gvars)
+ if not tvarset <= gvarset:
+ raise TypeError(
+ "Some type variables (%s) "
+ "are not listed in %s[%s]" %
+ (", ".join(str(t) for t in tvars if t not in gvarset),
+ "Generic" if any(b.__origin__ is Generic
+ for b in bases) else "Protocol",
+ ", ".join(str(g) for g in gvars)))
+ tvars = gvars
+
+ initial_bases = bases
+ if extra is None:
+ extra = namespace.get('__extra__')
+ if extra is not None and type(extra) is abc.ABCMeta and extra not in bases:
+ bases = (extra,) + bases
+ bases = tuple(b._gorg if isinstance(b, GenericMeta) else b for b in bases)
+
+ # remove bare Generic from bases if there are other generic bases
+ if any(isinstance(b, GenericMeta) and b is not Generic for b in bases):
+ bases = tuple(b for b in bases if b is not Generic)
+ namespace.update({'__origin__': origin, '__extra__': extra})
+ self = super(GenericMeta, cls).__new__(cls, name, bases, namespace)
+ super(GenericMeta, self).__setattr__('_gorg',
+ self if not origin else origin._gorg)
+
+ self.__parameters__ = tvars
+        # Be prepared that GenericMeta will be subclassed by TupleMeta
+        # and CallableMeta; those two allow ..., (), or [] in __args__.
+ self.__args__ = tuple(Ellipsis if a is _TypingEllipsis else
+ () if a is _TypingEmpty else
+ a for a in args) if args else None
+ # Speed hack (https://github.com/python/typing/issues/196).
+ self.__next_in_mro__ = _next_in_mro(self)
+ # Preserve base classes on subclassing (__bases__ are type erased now).
+ if orig_bases is None:
+ self.__orig_bases__ = initial_bases
+
+ # This allows unparameterized generic collections to be used
+ # with issubclass() and isinstance() in the same way as their
+ # collections.abc counterparts (e.g., isinstance([], Iterable)).
+ if (
+ '__subclasshook__' not in namespace and extra or
+ # allow overriding
+ getattr(self.__subclasshook__, '__name__', '') == '__extrahook__'
+ ):
+ self.__subclasshook__ = _make_subclasshook(self)
+
+ if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2.
+ self.__qualname__ = origin.__qualname__
+ self.__tree_hash__ = (hash(self._subs_tree()) if origin else
+ super(GenericMeta, self).__hash__())
+ return self
+
+ def __init__(self, *args, **kwargs):
+ super(GenericMeta, self).__init__(*args, **kwargs)
+ if isinstance(self.__extra__, abc.ABCMeta):
+ self._abc_registry = self.__extra__._abc_registry
+ self._abc_cache = self.__extra__._abc_cache
+ elif self.__origin__ is not None:
+ self._abc_registry = self.__origin__._abc_registry
+ self._abc_cache = self.__origin__._abc_cache
+
+    # _abc_negative_cache and _abc_negative_cache_version are
+    # realised as descriptors, since GenClass[t1, t2, ...] always
+    # shares subclass info with GenClass.
+ # This is an important memory optimization.
+ @property
+ def _abc_negative_cache(self):
+ if isinstance(self.__extra__, abc.ABCMeta):
+ return self.__extra__._abc_negative_cache
+ return self._gorg._abc_generic_negative_cache
+
+ @_abc_negative_cache.setter
+ def _abc_negative_cache(self, value):
+ if self.__origin__ is None:
+ if isinstance(self.__extra__, abc.ABCMeta):
+ self.__extra__._abc_negative_cache = value
+ else:
+ self._abc_generic_negative_cache = value
+
+ @property
+ def _abc_negative_cache_version(self):
+ if isinstance(self.__extra__, abc.ABCMeta):
+ return self.__extra__._abc_negative_cache_version
+ return self._gorg._abc_generic_negative_cache_version
+
+ @_abc_negative_cache_version.setter
+ def _abc_negative_cache_version(self, value):
+ if self.__origin__ is None:
+ if isinstance(self.__extra__, abc.ABCMeta):
+ self.__extra__._abc_negative_cache_version = value
+ else:
+ self._abc_generic_negative_cache_version = value
+
+ def _get_type_vars(self, tvars):
+ if self.__origin__ and self.__parameters__:
+ _get_type_vars(self.__parameters__, tvars)
+
+ def _eval_type(self, globalns, localns):
+ ev_origin = (self.__origin__._eval_type(globalns, localns)
+ if self.__origin__ else None)
+ ev_args = tuple(_eval_type(a, globalns, localns) for a
+ in self.__args__) if self.__args__ else None
+ if ev_origin == self.__origin__ and ev_args == self.__args__:
+ return self
+ return self.__class__(self.__name__,
+ self.__bases__,
+ dict(self.__dict__),
+ tvars=_type_vars(ev_args) if ev_args else None,
+ args=ev_args,
+ origin=ev_origin,
+ extra=self.__extra__,
+ orig_bases=self.__orig_bases__)
+
+ def __repr__(self):
+ if self.__origin__ is None:
+ return super(GenericMeta, self).__repr__()
+ return self._tree_repr(self._subs_tree())
+
+ def _tree_repr(self, tree):
+ arg_list = []
+ for arg in tree[1:]:
+ if arg == ():
+ arg_list.append('()')
+ elif not isinstance(arg, tuple):
+ arg_list.append(_type_repr(arg))
+ else:
+ arg_list.append(arg[0]._tree_repr(arg))
+ return super(GenericMeta, self).__repr__() + '[%s]' % ', '.join(arg_list)
+
+ def _subs_tree(self, tvars=None, args=None):
+ if self.__origin__ is None:
+ return self
+ tree_args = _subs_tree(self, tvars, args)
+ return (self._gorg,) + tuple(tree_args)
+
+ def __eq__(self, other):
+ if not isinstance(other, GenericMeta):
+ return NotImplemented
+ if self.__origin__ is None or other.__origin__ is None:
+ return self is other
+ return self.__tree_hash__ == other.__tree_hash__
+
+ def __hash__(self):
+ return self.__tree_hash__
+
+ @_tp_cache
+ def __getitem__(self, params):
+ if not isinstance(params, tuple):
+ params = (params,)
+ if not params and self._gorg is not Tuple:
+ raise TypeError(
+ "Parameter list to %s[...] cannot be empty" % _qualname(self))
+ msg = "Parameters to generic types must be types."
+ params = tuple(_type_check(p, msg) for p in params)
+ if self in (Generic, Protocol):
+ # Generic can only be subscripted with unique type variables.
+ if not all(isinstance(p, TypeVar) for p in params):
+ raise TypeError(
+ "Parameters to %s[...] must all be type variables" % self.__name__)
+ if len(set(params)) != len(params):
+ raise TypeError(
+ "Parameters to %s[...] must all be unique" % self.__name__)
+ tvars = params
+ args = params
+ elif self in (Tuple, Callable):
+ tvars = _type_vars(params)
+ args = params
+ elif self.__origin__ in (Generic, Protocol):
+ # Can't subscript Generic[...] or Protocol[...].
+ raise TypeError("Cannot subscript already-subscripted %s" %
+ repr(self))
+ else:
+ # Subscripting a regular Generic subclass.
+ _check_generic(self, params)
+ tvars = _type_vars(params)
+ args = params
+
+ prepend = (self,) if self.__origin__ is None else ()
+ return self.__class__(self.__name__,
+ prepend + self.__bases__,
+ dict(self.__dict__),
+ tvars=tvars,
+ args=args,
+ origin=self,
+ extra=self.__extra__,
+ orig_bases=self.__orig_bases__)
+
+ def __subclasscheck__(self, cls):
+ if self.__origin__ is not None:
+ # These should only be modules within the standard library.
+ # singledispatch is an exception, because it's a Python 2 backport
+ # of functools.singledispatch.
+ whitelist = ['abc', 'functools', 'singledispatch']
+ if (sys._getframe(1).f_globals['__name__'] in whitelist or
+ # The second frame is needed for the case where we came
+ # from _ProtocolMeta.__subclasscheck__.
+ sys._getframe(2).f_globals['__name__'] in whitelist):
+ return False
+ raise TypeError("Parameterized generics cannot be used with class "
+ "or instance checks")
+ if self is Generic:
+ raise TypeError("Class %r cannot be used with class "
+ "or instance checks" % self)
+ return super(GenericMeta, self).__subclasscheck__(cls)
+
+ def __instancecheck__(self, instance):
+ # Since we extend ABC.__subclasscheck__, and since
+ # ABC.__instancecheck__ inlines the cache checking done by the
+ # latter, we must extend __instancecheck__ too. For simplicity
+ # we just skip the cache check -- instance checks for generic
+ # classes are supposed to be rare anyway.
+ if hasattr(instance, "__class__"):
+ return issubclass(instance.__class__, self)
+ return False
+
+ def __setattr__(self, attr, value):
+ # We consider all subscripted generics as proxies for the original class.
+ if (
+ attr.startswith('__') and attr.endswith('__') or
+ attr.startswith('_abc_')
+ ):
+ super(GenericMeta, self).__setattr__(attr, value)
+ else:
+ super(GenericMeta, self._gorg).__setattr__(attr, value)
+
+
+def _copy_generic(self):
+ """Hack to work around https://bugs.python.org/issue11480 on Python 2"""
+ return self.__class__(self.__name__, self.__bases__, dict(self.__dict__),
+ self.__parameters__, self.__args__, self.__origin__,
+ self.__extra__, self.__orig_bases__)
+
+
+copy._copy_dispatch[GenericMeta] = _copy_generic
+
+
+# Prevent checks for Generic from crashing when defining Generic.
+Generic = None
+
+
+def _generic_new(base_cls, cls, *args, **kwds):
+ # Ensure the type is erased on instantiation,
+ # but attempt to store it in __orig_class__.
+ if cls.__origin__ is None:
+ if (base_cls.__new__ is object.__new__ and
+ cls.__init__ is not object.__init__):
+ return base_cls.__new__(cls)
+ else:
+ return base_cls.__new__(cls, *args, **kwds)
+ else:
+ origin = cls._gorg
+ if (base_cls.__new__ is object.__new__ and
+ cls.__init__ is not object.__init__):
+ obj = base_cls.__new__(origin)
+ else:
+ obj = base_cls.__new__(origin, *args, **kwds)
+ try:
+ obj.__orig_class__ = cls
+ except AttributeError:
+ pass
+ obj.__init__(*args, **kwds)
+ return obj
+
+
+class Generic(object):
+ """Abstract base class for generic types.
+
+ A generic type is typically declared by inheriting from
+ this class parameterized with one or more type variables.
+ For example, a generic mapping type might be defined as::
+
+ class Mapping(Generic[KT, VT]):
+ def __getitem__(self, key: KT) -> VT:
+ ...
+ # Etc.
+
+ This class can then be used as follows::
+
+ def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT:
+ try:
+ return mapping[key]
+ except KeyError:
+ return default
+ """
+
+ __metaclass__ = GenericMeta
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Generic:
+ raise TypeError("Type Generic cannot be instantiated; "
+ "it can be used only as a base class")
+ return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
+
+
+class _TypingEmpty(object):
+ """Internal placeholder for () or []. Used by TupleMeta and CallableMeta
+ to allow empty list/tuple in specific places, without allowing them
+ to sneak in where prohibited.
+ """
+
+
+class _TypingEllipsis(object):
+ """Internal placeholder for ... (ellipsis)."""
+
+
+class TupleMeta(GenericMeta):
+ """Metaclass for Tuple (internal)."""
+
+ @_tp_cache
+ def __getitem__(self, parameters):
+ if self.__origin__ is not None or self._gorg is not Tuple:
+ # Normal generic rules apply if this is not the first subscription
+ # or a subscription of a subclass.
+ return super(TupleMeta, self).__getitem__(parameters)
+ if parameters == ():
+ return super(TupleMeta, self).__getitem__((_TypingEmpty,))
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+ if len(parameters) == 2 and parameters[1] is Ellipsis:
+ msg = "Tuple[t, ...]: t must be a type."
+ p = _type_check(parameters[0], msg)
+ return super(TupleMeta, self).__getitem__((p, _TypingEllipsis))
+ msg = "Tuple[t0, t1, ...]: each t must be a type."
+ parameters = tuple(_type_check(p, msg) for p in parameters)
+ return super(TupleMeta, self).__getitem__(parameters)
+
+ def __instancecheck__(self, obj):
+ if self.__args__ is None:
+ return isinstance(obj, tuple)
+ raise TypeError("Parameterized Tuple cannot be used "
+ "with isinstance().")
+
+ def __subclasscheck__(self, cls):
+ if self.__args__ is None:
+ return issubclass(cls, tuple)
+ raise TypeError("Parameterized Tuple cannot be used "
+ "with issubclass().")
+
+
+copy._copy_dispatch[TupleMeta] = _copy_generic
+
+
+class Tuple(tuple):
+ """Tuple type; Tuple[X, Y] is the cross-product type of X and Y.
+
+ Example: Tuple[T1, T2] is a tuple of two elements corresponding
+ to type variables T1 and T2. Tuple[int, float, str] is a tuple
+ of an int, a float and a string.
+
+ To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].
+ """
+
+ __metaclass__ = TupleMeta
+ __extra__ = tuple
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Tuple:
+ raise TypeError("Type Tuple cannot be instantiated; "
+ "use tuple() instead")
+ return _generic_new(tuple, cls, *args, **kwds)
+
+
+class CallableMeta(GenericMeta):
+ """ Metaclass for Callable."""
+
+ def __repr__(self):
+ if self.__origin__ is None:
+ return super(CallableMeta, self).__repr__()
+ return self._tree_repr(self._subs_tree())
+
+ def _tree_repr(self, tree):
+ if self._gorg is not Callable:
+ return super(CallableMeta, self)._tree_repr(tree)
+ # For actual Callable (not its subclass) we override
+ # super(CallableMeta, self)._tree_repr() for nice formatting.
+ arg_list = []
+ for arg in tree[1:]:
+ if not isinstance(arg, tuple):
+ arg_list.append(_type_repr(arg))
+ else:
+ arg_list.append(arg[0]._tree_repr(arg))
+ if arg_list[0] == '...':
+ return repr(tree[0]) + '[..., %s]' % arg_list[1]
+ return (repr(tree[0]) +
+ '[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1]))
+
+ def __getitem__(self, parameters):
+ """A thin wrapper around __getitem_inner__ to provide the latter
+ with hashable arguments to improve speed.
+ """
+
+ if self.__origin__ is not None or self._gorg is not Callable:
+ return super(CallableMeta, self).__getitem__(parameters)
+ if not isinstance(parameters, tuple) or len(parameters) != 2:
+ raise TypeError("Callable must be used as "
+ "Callable[[arg, ...], result].")
+ args, result = parameters
+ if args is Ellipsis:
+ parameters = (Ellipsis, result)
+ else:
+ if not isinstance(args, list):
+ raise TypeError("Callable[args, result]: args must be a list."
+ " Got %.100r." % (args,))
+ parameters = (tuple(args), result)
+ return self.__getitem_inner__(parameters)
+
+ @_tp_cache
+ def __getitem_inner__(self, parameters):
+ args, result = parameters
+ msg = "Callable[args, result]: result must be a type."
+ result = _type_check(result, msg)
+ if args is Ellipsis:
+ return super(CallableMeta, self).__getitem__((_TypingEllipsis, result))
+ msg = "Callable[[arg, ...], result]: each arg must be a type."
+ args = tuple(_type_check(arg, msg) for arg in args)
+ parameters = args + (result,)
+ return super(CallableMeta, self).__getitem__(parameters)
+
+
+copy._copy_dispatch[CallableMeta] = _copy_generic
+
+
+class Callable(object):
+ """Callable type; Callable[[int], str] is a function of (int) -> str.
+
+ The subscription syntax must always be used with exactly two
+ values: the argument list and the return type. The argument list
+ must be a list of types or ellipsis; the return type must be a single type.
+
+ There is no syntax to indicate optional or keyword arguments;
+ such function types are rarely used as callback types.
+ """
+
+ __metaclass__ = CallableMeta
+ __extra__ = collections_abc.Callable
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Callable:
+ raise TypeError("Type Callable cannot be instantiated; "
+ "use a non-abstract subclass instead")
+ return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
+
+
+def cast(typ, val):
+ """Cast a value to a type.
+
+ This returns the value unchanged. To the type checker this
+ signals that the return value has the designated type, but at
+ runtime we intentionally don't check anything (we want this
+ to be as fast as possible).
+ """
+ return val
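+
+# Illustrative sketch (not part of the upstream module): cast() is a purely
+# static annotation, so the value passes through unchanged at runtime.
+#
+#     x = cast(List[int], [])    # a type checker now treats x as List[int]
+#     assert x == []             # nothing is converted or checked at runtime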
+
+
+def _get_defaults(func):
+ """Internal helper to extract the default arguments, by name."""
+ code = func.__code__
+ pos_count = code.co_argcount
+ arg_names = code.co_varnames
+ arg_names = arg_names[:pos_count]
+ defaults = func.__defaults__ or ()
+ kwdefaults = getattr(func, '__kwdefaults__', None)  # not set on Python 2 functions
+ res = dict(kwdefaults) if kwdefaults else {}
+ pos_offset = pos_count - len(defaults)
+ for name, value in zip(arg_names[pos_offset:], defaults):
+ assert name not in res
+ res[name] = value
+ return res
+
+
+def get_type_hints(obj, globalns=None, localns=None):
+ """In Python 2 this is not supported and always returns None."""
+ return None
+
+
+def no_type_check(arg):
+ """Decorator to indicate that annotations are not type hints.
+
+ The argument must be a class or function; if it is a class, it
+ applies recursively to all methods and classes defined in that class
+ (but not to methods defined in its superclasses or subclasses).
+
+ This mutates the function(s) or class(es) in place.
+ """
+ if isinstance(arg, type):
+ arg_attrs = arg.__dict__.copy()
+ for attr, val in arg.__dict__.items():
+ if val in arg.__bases__ + (arg,):
+ arg_attrs.pop(attr)
+ for obj in arg_attrs.values():
+ if isinstance(obj, types.FunctionType):
+ obj.__no_type_check__ = True
+ if isinstance(obj, type):
+ no_type_check(obj)
+ try:
+ arg.__no_type_check__ = True
+ except TypeError: # built-in classes
+ pass
+ return arg
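+
+# Illustrative sketch (not part of the upstream module): typical use is as a
+# decorator, telling type checkers to ignore the annotations.
+#
+#     @no_type_check
+#     def greet(name):
+#         # type: (int) -> int    # wrong on purpose; checkers now ignore it
+#         return 'Hello, %s' % name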
+
+
+def no_type_check_decorator(decorator):
+ """Decorator to give another decorator the @no_type_check effect.
+
+ This wraps the decorator with something that wraps the decorated
+ function in @no_type_check.
+ """
+
+ @functools.wraps(decorator)
+ def wrapped_decorator(*args, **kwds):
+ func = decorator(*args, **kwds)
+ func = no_type_check(func)
+ return func
+
+ return wrapped_decorator
+
+
+def _overload_dummy(*args, **kwds):
+ """Helper for @overload to raise when called."""
+ raise NotImplementedError(
+ "You should not call an overloaded function. "
+ "A series of @overload-decorated functions "
+ "outside a stub module should always be followed "
+ "by an implementation that is not @overload-ed.")
+
+
+def overload(func):
+ """Decorator for overloaded functions/methods.
+
+ In a stub file, place two or more stub definitions for the same
+ function in a row, each decorated with @overload. For example:
+
+ @overload
+ def utf8(value: None) -> None: ...
+ @overload
+ def utf8(value: bytes) -> bytes: ...
+ @overload
+ def utf8(value: str) -> bytes: ...
+
+ In a non-stub file (i.e. a regular .py file), do the same but
+ follow it with an implementation. The implementation should *not*
+ be decorated with @overload. For example:
+
+ @overload
+ def utf8(value: None) -> None: ...
+ @overload
+ def utf8(value: bytes) -> bytes: ...
+ @overload
+ def utf8(value: str) -> bytes: ...
+ def utf8(value):
+ # implementation goes here
+ """
+ return _overload_dummy
+
+
+_PROTO_WHITELIST = ['Callable', 'Iterable', 'Iterator',
+ 'Hashable', 'Sized', 'Container', 'Collection',
+ 'Reversible', 'ContextManager']
+
+
+class _ProtocolMeta(GenericMeta):
+ """Internal metaclass for Protocol.
+
+ This exists so Protocol classes can be generic without deriving
+ from Generic.
+ """
+ def __init__(cls, *args, **kwargs):
+ super(_ProtocolMeta, cls).__init__(*args, **kwargs)
+ if not cls.__dict__.get('_is_protocol', None):
+ cls._is_protocol = any(b is Protocol or
+ isinstance(b, _ProtocolMeta) and
+ b.__origin__ is Protocol
+ for b in cls.__bases__)
+ if cls._is_protocol:
+ for base in cls.__mro__[1:]:
+ if not (base in (object, Generic) or
+ base.__module__ == '_abcoll' and
+ base.__name__ in _PROTO_WHITELIST or
+ isinstance(base, TypingMeta) and base._is_protocol or
+ isinstance(base, GenericMeta) and base.__origin__ is Generic):
+ raise TypeError('Protocols can only inherit from other protocols,'
+ ' got %r' % base)
+ cls._callable_members_only = all(callable(getattr(cls, attr))
+ for attr in cls._get_protocol_attrs())
+
+ def _no_init(self, *args, **kwargs):
+ if type(self)._is_protocol:
+ raise TypeError('Protocols cannot be instantiated')
+ cls.__init__ = _no_init
+
+ def _proto_hook(cls, other):
+ if not cls.__dict__.get('_is_protocol', None):
+ return NotImplemented
+ if not isinstance(other, type):
+ # Similar error as for issubclass(1, int)
+ # (old-style classes have no chance here either)
+ raise TypeError('issubclass() arg 1 must be a new-style class')
+ for attr in cls._get_protocol_attrs():
+ for base in other.__mro__:
+ if attr in base.__dict__:
+ if base.__dict__[attr] is None:
+ return NotImplemented
+ break
+ else:
+ return NotImplemented
+ return True
+ if '__subclasshook__' not in cls.__dict__:
+ cls.__subclasshook__ = classmethod(_proto_hook)
+
+ def __instancecheck__(self, instance):
+ # We need this method for situations where attributes are assigned in __init__
+ if isinstance(instance, type):
+ # This looks like a fundamental limitation of Python 2.
+ # It cannot support runtime protocol metaclasses: on Python 2, classes
+ # cannot be correctly inspected as instances of protocols.
+ return False
+ if ((not getattr(self, '_is_protocol', False) or
+ self._callable_members_only) and
+ issubclass(instance.__class__, self)):
+ return True
+ if self._is_protocol:
+ if all(hasattr(instance, attr) and
+ (not callable(getattr(self, attr)) or
+ getattr(instance, attr) is not None)
+ for attr in self._get_protocol_attrs()):
+ return True
+ return super(GenericMeta, self).__instancecheck__(instance)
+
+ def __subclasscheck__(self, cls):
+ if (self.__dict__.get('_is_protocol', None) and
+ not self.__dict__.get('_is_runtime_protocol', None)):
+ if (sys._getframe(1).f_globals['__name__'] in ['abc', 'functools'] or
+ # This is needed because we remove subclasses from unions on Python 2.
+ sys._getframe(2).f_globals['__name__'] == 'typing'):
+ return False
+ raise TypeError("Instance and class checks can only be used with"
+ " @runtime_checkable protocols")
+ if (self.__dict__.get('_is_runtime_protocol', None) and
+ not self._callable_members_only):
+ if sys._getframe(1).f_globals['__name__'] in ['abc', 'functools']:
+ return super(GenericMeta, self).__subclasscheck__(cls)
+ raise TypeError("Protocols with non-method members"
+ " don't support issubclass()")
+ return super(_ProtocolMeta, self).__subclasscheck__(cls)
+
+ def _get_protocol_attrs(self):
+ attrs = set()
+ for base in self.__mro__[:-1]: # without object
+ if base.__name__ in ('Protocol', 'Generic'):
+ continue
+ annotations = getattr(base, '__annotations__', {})
+ for attr in list(base.__dict__.keys()) + list(annotations.keys()):
+ if (not attr.startswith('_abc_') and attr not in (
+ '__abstractmethods__', '__annotations__', '__weakref__',
+ '_is_protocol', '_is_runtime_protocol', '__dict__',
+ '__args__', '__slots__', '_get_protocol_attrs',
+ '__next_in_mro__', '__parameters__', '__origin__',
+ '__orig_bases__', '__extra__', '__tree_hash__',
+ '__doc__', '__subclasshook__', '__init__', '__new__',
+ '__module__', '_MutableMapping__marker',
+ '__metaclass__', '_gorg', '_callable_members_only')):
+ attrs.add(attr)
+ return attrs
+
+
+class Protocol(object):
+ """Base class for protocol classes. Protocol classes are defined as::
+
+ class Proto(Protocol):
+ def meth(self):
+ # type: () -> int
+ pass
+
+ Such classes are primarily used with static type checkers that recognize
+ structural subtyping (static duck-typing), for example::
+
+ class C:
+ def meth(self):
+ # type: () -> int
+ return 0
+
+ def func(x):
+ # type: (Proto) -> int
+ return x.meth()
+
+ func(C()) # Passes static type check
+
+ See PEP 544 for details. Protocol classes decorated with @typing.runtime_checkable
+ act as simple-minded runtime protocols that check only the presence of
+ given attributes, ignoring their type signatures.
+
+ Protocol classes can be generic; they are defined as::
+
+ class GenProto(Protocol[T]):
+ def meth(self):
+ # type: () -> T
+ pass
+ """
+
+ __metaclass__ = _ProtocolMeta
+ __slots__ = ()
+ _is_protocol = True
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Protocol:
+ raise TypeError("Type Protocol cannot be instantiated; "
+ "it can be used only as a base class")
+ return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
+
+
+def runtime_checkable(cls):
+ """Mark a protocol class as a runtime protocol, so that it
+ can be used with isinstance() and issubclass(). Raise TypeError
+ if applied to a non-protocol class.
+
+ This allows a simple-minded structural check very similar to the
+ one-offs in collections.abc such as Hashable.
+ """
+ if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol:
+ raise TypeError('@runtime_checkable can be only applied to protocol classes,'
+ ' got %r' % cls)
+ cls._is_runtime_protocol = True
+ return cls
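+
+# Illustrative sketch (not part of the upstream module): a decorated protocol
+# supports structural isinstance() checks; only the presence of the listed
+# attributes is verified, not their signatures.
+#
+#     @runtime_checkable
+#     class Closable(Protocol):
+#         def close(self):
+#             pass
+#
+#     class Resource(object):
+#         def close(self):
+#             pass
+#
+#     isinstance(Resource(), Closable)    # True: it has a close() method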
+
+
+# Various ABCs mimicking those in collections.abc.
+# A few are simply re-exported for completeness.
+
+Hashable = collections_abc.Hashable # Not generic.
+
+
+class Iterable(Generic[T_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.Iterable
+
+
+class Iterator(Iterable[T_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.Iterator
+
+
+@runtime_checkable
+class SupportsInt(Protocol):
+ __slots__ = ()
+
+ @abstractmethod
+ def __int__(self):
+ pass
+
+
+@runtime_checkable
+class SupportsFloat(Protocol):
+ __slots__ = ()
+
+ @abstractmethod
+ def __float__(self):
+ pass
+
+
+@runtime_checkable
+class SupportsComplex(Protocol):
+ __slots__ = ()
+
+ @abstractmethod
+ def __complex__(self):
+ pass
+
+
+@runtime_checkable
+class SupportsIndex(Protocol):
+ __slots__ = ()
+
+ @abstractmethod
+ def __index__(self):
+ pass
+
+
+@runtime_checkable
+class SupportsAbs(Protocol[T_co]):
+ __slots__ = ()
+
+ @abstractmethod
+ def __abs__(self):
+ pass
+
+
+if hasattr(collections_abc, 'Reversible'):
+ class Reversible(Iterable[T_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.Reversible
+else:
+ @runtime_checkable
+ class Reversible(Protocol[T_co]):
+ __slots__ = ()
+
+ @abstractmethod
+ def __reversed__(self):
+ pass
+
+
+Sized = collections_abc.Sized # Not generic.
+
+
+class Container(Generic[T_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.Container
+
+
+# Callable was defined earlier.
+
+
+class AbstractSet(Sized, Iterable[T_co], Container[T_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.Set
+
+
+class MutableSet(AbstractSet[T]):
+ __slots__ = ()
+ __extra__ = collections_abc.MutableSet
+
+
+# NOTE: It is only covariant in the value type.
+class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.Mapping
+
+
+class MutableMapping(Mapping[KT, VT]):
+ __slots__ = ()
+ __extra__ = collections_abc.MutableMapping
+
+
+if hasattr(collections_abc, 'Reversible'):
+ class Sequence(Sized, Reversible[T_co], Container[T_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.Sequence
+else:
+ class Sequence(Sized, Iterable[T_co], Container[T_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.Sequence
+
+
+class MutableSequence(Sequence[T]):
+ __slots__ = ()
+ __extra__ = collections_abc.MutableSequence
+
+
+class ByteString(Sequence[int]):
+ pass
+
+
+ByteString.register(str)
+ByteString.register(bytearray)
+
+
+class List(list, MutableSequence[T]):
+ __slots__ = ()
+ __extra__ = list
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is List:
+ raise TypeError("Type List cannot be instantiated; "
+ "use list() instead")
+ return _generic_new(list, cls, *args, **kwds)
+
+
+class Deque(collections.deque, MutableSequence[T]):
+ __slots__ = ()
+ __extra__ = collections.deque
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Deque:
+ return collections.deque(*args, **kwds)
+ return _generic_new(collections.deque, cls, *args, **kwds)
+
+
+class Set(set, MutableSet[T]):
+ __slots__ = ()
+ __extra__ = set
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Set:
+ raise TypeError("Type Set cannot be instantiated; "
+ "use set() instead")
+ return _generic_new(set, cls, *args, **kwds)
+
+
+class FrozenSet(frozenset, AbstractSet[T_co]):
+ __slots__ = ()
+ __extra__ = frozenset
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is FrozenSet:
+ raise TypeError("Type FrozenSet cannot be instantiated; "
+ "use frozenset() instead")
+ return _generic_new(frozenset, cls, *args, **kwds)
+
+
+class MappingView(Sized, Iterable[T_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.MappingView
+
+
+class KeysView(MappingView[KT], AbstractSet[KT]):
+ __slots__ = ()
+ __extra__ = collections_abc.KeysView
+
+
+class ItemsView(MappingView[Tuple[KT, VT_co]],
+ AbstractSet[Tuple[KT, VT_co]],
+ Generic[KT, VT_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.ItemsView
+
+
+class ValuesView(MappingView[VT_co]):
+ __slots__ = ()
+ __extra__ = collections_abc.ValuesView
+
+
+class ContextManager(Generic[T_co]):
+ __slots__ = ()
+
+ def __enter__(self):
+ return self
+
+ @abc.abstractmethod
+ def __exit__(self, exc_type, exc_value, traceback):
+ return None
+
+ @classmethod
+ def __subclasshook__(cls, C):
+ if cls is ContextManager:
+ # In Python 3.6+, it is possible to set a method to None to
+ # explicitly indicate that the class does not implement an ABC
+ # (https://bugs.python.org/issue25958), but we do not support
+ # that pattern here because this fallback class is only used
+ # in Python 3.5 and earlier.
+ if (any("__enter__" in B.__dict__ for B in C.__mro__) and
+ any("__exit__" in B.__dict__ for B in C.__mro__)):
+ return True
+ return NotImplemented
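+
+# Illustrative sketch (not part of the upstream module): the hook above makes
+# any class defining both __enter__ and __exit__ a virtual subclass.
+#
+#     class Managed(object):
+#         def __enter__(self):
+#             return self
+#         def __exit__(self, exc_type, exc_value, traceback):
+#             return None
+#
+#     issubclass(Managed, ContextManager)    # True, via __subclasshook__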
+
+
+class Dict(dict, MutableMapping[KT, VT]):
+ __slots__ = ()
+ __extra__ = dict
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Dict:
+ raise TypeError("Type Dict cannot be instantiated; "
+ "use dict() instead")
+ return _generic_new(dict, cls, *args, **kwds)
+
+
+class DefaultDict(collections.defaultdict, MutableMapping[KT, VT]):
+ __slots__ = ()
+ __extra__ = collections.defaultdict
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is DefaultDict:
+ return collections.defaultdict(*args, **kwds)
+ return _generic_new(collections.defaultdict, cls, *args, **kwds)
+
+
+class Counter(collections.Counter, Dict[T, int]):
+ __slots__ = ()
+ __extra__ = collections.Counter
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Counter:
+ return collections.Counter(*args, **kwds)
+ return _generic_new(collections.Counter, cls, *args, **kwds)
+
+
+# Determine what base class to use for Generator.
+if hasattr(collections_abc, 'Generator'):
+ # Sufficiently recent versions of 3.5 have a Generator ABC.
+ _G_base = collections_abc.Generator
+else:
+ # Fall back on the exact type.
+ _G_base = types.GeneratorType
+
+
+class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co]):
+ __slots__ = ()
+ __extra__ = _G_base
+
+ def __new__(cls, *args, **kwds):
+ if cls._gorg is Generator:
+ raise TypeError("Type Generator cannot be instantiated; "
+ "create a subclass instead")
+ return _generic_new(_G_base, cls, *args, **kwds)
+
+
+# Internal type variable used for Type[].
+CT_co = TypeVar('CT_co', covariant=True, bound=type)
+
+
+# This is not a real generic class. Don't use outside annotations.
+class Type(Generic[CT_co]):
+ """A special construct usable to annotate class objects.
+
+ For example, suppose we have the following classes::
+
+ class User: ... # Abstract base for User classes
+ class BasicUser(User): ...
+ class ProUser(User): ...
+ class TeamUser(User): ...
+
+ And a function that takes a class argument that's a subclass of
+ User and returns an instance of the corresponding class::
+
+ U = TypeVar('U', bound=User)
+ def new_user(user_class: Type[U]) -> U:
+ user = user_class()
+ # (Here we could write the user object to a database)
+ return user
+
+ joe = new_user(BasicUser)
+
+ At this point the type checker knows that joe has type BasicUser.
+ """
+ __slots__ = ()
+ __extra__ = type
+
+
+def NamedTuple(typename, fields):
+ """Typed version of namedtuple.
+
+ Usage::
+
+ Employee = typing.NamedTuple('Employee', [('name', str), ('id', int)])
+
+ This is equivalent to::
+
+ Employee = collections.namedtuple('Employee', ['name', 'id'])
+
+ The resulting class has one extra attribute: _field_types,
+ giving a dict mapping field names to types. (The field names
+ are in the _fields attribute, which is part of the namedtuple
+ API.)
+ """
+ fields = [(n, t) for n, t in fields]
+ cls = collections.namedtuple(typename, [n for n, t in fields])
+ cls._field_types = dict(fields)
+ # Set the module to the caller's module (otherwise it'd be 'typing').
+ try:
+ cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__')
+ except (AttributeError, ValueError):
+ pass
+ return cls
+
+
+def _check_fails(cls, other):
+ try:
+ if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools', 'typing']:
+ # Typed dicts are only for static structural subtyping.
+ raise TypeError('TypedDict does not support instance and class checks')
+ except (AttributeError, ValueError):
+ pass
+ return False
+
+
+def _dict_new(cls, *args, **kwargs):
+ return dict(*args, **kwargs)
+
+
+def _typeddict_new(cls, _typename, _fields=None, **kwargs):
+ total = kwargs.pop('total', True)
+ if _fields is None:
+ _fields = kwargs
+ elif kwargs:
+ raise TypeError("TypedDict takes either a dict or keyword arguments,"
+ " but not both")
+
+ ns = {'__annotations__': dict(_fields), '__total__': total}
+ try:
+ # Setting correct module is necessary to make typed dict classes pickleable.
+ ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
+ except (AttributeError, ValueError):
+ pass
+
+ return _TypedDictMeta(_typename, (), ns)
+
+
+class _TypedDictMeta(type):
+ def __new__(cls, name, bases, ns, total=True):
+ # Create new typed dict class object.
+ # This method is called directly when TypedDict is subclassed,
+ # or via _typeddict_new when TypedDict is instantiated. This way
+ # TypedDict supports all three syntaxes described in its docstring.
+ # Subclasses and instances of TypedDict return actual dictionaries
+ # via _dict_new.
+ ns['__new__'] = _typeddict_new if name == b'TypedDict' else _dict_new
+ tp_dict = super(_TypedDictMeta, cls).__new__(cls, name, (dict,), ns)
+
+ anns = ns.get('__annotations__', {})
+ msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
+ anns = {n: _type_check(tp, msg) for n, tp in anns.items()}
+ for base in bases:
+ anns.update(base.__dict__.get('__annotations__', {}))
+ tp_dict.__annotations__ = anns
+ if not hasattr(tp_dict, '__total__'):
+ tp_dict.__total__ = total
+ return tp_dict
+
+ __instancecheck__ = __subclasscheck__ = _check_fails
+
+
+TypedDict = _TypedDictMeta(b'TypedDict', (dict,), {})
+TypedDict.__module__ = __name__
+TypedDict.__doc__ = \
+ """A simple typed name space. At runtime it is equivalent to a plain dict.
+
+ TypedDict creates a dictionary type that expects all of its
+ instances to have a certain set of keys, with each key
+ associated with a value of a consistent type. This expectation
+ is not checked at runtime but is only enforced by type checkers.
+ Usage::
+
+ Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
+
+ a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK
+ b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check
+
+ assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
+
+ The type info can be accessed via Point2D.__annotations__. TypedDict
+ supports an additional equivalent form::
+
+ Point2D = TypedDict('Point2D', x=int, y=int, label=str)
+ """
+
+
+def NewType(name, tp):
+ """NewType creates simple unique types with almost zero
+ runtime overhead. NewType(name, tp) is considered a subtype of tp
+ by static type checkers. At runtime, NewType(name, tp) returns
+ a dummy function that simply returns its argument. Usage::
+
+ UserId = NewType('UserId', int)
+
+ def name_by_id(user_id):
+ # type: (UserId) -> str
+ ...
+
+ UserId('user') # Fails type check
+
+ name_by_id(42) # Fails type check
+ name_by_id(UserId(42)) # OK
+
+ num = UserId(5) + 1 # type: int
+ """
+
+ def new_type(x):
+ return x
+
+ # Coerce to str, since some versions of Python 2 complain when __name__ is unicode
+ new_type.__name__ = str(name)
+ new_type.__supertype__ = tp
+ return new_type
+
+
+# Python-version-specific alias (Python 2: unicode; Python 3: str)
+Text = unicode
+
+
+# Constant that's True when type checking, but False here.
+TYPE_CHECKING = False
+
+
+class IO(Generic[AnyStr]):
+ """Generic base class for TextIO and BinaryIO.
+
+ This is an abstract, generic version of the return of open().
+
+ NOTE: This does not distinguish between the different possible
+ classes (text vs. binary, read vs. write vs. read/write,
+ append-only, unbuffered). The TextIO and BinaryIO subclasses
+ below capture the distinctions between text vs. binary, which is
+ pervasive in the interface; however we currently do not offer a
+ way to track the other distinctions in the type system.
+ """
+
+ __slots__ = ()
+
+ @abstractproperty
+ def mode(self):
+ pass
+
+ @abstractproperty
+ def name(self):
+ pass
+
+ @abstractmethod
+ def close(self):
+ pass
+
+ @abstractproperty
+ def closed(self):
+ pass
+
+ @abstractmethod
+ def fileno(self):
+ pass
+
+ @abstractmethod
+ def flush(self):
+ pass
+
+ @abstractmethod
+ def isatty(self):
+ pass
+
+ @abstractmethod
+ def read(self, n=-1):
+ pass
+
+ @abstractmethod
+ def readable(self):
+ pass
+
+ @abstractmethod
+ def readline(self, limit=-1):
+ pass
+
+ @abstractmethod
+ def readlines(self, hint=-1):
+ pass
+
+ @abstractmethod
+ def seek(self, offset, whence=0):
+ pass
+
+ @abstractmethod
+ def seekable(self):
+ pass
+
+ @abstractmethod
+ def tell(self):
+ pass
+
+ @abstractmethod
+ def truncate(self, size=None):
+ pass
+
+ @abstractmethod
+ def writable(self):
+ pass
+
+ @abstractmethod
+ def write(self, s):
+ pass
+
+ @abstractmethod
+ def writelines(self, lines):
+ pass
+
+ @abstractmethod
+ def __enter__(self):
+ pass
+
+ @abstractmethod
+ def __exit__(self, type, value, traceback):
+ pass
+
+
+class BinaryIO(IO[bytes]):
+ """Typed version of the return of open() in binary mode."""
+
+ __slots__ = ()
+
+ @abstractmethod
+ def write(self, s):
+ pass
+
+ @abstractmethod
+ def __enter__(self):
+ pass
+
+
+class TextIO(IO[unicode]):
+ """Typed version of the return of open() in text mode."""
+
+ __slots__ = ()
+
+ @abstractproperty
+ def buffer(self):
+ pass
+
+ @abstractproperty
+ def encoding(self):
+ pass
+
+ @abstractproperty
+ def errors(self):
+ pass
+
+ @abstractproperty
+ def line_buffering(self):
+ pass
+
+ @abstractproperty
+ def newlines(self):
+ pass
+
+ @abstractmethod
+ def __enter__(self):
+ pass
+
+
+class io(object):
+ """Wrapper namespace for IO generic classes."""
+
+ __all__ = ['IO', 'TextIO', 'BinaryIO']
+ IO = IO
+ TextIO = TextIO
+ BinaryIO = BinaryIO
+
+
+io.__name__ = __name__ + b'.io'
+sys.modules[io.__name__] = io
+
+
+Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')),
+ lambda p: p.pattern)
+Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')),
+ lambda m: m.re.pattern)
+
+
+class re(object):
+ """Wrapper namespace for re type aliases."""
+
+ __all__ = ['Pattern', 'Match']
+ Pattern = Pattern
+ Match = Match
+
+
+re.__name__ = __name__ + b'.re'
+sys.modules[re.__name__] = re
diff --git a/contrib/deprecated/python/typing/ya.make b/contrib/deprecated/python/typing/ya.make
new file mode 100644
index 0000000000..5259779f65
--- /dev/null
+++ b/contrib/deprecated/python/typing/ya.make
@@ -0,0 +1,30 @@
+# NOTE: please do not change to PY23_LIBRARY()
+# instead, use
+# IF (PYTHON2)
+# PEERDIR(contrib/deprecated/python/typing)
+# ENDIF()
+# for code compatible with both Py2 and Py3
+PY2_LIBRARY() # backport
+
+LICENSE(PSF-2.0)
+
+VERSION(3.10.0.0)
+
+NO_LINT()
+
+PY_SRCS(
+ TOP_LEVEL
+ typing.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/deprecated/python/typing/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+)
+
+END()
+
+RECURSE_FOR_TESTS(
+ test
+)
diff --git a/contrib/deprecated/python/win-unicode-console/.dist-info/METADATA b/contrib/deprecated/python/win-unicode-console/.dist-info/METADATA
new file mode 100644
index 0000000000..9a922bc913
--- /dev/null
+++ b/contrib/deprecated/python/win-unicode-console/.dist-info/METADATA
@@ -0,0 +1,149 @@
+Metadata-Version: 2.1
+Name: win-unicode-console
+Version: 0.5
+Summary: Enable Unicode input and display when running Python from Windows console.
+Home-page: https://github.com/Drekin/win-unicode-console
+Author: Drekin
+Author-email: drekin@gmail.com
+License: MIT
+Download-URL: https://github.com/Drekin/win-unicode-console/archive/0.5.zip
+Keywords: Windows,Unicode,console
+Platform: UNKNOWN
+Classifier: Development Status :: 4 - Beta
+Classifier: Environment :: Console
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+
+
+win-unicode-console
+===================
+
+A Python package to enable Unicode input and display when running Python from Windows console.
+
+General information
+-------------------
+
+When running Python in the standard console on Windows, there are several problems when one tries to enter or display Unicode characters. The relevant issue is http://bugs.python.org/issue1602. This package solves some of them.
+
+- First, when you want to display Unicode characters in Windows console, you have to select a font able to display them. Similarly, if you want to enter Unicode characters, you have to have your keyboard properly configured. This has nothing to do with Python, but is included here for completeness.
+
+- The standard stream objects (``sys.stdin``, ``sys.stdout``, ``sys.stderr``) are not capable of reading and displaying Unicode characters in Windows console. This has nothing to do with encoding, since even ``sys.stdin.buffer.raw.readline()`` returns ``b"?\n"`` when entering ``α`` and there is no encoding under which ``sys.stdout.buffer.raw.write`` displays ``α``.
+
+ The ``streams`` module provides several alternative stream objects. ``stdin_raw``, ``stdout_raw``, and ``stderr_raw`` are raw stream objects using the WinAPI functions ``ReadConsoleW`` and ``WriteConsoleW`` to interact with the Windows console through UTF-16-LE encoded bytes. ``stdin_text``, ``stdout_text``, and ``stderr_text`` are standard text IO wrappers over standard buffered IO over our raw streams, and are intended to be the primary replacements for the ``sys.std*`` streams. Unfortunately, other wrappers around ``std*_text`` are needed (see below), so there are more stream objects in the ``streams`` module.
+
+ The function ``streams.enable`` installs chosen stream objects instead of the original ones. By default, it chooses appropriate stream objects itself. The function ``streams.disable`` restores the original stream objects (these are stored in ``sys.__std*__`` attributes by Python).
+
+ After replacing the stream objects, also using ``print`` with a string containing Unicode characters and displaying Unicode characters in the interactive loop works. For ``input``, see below.
+
+- The Python interactive loop doesn't use ``sys.stdin`` to read input, so fixing it doesn't help. Also, the ``input`` function may or may not use ``sys.stdin``, depending on whether ``sys.stdin`` and ``sys.stdout`` have the standard filenos and whether they are interactive. See http://bugs.python.org/issue17620 for more information.
+
+ To solve this, we install a custom readline hook. A readline hook is a function used by the Python REPL to read a single line interactively. It may also be used by the ``input`` function under certain conditions (see above). On Linux, this hook is usually set to the GNU readline function, which provides features like autocompletion, history, …
+
+ The module ``readline_hook`` provides our custom readline hook, which uses ``sys.stdin`` to get the input and is (de)activated by functions ``readline_hook.enable``, ``readline_hook.disable``.
+
+ As we said, the readline hook can be called from two places – from the REPL and from the ``input`` function. In the first case the prompt is encoded using ``sys.stdin.encoding``, but in the second case ``sys.stdout.encoding`` is used. So Python currently assumes that these two encodings are equal.
+
+- The Python tokenizer, which is used when parsing the input from the REPL, cannot handle UTF-16 or generally any encoding containing null bytes. Because UTF-16-LE is the encoding of Unicode used by Windows, we have to additionally wrap our text stream objects (``std*_text``). Thus, the ``streams`` module also contains the stream objects ``stdin_text_transcoded``, ``stdout_text_transcoded``, and ``stderr_text_transcoded``. They basically just hide the underlying UTF-16-LE encoded buffered IO and set the encoding to UTF-8. These transcoding wrappers are used by default by ``streams.enable``.
+
+There are additional issues on Python 2.
+
+- Since default Python 2 strings correspond to ``bytes`` rather than ``unicode``, people usually call ``print`` with a ``bytes`` argument. Therefore, ``sys.stdout.write`` and ``sys.stderr.write`` should support a ``bytes`` argument. That is why we add the ``stdout_text_str`` and ``stderr_text_str`` stream objects to the ``streams`` module. They are used by default on Python 2.
+
+- When we enter a Unicode literal into the interactive interpreter, it gets processed by the Python tokenizer, which is bytes-based. When we enter ``u"\u03b1"`` into the interactive interpreter, the tokenizer gets essentially ``b'u"\xce\xb1"'`` plus the information that the encoding used is UTF-8. The problem is that the tokenizer uses the encoding only if ``sys.stdin`` is a file object (see https://hg.python.org/cpython/file/d356e68de236/Parser/tokenizer.c#l797). Hence, we introduce another stream object, ``streams.stdin_text_fileobj``, that wraps ``stdin_text_transcoded`` and is also structurally compatible with the Python file object. This object is used by default on Python 2.
+
+- The check for interactive streams done by ``raw_input`` unfortunately requires that both ``sys.stdin`` and ``sys.stdout`` are file objects. Besides ``stdin_text_fileobj`` for stdin, we could also use ``stdout_text_str_fileobj`` for stdout. Unfortunately, that breaks ``print``.
+
+ Using ``print`` statement or function leads to calling ``PyFile_WriteObject`` with ``sys.stdout`` as argument. Unfortunately, its generic ``write`` method is used only if it is *not* a file object. Otherwise, ``PyObject_Print`` is called, and this function is file-based, so it ends with a ``fprintf`` call, which is not something we want. In conclusion, we need stdout *not* to be a file object.
+
+ Given the situation described, the best solution seems to be reimplementing ``raw_input`` and ``input`` builtin functions and monkeypatching ``__builtins__``. This is done by our ``raw_input`` module on Python 2.
+
+- Similarly to the input from ``sys.stdin``, the arguments in ``sys.argv`` are also ``bytes`` on Python 2, and the original ones may not be reconstructable. To overcome this, we add the ``unicode_argv`` module. The function ``unicode_argv.get_unicode_argv`` returns a Unicode version of ``sys.argv`` obtained by the WinAPI functions ``GetCommandLineW`` and ``CommandLineToArgvW``. The function ``unicode_argv.enable`` monkeypatches ``sys.argv`` with the Unicode arguments.
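+
+For illustration, a minimal sketch of the stream replacement described above (assuming the package is installed; ``enable`` and ``disable`` are the documented entry points of the ``streams`` module)::
+
+    from win_unicode_console import streams
+
+    streams.enable()           # install Unicode-capable sys.std* replacements
+    try:
+        print(u"\u03b1")       # Greek alpha now displays in the console
+    finally:
+        streams.disable()      # restore the original stream objects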
+
+
+Installation
+------------
+
+Install the package from PyPI via ``pip install win-unicode-console`` (recommended), or download the archive and install it from the archive (e.g. ``pip install win_unicode_console-0.x.zip``), or install the package manually by placing the directory ``win_unicode_console`` and the module ``run.py`` from the archive into the ``site-packages`` directory of your Python installation.
+
+
+Usage
+-----
+
+The top-level ``win_unicode_console`` module contains a function ``enable``, which installs the various fixes offered by the ``win_unicode_console`` modules, and a function ``disable``, which restores the original environment. By default, custom stream objects are installed as well as a custom readline hook. On Python 2, the ``raw_input`` and ``input`` functions are monkeypatched. ``sys.argv`` is not monkeypatched by default since unfortunately some Python 2 code strictly assumes ``str`` instances in the ``sys.argv`` list. Use ``enable(use_unicode_argv=True)`` if you want the monkeypatching. For further customization, see the sources. The logic should be clear.
+
+Generic usage of the package is just calling ``win_unicode_console.enable()`` whenever the fixes should be applied and ``win_unicode_console.disable()`` to revert all the changes. Note that it should be the responsibility of a Python user on Windows to install ``win_unicode_console`` and fix their Python environment regarding Unicode interaction with the console, rather than of a third-party developer enabling ``win_unicode_console`` in an application, which adds a dependency. Our package should be seen as an external patch to Python on Windows rather than a feature package for other packages not directly related to fixing Unicode issues.
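+
+For example, a minimal ``sitecustomize.py`` following the "Python patch" approach described below (a sketch; all fixes keep their defaults)::
+
+    # sitecustomize.py -- runs at interpreter startup when site is enabled
+    import win_unicode_console
+    win_unicode_console.enable()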
+
+Different ways of how ``win_unicode_console`` can be used to fix a Python environment on Windows follow.
+
+- *Python patch (recommended).* Just call ``win_unicode_console.enable()`` in your ``sitecustomize`` or ``usercustomize`` module (see https://docs.python.org/3/tutorial/appendix.html#the-customization-modules for more information). This will enable ``win_unicode_console`` on every run of the Python interpreter (unless ``site`` is disabled). Doing so should not break executed scripts in any way. Otherwise, it is a bug of ``win_unicode_console`` that should be fixed.
+
+- *Opt-in runner.* You may easily run a script with ``win_unicode_console`` enabled by using our ``runner`` module and its helper ``run`` script. To do so, execute ``py -i -m run script.py`` instead of ``py -i script.py`` for interactive mode, and similarly ``py -m run script.py`` instead of ``py script.py`` for non-interactive mode. Of course you may provide arguments to your script: ``py -i -m run script.py arg1 arg2``. To run the bare interactive interpreter with ``win_unicode_console`` enabled, execute ``py -i -m run``.
+
+- *Opt-out runner.* In case you are using ``win_unicode_console`` as Python patch, but you want to run a particular script with ``win_unicode_console`` disabled, you can also use the runner. To do so, execute ``py -i -m run --init-disable script.py``.
+
+- *Customized runner.* To move arbitrary initialization (e.g. enabling ``win_unicode_console`` with non-default arguments) from ``sitecustomize`` to opt-in runner, move it to a separate module and use ``py -i -m run --init-module module script.py``. That will import a module ``module`` on startup instead of enabling ``win_unicode_console`` with default arguments.
+
+
+Compatibility
+-------------
+
+The ``win_unicode_console`` package was tested on Python 3.4, Python 3.5, and Python 2.7. 32-bit or 64-bit shouldn't matter. It also interacts well with the following packages:
+
+- ``colorama`` package (https://pypi.python.org/pypi/colorama) makes ANSI escape character sequences (for producing colored terminal text and cursor positioning) work under MS Windows. It does so by wrapping ``sys.stdout`` and ``sys.stderr`` streams. Since ``win_unicode_console`` replaces the streams in order to support Unicode, ``win_unicode_console.enable`` has to be called before ``colorama.init`` so everything works as expected.
+
+ As of ``colorama`` v0.3.3, there was an early binding issue (https://github.com/tartley/colorama/issues/32), so ``win_unicode_console.enable`` had to be called even before importing ``colorama``. Note that this is already the case when ``win_unicode_console`` is used as a Python patch or as the opt-in runner. The issue has since been fixed.
+
+- ``pyreadline`` package (https://pypi.python.org/pypi/pyreadline/2.0) implements GNU readline features on Windows. It provides its own readline hook, which actually supports Unicode input. ``win_unicode_console.readline_hook`` detects when ``pyreadline`` is active, and in that case, by default, reuses its readline hook rather than installing its own, so GNU readline features are preserved on top of our Unicode streams.
+
+- ``IPython`` (https://pypi.python.org/pypi/ipython) can also be used with ``win_unicode_console``.
+
+ As of ``IPython`` 3.2.1, there is an early binding issue (https://github.com/ipython/ipython/issues/8669), so ``win_unicode_console.enable`` has to be called even before importing ``IPython``. That is the case when ``win_unicode_console`` is used as Python patch.
+
+ There was also an issue that IPython was not compatible with the builtin function ``raw_input`` returning unicode on Python 2 (https://github.com/ipython/ipython/issues/8670). If you hit this issue, you can make ``win_unicode_console.raw_input.raw_input`` return bytes by enabling it as ``win_unicode_console.enable(raw_input__return_unicode=False)``. This was fixed in IPython 4.
+
+
+Backward incompatibility
+------------------------
+
+- Since version 0.4, the signature of ``streams.enable`` has been changed because there are now more options for the stream objects to be used. It now accepts a keyword argument for each ``stdin``, ``stdout``, ``stderr``, setting the corresponding stream. ``None`` means “do not set”, ``Ellipsis`` means “use the default value”.
+
+ A function ``streams.enable_only`` was added. It works the same way as ``streams.enable``, but the default value for each parameter is ``None``.
+
+ The functions ``streams.enable_reader``, ``streams.enable_writer``, and ``streams.enable_error_writer`` have been removed. Example: instead of ``streams.enable_reader(transcode=True)``, use ``streams.enable_only(stdin=streams.stdin_text_transcoded)``.
+
+ There are also corresponding changes in top-level ``enable`` function.
+
+- Since version 0.3, the custom stream objects have the standard filenos, so calling ``input`` doesn't handle Unicode without the custom readline hook.
+
+
+Acknowledgements
+----------------
+
+- The code of ``streams`` module is based on the code submitted to http://bugs.python.org/issue1602.
+- The idea of providing custom readline hook and the code of ``readline_hook`` module is based on https://github.com/pyreadline/pyreadline.
+- The code related to ``unicode_argv.get_full_unicode_argv`` is based on http://code.activestate.com/recipes/572200/.
+- The idea of using path hooks and the code related to ``unicode_argv.argv_setter_hook`` is based on https://mail.python.org/pipermail/python-list/2016-June/710183.html.
+
+
diff --git a/contrib/deprecated/python/win-unicode-console/.dist-info/top_level.txt b/contrib/deprecated/python/win-unicode-console/.dist-info/top_level.txt
new file mode 100644
index 0000000000..f1cd8d49cd
--- /dev/null
+++ b/contrib/deprecated/python/win-unicode-console/.dist-info/top_level.txt
@@ -0,0 +1,2 @@
+run
+win_unicode_console
diff --git a/contrib/deprecated/python/win-unicode-console/README.rst b/contrib/deprecated/python/win-unicode-console/README.rst
new file mode 100644
index 0000000000..21fc11166d
--- /dev/null
+++ b/contrib/deprecated/python/win-unicode-console/README.rst
@@ -0,0 +1,124 @@
+
+win-unicode-console
+===================
+
+A Python package to enable Unicode input and display when running Python from Windows console.
+
+General information
+-------------------
+
+When running Python in the standard console on Windows, there are several problems when one tries to enter or display Unicode characters. The relevant issue is http://bugs.python.org/issue1602. This package solves some of them.
+
+- First, when you want to display Unicode characters in Windows console, you have to select a font able to display them. Similarly, if you want to enter Unicode characters, you have to have your keyboard properly configured. This has nothing to do with Python, but is included here for completeness.
+
+- The standard stream objects (``sys.stdin``, ``sys.stdout``, ``sys.stderr``) are not capable of reading and displaying Unicode characters in Windows console. This has nothing to do with encoding, since even ``sys.stdin.buffer.raw.readline()`` returns ``b"?\n"`` when entering ``α`` and there is no encoding under which ``sys.stdout.buffer.raw.write`` displays ``α``.
+
+ The ``streams`` module provides several alternative stream objects. ``stdin_raw``, ``stdout_raw``, and ``stderr_raw`` are raw stream objects using the WinAPI functions ``ReadConsoleW`` and ``WriteConsoleW`` to interact with the Windows console through UTF-16-LE encoded bytes. ``stdin_text``, ``stdout_text``, and ``stderr_text`` are standard text IO wrappers over standard buffered IO over our raw streams, and are intended to be the primary replacements for the ``sys.std*`` streams. Unfortunately, other wrappers around ``std*_text`` are needed (see below), so there are more stream objects in the ``streams`` module.
+
+ The function ``streams.enable`` installs chosen stream objects instead of the original ones. By default, it chooses appropriate stream objects itself. The function ``streams.disable`` restores the original stream objects (these are stored in ``sys.__std*__`` attributes by Python).
+
+ After replacing the stream objects, also using ``print`` with a string containing Unicode characters and displaying Unicode characters in the interactive loop works. For ``input``, see below.
+
+- The Python interactive loop doesn't use ``sys.stdin`` to read input, so fixing it doesn't help. Also, the ``input`` function may or may not use ``sys.stdin``, depending on whether ``sys.stdin`` and ``sys.stdout`` have the standard filenos and whether they are interactive. See http://bugs.python.org/issue17620 for more information.
+
+ To solve this, we install a custom readline hook. A readline hook is a function used by the Python REPL to read a single line interactively. It may also be used by the ``input`` function under certain conditions (see above). On Linux, this hook is usually set to the GNU readline function, which provides features like autocompletion, history, …
+
+ The module ``readline_hook`` provides our custom readline hook, which uses ``sys.stdin`` to get the input and is (de)activated by functions ``readline_hook.enable``, ``readline_hook.disable``.
+
+ As we said, the readline hook can be called from two places – from the REPL and from the ``input`` function. In the first case the prompt is encoded using ``sys.stdin.encoding``, but in the second case ``sys.stdout.encoding`` is used. So Python currently assumes that these two encodings are equal.
+
+- The Python tokenizer, which is used when parsing the input from the REPL, cannot handle UTF-16 or generally any encoding containing null bytes. Because UTF-16-LE is the encoding of Unicode used by Windows, we have to additionally wrap our text stream objects (``std*_text``). Thus, the ``streams`` module also contains the stream objects ``stdin_text_transcoded``, ``stdout_text_transcoded``, and ``stderr_text_transcoded``. They basically just hide the underlying UTF-16-LE encoded buffered IO and set the encoding to UTF-8. These transcoding wrappers are used by default by ``streams.enable``.
+
+There are additional issues on Python 2.
+
+- Since default Python 2 strings correspond to ``bytes`` rather than ``unicode``, people usually call ``print`` with a ``bytes`` argument. Therefore, ``sys.stdout.write`` and ``sys.stderr.write`` should support a ``bytes`` argument. That is why we add the ``stdout_text_str`` and ``stderr_text_str`` stream objects to the ``streams`` module. They are used by default on Python 2.
+
+- When we enter a Unicode literal into the interactive interpreter, it gets processed by the Python tokenizer, which is bytes-based. When we enter ``u"\u03b1"`` into the interactive interpreter, the tokenizer gets essentially ``b'u"\xce\xb1"'`` plus the information that the encoding used is UTF-8. The problem is that the tokenizer uses the encoding only if ``sys.stdin`` is a file object (see https://hg.python.org/cpython/file/d356e68de236/Parser/tokenizer.c#l797). Hence, we introduce another stream object, ``streams.stdin_text_fileobj``, that wraps ``stdin_text_transcoded`` and is also structurally compatible with the Python file object. This object is used by default on Python 2.
+
+- The check for interactive streams done by ``raw_input`` unfortunately requires that both ``sys.stdin`` and ``sys.stdout`` are file objects. Besides ``stdin_text_fileobj`` for stdin, we could also use ``stdout_text_str_fileobj`` for stdout. Unfortunately, that breaks ``print``.
+
+ Using ``print`` statement or function leads to calling ``PyFile_WriteObject`` with ``sys.stdout`` as argument. Unfortunately, its generic ``write`` method is used only if it is *not* a file object. Otherwise, ``PyObject_Print`` is called, and this function is file-based, so it ends with a ``fprintf`` call, which is not something we want. In conclusion, we need stdout *not* to be a file object.
+
+ Given the situation described, the best solution seems to be reimplementing ``raw_input`` and ``input`` builtin functions and monkeypatching ``__builtins__``. This is done by our ``raw_input`` module on Python 2.
+
+- Similarly to the input from ``sys.stdin``, the arguments in ``sys.argv`` are also ``bytes`` on Python 2, and the original ones may not be reconstructable. To overcome this, we add the ``unicode_argv`` module. The function ``unicode_argv.get_unicode_argv`` returns a Unicode version of ``sys.argv`` obtained by the WinAPI functions ``GetCommandLineW`` and ``CommandLineToArgvW``. The function ``unicode_argv.enable`` monkeypatches ``sys.argv`` with the Unicode arguments.
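+
+For illustration, a minimal sketch of the readline hook described above (assuming the package is installed; ``enable`` and ``disable`` are the documented entry points of the ``readline_hook`` module)::
+
+    from win_unicode_console import streams, readline_hook
+
+    streams.enable()         # Unicode-capable sys.stdin for the hook to use
+    readline_hook.enable()   # the REPL and input() now read via sys.stdin
+    # ... interactive session ...
+    readline_hook.disable()  # restore the previous readline hook
+    streams.disable()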
+
+
+Installation
+------------
+
+Install the package from PyPI via ``pip install win-unicode-console`` (recommended), or download the archive and install it from the archive (e.g. ``pip install win_unicode_console-0.x.zip``), or install the package manually by placing the directory ``win_unicode_console`` and the module ``run.py`` from the archive into the ``site-packages`` directory of your Python installation.
+
+
+Usage
+-----
+
+The top-level ``win_unicode_console`` module contains a function ``enable``, which installs the various fixes offered by the ``win_unicode_console`` modules, and a function ``disable``, which restores the original environment. By default, custom stream objects are installed as well as a custom readline hook. On Python 2, the ``raw_input`` and ``input`` functions are monkeypatched. ``sys.argv`` is not monkeypatched by default since unfortunately some Python 2 code strictly assumes ``str`` instances in the ``sys.argv`` list. Use ``enable(use_unicode_argv=True)`` if you want the monkeypatching. For further customization, see the sources. The logic should be clear.
+
+Generic usage of the package is just calling ``win_unicode_console.enable()`` whenever the fixes should be applied and ``win_unicode_console.disable()`` to revert all the changes. Note that it should be the responsibility of a Python user on Windows to install ``win_unicode_console`` and fix their Python environment regarding Unicode interaction with the console, rather than of a third-party developer enabling ``win_unicode_console`` in an application, which adds a dependency. Our package should be seen as an external patch to Python on Windows rather than a feature package for other packages not directly related to fixing Unicode issues.
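+
+For example, enabling the optional ``sys.argv`` monkeypatching mentioned above (a sketch; all other fixes keep their defaults)::
+
+    import win_unicode_console
+    win_unicode_console.enable(use_unicode_argv=True)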
+
+Different ways of how ``win_unicode_console`` can be used to fix a Python environment on Windows follow.
+
+- *Python patch (recommended).* Just call ``win_unicode_console.enable()`` in your ``sitecustomize`` or ``usercustomize`` module (see https://docs.python.org/3/tutorial/appendix.html#the-customization-modules for more information). This will enable ``win_unicode_console`` on every run of the Python interpreter (unless ``site`` is disabled). Doing so should not break executed scripts in any way; if it does, that is a bug in ``win_unicode_console`` that should be fixed.
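+
+  A minimal ``sitecustomize.py`` along these lines::
+
+      import win_unicode_console
+      win_unicode_console.enable()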
+
+- *Opt-in runner.* You may easily run a script with ``win_unicode_console`` enabled by using our ``runner`` module and its helper ``run`` script. To do so, execute ``py -i -m run script.py`` instead of ``py -i script.py`` for interactive mode, and similarly ``py -m run script.py`` instead of ``py script.py`` for non-interactive mode. Of course, you may provide arguments to your script: ``py -i -m run script.py arg1 arg2``. To run the bare interactive interpreter with ``win_unicode_console`` enabled, execute ``py -i -m run``.
+
+- *Opt-out runner.* In case you are using ``win_unicode_console`` as Python patch, but you want to run a particular script with ``win_unicode_console`` disabled, you can also use the runner. To do so, execute ``py -i -m run --init-disable script.py``.
+
+- *Customized runner.* To move arbitrary initialization (e.g. enabling ``win_unicode_console`` with non-default arguments) from ``sitecustomize`` to an opt-in runner, move it to a separate module and use ``py -i -m run --init-module module script.py``. That will import the module ``module`` on startup instead of enabling ``win_unicode_console`` with default arguments.
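+
+  For instance, with a hypothetical init module ``my_init.py``::
+
+      import win_unicode_console
+      win_unicode_console.enable(use_unicode_argv=True)
+
+  running ``py -i -m run --init-module my_init script.py`` imports it on startup instead of enabling ``win_unicode_console`` with default arguments.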
+
+
+Compatibility
+-------------
+
+The ``win_unicode_console`` package was tested on Python 3.4, Python 3.5, and Python 2.7; whether the build is 32-bit or 64-bit shouldn't matter. It also interacts well with the following packages:
+
+- The ``colorama`` package (https://pypi.python.org/pypi/colorama) makes ANSI escape sequences (for producing colored terminal text and cursor positioning) work under MS Windows. It does so by wrapping the ``sys.stdout`` and ``sys.stderr`` streams. Since ``win_unicode_console`` replaces the streams in order to support Unicode, ``win_unicode_console.enable`` has to be called before ``colorama.init`` so that everything works as expected.
+
+  As of ``colorama`` v0.3.3, there was an early-binding issue (https://github.com/tartley/colorama/issues/32), so ``win_unicode_console.enable`` had to be called even before importing ``colorama``. Note that this is already the case when ``win_unicode_console`` is used as a Python patch or as an opt-in runner. The issue has since been fixed.
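+
+  A sketch of the required ordering::
+
+      import win_unicode_console
+      win_unicode_console.enable()  # replace the streams first
+
+      import colorama
+      colorama.init()               # colorama then wraps the fixed streams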
+
+- The ``pyreadline`` package (https://pypi.python.org/pypi/pyreadline/2.0) implements GNU readline features on Windows. It provides its own readline hook, which actually supports Unicode input. ``win_unicode_console.readline_hook`` detects when ``pyreadline`` is active and, in that case, by default reuses its readline hook rather than installing its own, so GNU readline features are preserved on top of our Unicode streams.
+
+- ``IPython`` (https://pypi.python.org/pypi/ipython) can also be used with ``win_unicode_console``.
+
+  As of ``IPython`` 3.2.1, there is an early-binding issue (https://github.com/ipython/ipython/issues/8669), so ``win_unicode_console.enable`` has to be called even before importing ``IPython``. That is the case when ``win_unicode_console`` is used as a Python patch.
+
+  There was also an issue where ``IPython`` was not compatible with the builtin ``raw_input`` function returning ``unicode`` on Python 2 (https://github.com/ipython/ipython/issues/8670). If you hit this issue, you can make ``win_unicode_console.raw_input.raw_input`` return bytes by enabling it as ``win_unicode_console.enable(raw_input__return_unicode=False)``. This was fixed in ``IPython`` 4.
+
+
+Backward incompatibility
+------------------------
+
+- Since version 0.4, the signature of ``streams.enable`` has changed because there are now more options for the stream objects to be used. It now accepts a keyword argument for each of ``stdin``, ``stdout``, and ``stderr``, setting the corresponding stream. ``None`` means “do not set”, ``Ellipsis`` means “use the default value”.
+
+ A function ``streams.enable_only`` was added. It works the same way as ``streams.enable``, but the default value for each parameter is ``None``.
+
+  The functions ``streams.enable_reader``, ``streams.enable_writer``, and ``streams.enable_error_writer`` have been removed. For example, instead of ``streams.enable_reader(transcode=True)``, use ``streams.enable_only(stdin=streams.stdin_text_transcoded)``.
+
+  There are also corresponding changes in the top-level ``enable`` function.
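+
+  For illustration, under the new API::
+
+      from win_unicode_console import streams
+
+      streams.enable(stdout=None)  # default streams for stdin and stderr, stdout untouched
+      streams.enable_only(stdin=streams.stdin_text_transcoded)  # set only stdin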
+
+- Since version 0.3, the custom stream objects have the standard filenos, so calling ``input`` doesn't handle Unicode without a custom readline hook.
+
+
+Acknowledgements
+----------------
+
+- The code of ``streams`` module is based on the code submitted to http://bugs.python.org/issue1602.
+- The idea of providing custom readline hook and the code of ``readline_hook`` module is based on https://github.com/pyreadline/pyreadline.
+- The code related to ``unicode_argv.get_full_unicode_argv`` is based on http://code.activestate.com/recipes/572200/.
+- The idea of using path hooks and the code related to ``unicode_argv.argv_setter_hook`` is based on https://mail.python.org/pipermail/python-list/2016-June/710183.html.
diff --git a/contrib/deprecated/python/win-unicode-console/win_unicode_console/__init__.py b/contrib/deprecated/python/win-unicode-console/win_unicode_console/__init__.py
new file mode 100644
index 0000000000..7ec03c523f
--- /dev/null
+++ b/contrib/deprecated/python/win-unicode-console/win_unicode_console/__init__.py
@@ -0,0 +1,54 @@
+
+from . import streams, console #, readline_hook
+from .info import WINDOWS, PY2
+
+if PY2:
+ from . import raw_input
+
+if PY2 and WINDOWS:
+ from . import unicode_argv
+
+
+# PY3 # def enable(*,
+def enable(
+ stdin = Ellipsis,
+ stdout = Ellipsis,
+ stderr = Ellipsis,
+ use_readline_hook = False,
+ use_pyreadline = True,
+ use_raw_input = True, # PY2
+ raw_input__return_unicode = raw_input.RETURN_UNICODE if PY2 else None,
+ use_unicode_argv = False, # PY2, has some issues
+ use_repl = False#,
+ ):
+
+ if not WINDOWS:
+ return
+
+ streams.enable(stdin=stdin, stdout=stdout, stderr=stderr)
+
+ #if use_readline_hook:
+ # readline_hook.enable(use_pyreadline=use_pyreadline)
+
+ if PY2 and use_raw_input:
+ raw_input.enable(raw_input__return_unicode)
+
+ if PY2 and use_unicode_argv:
+ unicode_argv.enable()
+
+ if use_repl:
+ console.enable()
+
+def disable():
+ if not WINDOWS:
+ return
+
+ if console.running_console is not None:
+ console.disable()
+
+ if PY2:
+ unicode_argv.disable()
+ raw_input.disable()
+
+ #readline_hook.disable()
+ streams.disable()
diff --git a/contrib/deprecated/python/win-unicode-console/win_unicode_console/buffer.py b/contrib/deprecated/python/win-unicode-console/win_unicode_console/buffer.py
new file mode 100644
index 0000000000..4f87d5ffb6
--- /dev/null
+++ b/contrib/deprecated/python/win-unicode-console/win_unicode_console/buffer.py
@@ -0,0 +1,54 @@
+
+import ctypes
+from ctypes import (byref, POINTER, pythonapi,
+ c_int, c_char, c_char_p, c_void_p, py_object, c_ssize_t)
+
+from .info import PY2
+
+
+c_ssize_p = POINTER(c_ssize_t)
+
+PyObject_GetBuffer = pythonapi.PyObject_GetBuffer
+PyBuffer_Release = pythonapi.PyBuffer_Release
+
+
+PyBUF_SIMPLE = 0
+PyBUF_WRITABLE = 1
+
+
+class Py_buffer(ctypes.Structure):
+ _fields_ = [
+ ("buf", c_void_p),
+ ("obj", py_object),
+ ("len", c_ssize_t),
+ ("itemsize", c_ssize_t),
+ ("readonly", c_int),
+ ("ndim", c_int),
+ ("format", c_char_p),
+ ("shape", c_ssize_p),
+ ("strides", c_ssize_p),
+ ("suboffsets", c_ssize_p),
+ ("internal", c_void_p)
+ ]
+
+ if PY2:
+ _fields_.insert(-1, ("smalltable", c_ssize_t * 2))
+
+ @classmethod
+ def get_from(cls, obj, flags=PyBUF_SIMPLE):
+ buf = cls()
+ PyObject_GetBuffer(py_object(obj), byref(buf), flags)
+ return buf
+
+ def release(self):
+ PyBuffer_Release(byref(self))
+
+
+def get_buffer(obj, writable=False):
+ buf = Py_buffer.get_from(obj, PyBUF_WRITABLE if writable else PyBUF_SIMPLE)
+ try:
+ buffer_type = c_char * buf.len
+ return buffer_type.from_address(buf.buf)
+ finally:
+ buf.release()
+
diff --git a/contrib/deprecated/python/win-unicode-console/win_unicode_console/console.py b/contrib/deprecated/python/win-unicode-console/win_unicode_console/console.py
new file mode 100644
index 0000000000..f5da52ca88
--- /dev/null
+++ b/contrib/deprecated/python/win-unicode-console/win_unicode_console/console.py
@@ -0,0 +1,106 @@
+
+from __future__ import print_function # PY2
+
+import __main__
+import code
+import sys
+
+from .info import PY2
+
+
+def print_banner(file=sys.stderr):
+ print("Python {} on {}".format(sys.version, sys.platform), file=file)
+ print('Type "help", "copyright", "credits" or "license" for more information.', file=file)
+
+# PY3 # class InteractiveConsole(code.InteractiveConsole):
+class InteractiveConsole(code.InteractiveConsole, object):
+ # code.InteractiveConsole without banner
+ # exits on EOF
+ # also more robust treating of sys.ps1, sys.ps2
+ # prints prompt into stderr rather than stdout
+ # flushes sys.stderr and sys.stdout
+
+ def __init__(self, locals=None, filename="<stdin>"):
+ self.done = False
+ # PY3 # super().__init__(locals, filename)
+ super(InteractiveConsole, self).__init__(locals, filename)
+
+ def raw_input(self, prompt=""):
+ sys.stderr.write(prompt)
+ if PY2:
+ return raw_input()
+ else:
+ return input()
+
+ def runcode(self, code):
+ # PY3 # super().runcode(code)
+ super(InteractiveConsole, self).runcode(code)
+ sys.stderr.flush()
+ sys.stdout.flush()
+
+ def interact(self):
+ #sys.ps1 = "~>> "
+ #sys.ps2 = "~.. "
+
+ try:
+ sys.ps1
+ except AttributeError:
+ sys.ps1 = ">>> "
+
+ try:
+ sys.ps2
+ except AttributeError:
+ sys.ps2 = "... "
+
+ more = 0
+ while not self.done:
+ try:
+ if more:
+ try:
+ prompt = sys.ps2
+ except AttributeError:
+ prompt = ""
+ else:
+ try:
+ prompt = sys.ps1
+ except AttributeError:
+ prompt = ""
+
+ try:
+ line = self.raw_input(prompt)
+ except EOFError:
+ self.on_EOF()
+ else:
+ more = self.push(line)
+
+ except KeyboardInterrupt:
+ self.write("\nKeyboardInterrupt\n")
+ self.resetbuffer()
+ more = 0
+
+ def on_EOF(self):
+ self.write("\n")
+ # PY3 # raise SystemExit from None
+ raise SystemExit
+
+
+running_console = None
+
+def enable():
+ global running_console
+
+ if running_console is not None:
+ raise RuntimeError("interactive console already running")
+ else:
+ running_console = InteractiveConsole(__main__.__dict__)
+ running_console.interact()
+
+def disable():
+ global running_console
+
+ if running_console is None:
+ raise RuntimeError("interactive console is not running")
+ else:
+ running_console.done = True
+ running_console = None
+
diff --git a/contrib/deprecated/python/win-unicode-console/win_unicode_console/file_object.py b/contrib/deprecated/python/win-unicode-console/win_unicode_console/file_object.py
new file mode 100644
index 0000000000..3c9b56ccb4
--- /dev/null
+++ b/contrib/deprecated/python/win-unicode-console/win_unicode_console/file_object.py
@@ -0,0 +1,55 @@
+
+from .info import check_PY2
+check_PY2()
+
+import ctypes
+from ctypes import (byref, pythonapi,
+ c_int, c_char_p, c_void_p, py_object, c_ssize_t)
+
+
+class FileObject(ctypes.Structure):
+ _fields_ = [
+ #("_ob_next", c_void_p),
+ #("_ob_prev", c_void_p),
+ ("ob_refcnt", c_ssize_t),
+ ("ob_type", c_void_p),
+
+ ("fp", c_void_p),
+ ("name", py_object),
+ ("mode", py_object),
+ ("close", c_void_p),
+ ("softspace", c_int),
+ ("binary", c_int),
+ ("buf", c_char_p),
+ ("bufend", c_char_p),
+ ("bufptr", c_char_p),
+ ("setbuf", c_char_p),
+ ("univ_newline", c_int),
+ ("newlinetypes", c_int),
+ ("skipnextlf", c_int),
+ ("encoding", py_object),
+ ("errors", py_object),
+ ("weakreflist", py_object),
+ ("unlocked_count", c_int),
+ ("readable", c_int),
+ ("writable", c_int),
+ ]
+
+ @classmethod
+ def from_file(cls, f):
+ if not isinstance(f, file):
+ raise TypeError("f has to be a file")
+
+ return cls.from_address(id(f))
+
+ def set_encoding(self, encoding):
+ if not isinstance(encoding, str):
+ raise TypeError("encoding has to be a str")
+
+ pythonapi.PyFile_SetEncoding(byref(self), encoding)
+
+ def copy_file_pointer(self, f):
+ if not isinstance(f, file):
+ raise TypeError("f has to be a file")
+
+ self.fp = pythonapi.PyFile_AsFile(py_object(f))
diff --git a/contrib/deprecated/python/win-unicode-console/win_unicode_console/info.py b/contrib/deprecated/python/win-unicode-console/win_unicode_console/info.py
new file mode 100644
index 0000000000..ed058484a5
--- /dev/null
+++ b/contrib/deprecated/python/win-unicode-console/win_unicode_console/info.py
@@ -0,0 +1,17 @@
+
+import sys
+import platform
+
+
+WINDOWS = platform.system().lower() == "windows"
+PY2 = sys.version_info.major < 3
+
+def check_Windows():
+ current_platform = platform.system()
+
+ if not WINDOWS:
+ raise RuntimeError("available only for Windows, not {}.".format(current_platform))
+
+def check_PY2():
+ if not PY2:
+ raise RuntimeError("needed only in Python 2")
diff --git a/contrib/deprecated/python/win-unicode-console/win_unicode_console/raw_input.py b/contrib/deprecated/python/win-unicode-console/win_unicode_console/raw_input.py
new file mode 100644
index 0000000000..35adcbb292
--- /dev/null
+++ b/contrib/deprecated/python/win-unicode-console/win_unicode_console/raw_input.py
@@ -0,0 +1,125 @@
+
+from .info import check_PY2
+check_PY2()
+
+import __builtin__ as builtins
+import sys
+from ctypes import pythonapi, c_char_p, c_void_p, py_object
+
+from .streams import STDIN, STDOUT
+from .readline_hook import check_encodings, stdio_readline
+
+
+original_raw_input = builtins.raw_input
+original_input = builtins.input
+
+RETURN_UNICODE = True
+
+
+PyOS_Readline = pythonapi.PyOS_Readline
+PyOS_Readline.restype = c_char_p
+PyOS_Readline.argtypes = [c_void_p, c_void_p, c_char_p]
+
+PyFile_AsFile = pythonapi.PyFile_AsFile
+PyFile_AsFile.restype = c_void_p
+PyFile_AsFile.argtypes = [py_object]
+
+STDIN_FILE_POINTER = PyFile_AsFile(sys.stdin)
+STDOUT_FILE_POINTER = PyFile_AsFile(sys.stdout)
+
+
+def stdout_encode(s):
+ if isinstance(s, bytes):
+ return s
+ encoding = sys.stdout.encoding
+ errors = sys.stdout.errors
+ if errors is not None:
+ return s.encode(encoding, errors)
+ else:
+ return s.encode(encoding)
+
+def stdin_encode(s):
+ if isinstance(s, bytes):
+ return s
+ encoding = sys.stdin.encoding
+ errors = sys.stdin.errors
+ if errors is not None:
+ return s.encode(encoding, errors)
+ else:
+ return s.encode(encoding)
+
+def stdin_decode(b):
+ if isinstance(b, unicode):
+ return b
+ encoding = sys.stdin.encoding
+ errors = sys.stdin.errors
+ if errors is not None:
+ return b.decode(encoding, errors)
+ else:
+ return b.decode(encoding)
+
+def readline(prompt=""):
+ check_encodings()
+ prompt_bytes = stdout_encode(prompt)
+ line_bytes = PyOS_Readline(STDIN_FILE_POINTER, STDOUT_FILE_POINTER, prompt_bytes)
+ if line_bytes is None:
+ raise KeyboardInterrupt
+ else:
+ return line_bytes
+
+
+def raw_input(prompt=""):
+ """raw_input([prompt]) -> string
+
+Read a string from standard input. The trailing newline is stripped.
+If the user hits EOF (Unix: Ctl-D, Windows: Ctl-Z+Return), raise EOFError.
+On Unix, GNU readline is used if enabled. The prompt string, if given,
+is printed without a trailing newline before reading."""
+
+ sys.stderr.flush()
+
+ tty = STDIN.is_a_TTY() and STDOUT.is_a_TTY()
+
+ if RETURN_UNICODE:
+ if tty:
+ line_bytes = readline(prompt)
+ line = stdin_decode(line_bytes)
+ else:
+ line = stdio_readline(prompt)
+
+ else:
+ if tty:
+ line = readline(prompt)
+ else:
+ line_unicode = stdio_readline(prompt)
+ line = stdin_encode(line_unicode)
+
+ if line:
+		return line[:-1] # strip trailing "\n"
+ else:
+ raise EOFError
+
+def input(prompt=""):
+ """input([prompt]) -> value
+
+Equivalent to eval(raw_input(prompt))."""
+
+ string = stdin_decode(raw_input(prompt))
+
+ caller_frame = sys._getframe(1)
+ globals = caller_frame.f_globals
+ locals = caller_frame.f_locals
+
+ return eval(string, globals, locals)
+
+
+def enable(return_unicode=RETURN_UNICODE):
+ global RETURN_UNICODE
+ RETURN_UNICODE = return_unicode
+
+ builtins.raw_input = raw_input
+ builtins.input = input
+
+def disable():
+ builtins.raw_input = original_raw_input
+ builtins.input = original_input
diff --git a/contrib/deprecated/python/win-unicode-console/win_unicode_console/readline_hook.py b/contrib/deprecated/python/win-unicode-console/win_unicode_console/readline_hook.py
new file mode 100644
index 0000000000..c7688d9681
--- /dev/null
+++ b/contrib/deprecated/python/win-unicode-console/win_unicode_console/readline_hook.py
@@ -0,0 +1,149 @@
+
+from __future__ import print_function # PY2
+
+import sys
+import traceback
+import warnings
+import ctypes.util
+from ctypes import (pythonapi, cdll, cast,
+ c_char_p, c_void_p, c_size_t, CFUNCTYPE)
+
+from .info import WINDOWS
+
+try:
+ import pyreadline
+except ImportError:
+ pyreadline = None
+
+
+def get_libc():
+ if WINDOWS:
+ path = "msvcrt"
+ else:
+ path = ctypes.util.find_library("c")
+ if path is None:
+ raise RuntimeError("cannot locate libc")
+
+ return cdll[path]
+
+LIBC = get_libc()
+
+PyMem_Malloc = pythonapi.PyMem_Malloc
+PyMem_Malloc.restype = c_size_t
+PyMem_Malloc.argtypes = [c_size_t]
+
+strncpy = LIBC.strncpy
+strncpy.restype = c_char_p
+strncpy.argtypes = [c_char_p, c_char_p, c_size_t]
+
+HOOKFUNC = CFUNCTYPE(c_char_p, c_void_p, c_void_p, c_char_p)
+
+#PyOS_ReadlineFunctionPointer = c_void_p.in_dll(pythonapi, "PyOS_ReadlineFunctionPointer")
+
+
+def new_zero_terminated_string(b):
+ p = PyMem_Malloc(len(b) + 1)
+ strncpy(cast(p, c_char_p), b, len(b) + 1)
+ return p
+
+def check_encodings():
+ if sys.stdin.encoding != sys.stdout.encoding:
+ # raise RuntimeError("sys.stdin.encoding != sys.stdout.encoding, readline hook doesn't know, which one to use to decode prompt")
+
+ warnings.warn("sys.stdin.encoding == {!r}, whereas sys.stdout.encoding == {!r}, readline hook consumer may assume they are the same".format(sys.stdin.encoding, sys.stdout.encoding),
+ RuntimeWarning, stacklevel=3)
+
+def stdio_readline(prompt=""):
+ sys.stdout.write(prompt)
+ sys.stdout.flush()
+ return sys.stdin.readline()
+
+
+class ReadlineHookManager:
+ def __init__(self):
+ self.readline_wrapper_ref = HOOKFUNC(self.readline_wrapper)
+ self.address = cast(self.readline_wrapper_ref, c_void_p).value
+ #self.original_address = PyOS_ReadlineFunctionPointer.value
+ self.readline_hook = None
+
+ def readline_wrapper(self, stdin, stdout, prompt):
+ try:
+ try:
+ check_encodings()
+ except RuntimeError:
+ traceback.print_exc(file=sys.stderr)
+ try:
+ prompt = prompt.decode("utf-8")
+ except UnicodeDecodeError:
+ prompt = ""
+
+ else:
+ prompt = prompt.decode(sys.stdout.encoding)
+
+ try:
+ line = self.readline_hook(prompt)
+ except KeyboardInterrupt:
+ return 0
+ else:
+ return new_zero_terminated_string(line.encode(sys.stdin.encoding))
+
+ except:
+ self.restore_original()
+ print("Internal win_unicode_console error, disabling custom readline hook...", file=sys.stderr)
+ traceback.print_exc(file=sys.stderr)
+ return new_zero_terminated_string(b"\n")
+
+ def install_hook(self, hook):
+ self.readline_hook = hook
+ PyOS_ReadlineFunctionPointer.value = self.address
+
+ def restore_original(self):
+ self.readline_hook = None
+ PyOS_ReadlineFunctionPointer.value = self.original_address
+
+
+class PyReadlineManager:
+ def __init__(self):
+ self.original_codepage = pyreadline.unicode_helper.pyreadline_codepage
+
+ def set_codepage(self, codepage):
+ pyreadline.unicode_helper.pyreadline_codepage = codepage
+
+ def restore_original(self):
+ self.set_codepage(self.original_codepage)
+
+def pyreadline_is_active():
+ if not pyreadline:
+ return False
+
+ ref = pyreadline.console.console.readline_ref
+ if ref is None:
+ return False
+
+ return cast(ref, c_void_p).value == PyOS_ReadlineFunctionPointer.value
+
+
+manager = ReadlineHookManager()
+
+if pyreadline:
+ pyreadline_manager = PyReadlineManager()
+
+
+# PY3 # def enable(*, use_pyreadline=True):
+def enable(use_pyreadline=True):
+ check_encodings()
+
+ if use_pyreadline and pyreadline:
+ pyreadline_manager.set_codepage(sys.stdin.encoding)
+ # pyreadline assumes that encoding of all sys.stdio objects is the same
+ if not pyreadline_is_active():
+ manager.install_hook(stdio_readline)
+
+ else:
+ manager.install_hook(stdio_readline)
+
+def disable():
+ if pyreadline:
+ pyreadline_manager.restore_original()
+ else:
+ manager.restore_original()
diff --git a/contrib/deprecated/python/win-unicode-console/win_unicode_console/runner.py b/contrib/deprecated/python/win-unicode-console/win_unicode_console/runner.py
new file mode 100644
index 0000000000..f3c04e685e
--- /dev/null
+++ b/contrib/deprecated/python/win-unicode-console/win_unicode_console/runner.py
@@ -0,0 +1,199 @@
+
+from __future__ import print_function # PY2
+
+import __main__
+import argparse
+import sys
+import traceback
+import tokenize
+from ctypes import pythonapi, POINTER, c_long, cast
+from types import CodeType as Code
+
+from . import console, enable, disable
+from .info import PY2
+
+
+inspect_flag = cast(pythonapi.Py_InspectFlag, POINTER(c_long)).contents
+
+def set_inspect_flag(value):
+ inspect_flag.value = int(value)
+
+
+CODE_FIELDS = ["argcount", "kwonlyargcount", "nlocals", "stacksize",
+ "flags", "code", "consts", "names", "varnames", "filename",
+ "name", "firstlineno", "lnotab", "freevars", "cellvars"]
+if PY2:
+ CODE_FIELDS.remove("kwonlyargcount")
+
+def update_code(codeobj, **kwargs):
+ def field_values():
+ for field in CODE_FIELDS:
+ original_value = getattr(codeobj, "co_{}".format(field))
+ value = kwargs.get(field, original_value)
+ yield value
+
+ return Code(*field_values())
+
+def update_code_recursively(codeobj, **kwargs):
+ updated = {}
+
+ def update(codeobj, **kwargs):
+ result = updated.get(codeobj, None)
+ if result is not None:
+ return result
+
+ if any(isinstance(c, Code) for c in codeobj.co_consts):
+ consts = tuple(update(c, **kwargs) if isinstance(c, Code) else c
+ for c in codeobj.co_consts)
+ else:
+ consts = codeobj.co_consts
+
+ result = update_code(codeobj, consts=consts, **kwargs)
+ updated[codeobj] = result
+ return result
+
+ return update(codeobj, **kwargs)
+
+
+def get_code(path):
+ if PY2:
+ from .tokenize_open import read_source_lines
+ source = u"".join(read_source_lines(path))
+ else:
+ with tokenize.open(path) as f: # opens with detected source encoding
+ source = f.read()
+
+ try:
+ code = compile(source, path, "exec", dont_inherit=True)
+ except UnicodeEncodeError:
+ code = compile(source, "<encoding error>", "exec", dont_inherit=True)
+ if PY2:
+ path = path.encode("utf-8")
+ code = update_code_recursively(code, filename=path)
+		# so code contains correct filename (even if it contains Unicode)
+ # and tracebacks show contents of code lines
+
+ return code
+
+
+def print_exception_without_first_line(etype, value, tb, limit=None, file=None, chain=True):
+ if file is None:
+ file = sys.stderr
+
+ lines = iter(traceback.TracebackException(
+ type(value), value, tb, limit=limit).format(chain=chain))
+
+ next(lines)
+ for line in lines:
+ print(line, file=file, end="")
+
+
+def run_script(args):
+ sys.argv = [args.script] + args.script_arguments
+ path = args.script
+ __main__.__file__ = path
+
+ try:
+ code = get_code(path)
+ except Exception as e:
+ traceback.print_exception(e.__class__, e, None, file=sys.stderr)
+ else:
+ try:
+ exec(code, __main__.__dict__)
+ except BaseException as e:
+ if not sys.flags.inspect and isinstance(e, SystemExit):
+ raise
+
+ elif PY2: # Python 2 produces tracebacks in mixed encoding (!)
+ etype, e, tb = sys.exc_info()
+ for line in traceback.format_exception(etype, e, tb.tb_next):
+ line = line.decode("utf-8", "replace")
+ try:
+ sys.stderr.write(line)
+ except UnicodeEncodeError:
+ line = line.encode(sys.stderr.encoding, "backslashreplace")
+ sys.stderr.write(line)
+
+ sys.stderr.flush() # is this needed?
+
+ else: # PY3
+ traceback.print_exception(e.__class__, e, e.__traceback__.tb_next, file=sys.stderr)
+
+def run_init(args):
+ if args.init == "enable":
+ enable()
+ elif args.init == "disable":
+ disable()
+ elif args.init == "module":
+ __import__(args.module)
+ elif args.init == "none":
+ pass
+ else:
+ raise ValueError("unknown runner init mode {}".format(repr(args.init)))
+
+def run_with_custom_repl(args):
+ run_init(args)
+
+ if args.script:
+ run_script(args)
+
+ if sys.flags.interactive or not args.script:
+ if sys.flags.interactive and not args.script:
+ console.print_banner()
+ try:
+ console.enable()
+ finally:
+ set_inspect_flag(0)
+
+def run_with_standard_repl(args):
+ run_init(args)
+
+ if args.script:
+ run_script(args)
+
+ if sys.flags.interactive and not args.script:
+ console.print_banner()
+
+def run_arguments():
+ parser = argparse.ArgumentParser(description="Runs a script after customizable initialization. By default, win_unicode_console is enabled.")
+
+ init_group = parser.add_mutually_exclusive_group()
+ init_group.add_argument(
+ "-e", "--init-enable", dest="init", action="store_const", const="enable",
+ help="enable win_unicode_console on init (default)")
+ init_group.add_argument(
+ "-d", "--init-disable", dest="init", action="store_const", const="disable",
+ help="disable win_unicode_console on init")
+ init_group.add_argument(
+ "-m", "--init-module", dest="module",
+ help="import the given module on init")
+ init_group.add_argument(
+ "-n", "--no-init", dest="init", action="store_const", const="none",
+ help="do nothing special on init")
+ parser.set_defaults(init="enable")
+
+ repl_group = parser.add_mutually_exclusive_group()
+ repl_group.add_argument(
+ "-s", "--standard-repl", dest="use_repl", action="store_false",
+ help="use the standard Python REPL (default)")
+ repl_group.add_argument(
+ "-c", "--custom-repl", dest="use_repl", action="store_true",
+ help="use win_unicode_console.console REPL")
+ parser.set_defaults(use_repl=False)
+
+ parser.add_argument("script", nargs="?")
+ parser.add_argument("script_arguments", nargs=argparse.REMAINDER, metavar="script-arguments")
+
+ try:
+ args = parser.parse_args(sys.argv[1:])
+ except SystemExit:
+ set_inspect_flag(0) # don't go interactive after printing help
+ raise
+
+ if args.module:
+ args.init = "module"
+
+ if args.use_repl:
+ run_with_custom_repl(args)
+ else:
+ run_with_standard_repl(args)
diff --git a/contrib/deprecated/python/win-unicode-console/win_unicode_console/streams.py b/contrib/deprecated/python/win-unicode-console/win_unicode_console/streams.py
new file mode 100644
index 0000000000..6a5eda0c18
--- /dev/null
+++ b/contrib/deprecated/python/win-unicode-console/win_unicode_console/streams.py
@@ -0,0 +1,337 @@
+
+import io
+import sys
+import time
+from ctypes import byref, c_ulong
+
+from .buffer import get_buffer
+from .info import WINDOWS, PY2
+
+if PY2:
+ from .file_object import FileObject
+
+
+if WINDOWS:
+ from ctypes import WinDLL, get_last_error, set_last_error, WinError
+ from msvcrt import get_osfhandle
+
+ kernel32 = WinDLL("kernel32", use_last_error=True)
+ ReadConsoleW = kernel32.ReadConsoleW
+ WriteConsoleW = kernel32.WriteConsoleW
+ GetConsoleMode = kernel32.GetConsoleMode
+
+
+ERROR_SUCCESS = 0
+ERROR_INVALID_HANDLE = 6
+ERROR_NOT_ENOUGH_MEMORY = 8
+ERROR_OPERATION_ABORTED = 995
+
+EOF = b"\x1a"
+
+MAX_BYTES_WRITTEN = 32767 # arbitrary because WriteConsoleW's ability to write big buffers depends on heap usage
+
+
+class StandardStreamInfo:
+ def __init__(self, name, standard_fileno):
+ self.name = name
+ self.fileno = standard_fileno
+ self.handle = get_osfhandle(standard_fileno) if WINDOWS else None
+
+ def __repr__(self):
+ return "<{} '{}' fileno={} handle={}>".format(self.__class__.__name__, self.name, self.fileno, self.handle)
+
+ @property
+ def stream(self):
+ return getattr(sys, self.name)
+
+ def is_a_TTY(self):
+ # the test used in input()
+ try:
+ get_fileno = self.stream.fileno
+ except AttributeError: # e.g. StringIO in Python 2
+ return False
+
+ try:
+ fileno = get_fileno()
+ except io.UnsupportedOperation:
+ return False
+ else:
+ return fileno == self.fileno and self.stream.isatty()
+
+ def is_a_console(self):
+ if self.handle is None:
+ return False
+
+ if GetConsoleMode(self.handle, byref(c_ulong())):
+ return True
+ else:
+ last_error = get_last_error()
+ if last_error == ERROR_INVALID_HANDLE:
+ return False
+ else:
+ raise WinError(last_error)
+
+ def should_be_fixed(self):
+ if self.stream is None: # e.g. with IDLE
+ return True
+
+ return self.is_a_TTY() and self.is_a_console()
+
+STDIN = StandardStreamInfo("stdin", standard_fileno=0)
+STDOUT = StandardStreamInfo("stdout", standard_fileno=1)
+STDERR = StandardStreamInfo("stderr", standard_fileno=2)
+
+
+class _ReprMixin:
+ def __repr__(self):
+ modname = self.__class__.__module__
+
+ if PY2:
+ clsname = self.__class__.__name__
+ else:
+ clsname = self.__class__.__qualname__
+
+ attributes = []
+ for name in ["name", "encoding"]:
+ try:
+ value = getattr(self, name)
+ except AttributeError:
+ pass
+ else:
+ attributes.append("{}={}".format(name, repr(value)))
+
+ return "<{}.{} {}>".format(modname, clsname, " ".join(attributes))
+
+
+class WindowsConsoleRawIOBase(_ReprMixin, io.RawIOBase):
+ def __init__(self, name, handle, fileno):
+ self.name = name
+ self.handle = handle
+ self.file_no = fileno
+
+ def fileno(self):
+ return self.file_no
+
+ def isatty(self):
+ # PY3 # super().isatty() # for close check in default implementation
+ super(WindowsConsoleRawIOBase, self).isatty()
+ return True
+
+class WindowsConsoleRawReader(WindowsConsoleRawIOBase):
+ def readable(self):
+ return True
+
+ def readinto(self, b):
+ bytes_to_be_read = len(b)
+ if not bytes_to_be_read:
+ return 0
+ elif bytes_to_be_read % 2:
+ raise ValueError("cannot read odd number of bytes from UTF-16-LE encoded console")
+
+ buffer = get_buffer(b, writable=True)
+ code_units_to_be_read = bytes_to_be_read // 2
+ code_units_read = c_ulong()
+
+ set_last_error(ERROR_SUCCESS)
+ ReadConsoleW(self.handle, buffer, code_units_to_be_read, byref(code_units_read), None)
+ last_error = get_last_error()
+ if last_error == ERROR_OPERATION_ABORTED:
+ time.sleep(0.1) # wait for KeyboardInterrupt
+ if last_error != ERROR_SUCCESS:
+ raise WinError(last_error)
+
+ if buffer[0] == EOF:
+ return 0
+ else:
+ return 2 * code_units_read.value # bytes read
+
+class WindowsConsoleRawWriter(WindowsConsoleRawIOBase):
+ def writable(self):
+ return True
+
+ def write(self, b):
+ bytes_to_be_written = len(b)
+ buffer = get_buffer(b)
+ code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2
+ code_units_written = c_ulong()
+
+ if code_units_to_be_written == 0 != bytes_to_be_written:
+ raise ValueError("two-byte code units expected, just one byte given")
+
+ if not WriteConsoleW(self.handle, buffer, code_units_to_be_written, byref(code_units_written), None):
+ exc = WinError(get_last_error())
+ if exc.winerror == ERROR_NOT_ENOUGH_MEMORY:
+ exc.strerror += " Try to lower `win_unicode_console.streams.MAX_BYTES_WRITTEN`."
+ raise exc
+
+ return 2 * code_units_written.value # bytes written
+
+
+class _TextStreamWrapperMixin(_ReprMixin):
+ def __init__(self, base):
+ self.base = base
+
+ @property
+ def encoding(self):
+ return self.base.encoding
+
+ @property
+ def errors(self):
+ return self.base.errors
+
+ @property
+ def line_buffering(self):
+ return self.base.line_buffering
+
+ def seekable(self):
+ return self.base.seekable()
+
+ def readable(self):
+ return self.base.readable()
+
+ def writable(self):
+ return self.base.writable()
+
+ def flush(self):
+ self.base.flush()
+
+ def close(self):
+ self.base.close()
+
+ @property
+ def closed(self):
+ return self.base.closed
+
+ @property
+ def name(self):
+ return self.base.name
+
+ def fileno(self):
+ return self.base.fileno()
+
+ def isatty(self):
+ return self.base.isatty()
+
+ def write(self, s):
+ return self.base.write(s)
+
+ def tell(self):
+ return self.base.tell()
+
+ def truncate(self, pos=None):
+ return self.base.truncate(pos)
+
+ def seek(self, cookie, whence=0):
+ return self.base.seek(cookie, whence)
+
+ def read(self, size=None):
+ return self.base.read(size)
+
+ def __next__(self):
+ return next(self.base)
+
+ def readline(self, size=-1):
+ return self.base.readline(size)
+
+ @property
+ def newlines(self):
+ return self.base.newlines
+
+class TextStreamWrapper(_TextStreamWrapperMixin, io.TextIOBase):
+ pass
+
+class TextTranscodingWrapper(TextStreamWrapper):
+ encoding = None # disable the descriptor
+
+ def __init__(self, base, encoding):
+ # PY3 # super().__init__(base)
+ super(TextTranscodingWrapper, self).__init__(base)
+ self.encoding = encoding
+
+class StrStreamWrapper(TextStreamWrapper):
+ def write(self, s):
+ if isinstance(s, bytes):
+ s = s.decode(self.encoding)
+
+ self.base.write(s)
+
+if PY2:
+ class FileobjWrapper(_TextStreamWrapperMixin, file):
+ def __init__(self, base, f):
+ super(FileobjWrapper, self).__init__(base)
+ fileobj = self._fileobj = FileObject.from_file(self)
+ fileobj.set_encoding(base.encoding)
+ fileobj.copy_file_pointer(f)
+ fileobj.readable = base.readable()
+ fileobj.writable = base.writable()
+
+		# needed for the right interpretation of unicode literals in interactive mode when win_unicode_console is enabled in sitecustomize, since Py_Initialize changes the encoding afterwards
+ def _reset_encoding(self):
+ self._fileobj.set_encoding(self.base.encoding)
+
+ def readline(self, size=-1):
+ self._reset_encoding()
+ return self.base.readline(size)
+
+
+if WINDOWS:
+ stdin_raw = WindowsConsoleRawReader("<stdin>", STDIN.handle, STDIN.fileno)
+ stdout_raw = WindowsConsoleRawWriter("<stdout>", STDOUT.handle, STDOUT.fileno)
+ stderr_raw = WindowsConsoleRawWriter("<stderr>", STDERR.handle, STDERR.fileno)
+
+ stdin_text = io.TextIOWrapper(io.BufferedReader(stdin_raw), encoding="utf-16-le", line_buffering=True)
+ stdout_text = io.TextIOWrapper(io.BufferedWriter(stdout_raw), encoding="utf-16-le", line_buffering=True)
+ stderr_text = io.TextIOWrapper(io.BufferedWriter(stderr_raw), encoding="utf-16-le", line_buffering=True)
+
+ stdin_text_transcoded = TextTranscodingWrapper(stdin_text, encoding="utf-8")
+ stdout_text_transcoded = TextTranscodingWrapper(stdout_text, encoding="utf-8")
+ stderr_text_transcoded = TextTranscodingWrapper(stderr_text, encoding="utf-8")
+
+ stdout_text_str = StrStreamWrapper(stdout_text_transcoded)
+ stderr_text_str = StrStreamWrapper(stderr_text_transcoded)
+ if PY2:
+ stdin_text_fileobj = FileobjWrapper(stdin_text_transcoded, sys.__stdin__)
+ stdout_text_str_fileobj = FileobjWrapper(stdout_text_str, sys.__stdout__)
+
+
+def disable():
+ sys.stdin.flush()
+ sys.stdout.flush()
+ sys.stderr.flush()
+ sys.stdin = sys.__stdin__
+ sys.stdout = sys.__stdout__
+ sys.stderr = sys.__stderr__
+
+# PY3 # def enable(*, stdin=Ellipsis, stdout=Ellipsis, stderr=Ellipsis):
+def enable(stdin=Ellipsis, stdout=Ellipsis, stderr=Ellipsis):
+ if not WINDOWS:
+ return
+
+ # defaults
+ if PY2:
+ if stdin is Ellipsis:
+ stdin = stdin_text_fileobj
+ if stdout is Ellipsis:
+ stdout = stdout_text_str
+ if stderr is Ellipsis:
+ stderr = stderr_text_str
+ else: # transcoding because Python tokenizer cannot handle UTF-16
+ if stdin is Ellipsis:
+ stdin = stdin_text_transcoded
+ if stdout is Ellipsis:
+ stdout = stdout_text_transcoded
+ if stderr is Ellipsis:
+ stderr = stderr_text_transcoded
+
+ if stdin is not None and STDIN.should_be_fixed():
+ sys.stdin = stdin
+ if stdout is not None and STDOUT.should_be_fixed():
+ sys.stdout.flush()
+ sys.stdout = stdout
+ if stderr is not None and STDERR.should_be_fixed():
+ sys.stderr.flush()
+ sys.stderr = stderr
+
+# PY3 # def enable_only(*, stdin=None, stdout=None, stderr=None):
+def enable_only(stdin=None, stdout=None, stderr=None):
+ enable(stdin=stdin, stdout=stdout, stderr=stderr)
diff --git a/contrib/deprecated/python/win-unicode-console/win_unicode_console/tokenize_open.py b/contrib/deprecated/python/win-unicode-console/win_unicode_console/tokenize_open.py
new file mode 100644
index 0000000000..aa583dfa5f
--- /dev/null
+++ b/contrib/deprecated/python/win-unicode-console/win_unicode_console/tokenize_open.py
@@ -0,0 +1,162 @@
+"""Backport of tokenize.open from Python 3.5
+
+This is the exact Python 3.5 with the following differences:
+ - detect_encoding_ex is detect_encoding from Python 3.5 returning also a bool whether a cookie was found
+ - detect_encoding calls detect_encoding_ex, so that its signature is the same as in Python 3.5
+ - function read_source_lines was added
+"""
+
+from codecs import lookup, BOM_UTF8
+from io import TextIOWrapper, open as _builtin_open
+import re
+
+re_ASCII = 256 # not present in Python 2
+cookie_re = re.compile(r'^[ \t\f]*#.*?coding[:=][ \t]*([-\w.]+)', re_ASCII)
+blank_re = re.compile(br'^[ \t\f]*(?:[#\r\n]|$)', re_ASCII)
+
+
+def _get_normal_name(orig_enc):
+ """Imitates get_normal_name in tokenizer.c."""
+ # Only care about the first 12 characters.
+ enc = orig_enc[:12].lower().replace("_", "-")
+ if enc == "utf-8" or enc.startswith("utf-8-"):
+ return "utf-8"
+ if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
+ enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
+ return "iso-8859-1"
+ return orig_enc
+
+
+def detect_encoding(readline):
+ """
+ The detect_encoding() function is used to detect the encoding that should
+ be used to decode a Python source file. It requires one argument, readline,
+ in the same way as the tokenize() generator.
+
+ It will call readline a maximum of twice, and return the encoding used
+ (as a string) and a list of any lines (left as bytes) it has read in.
+
+ It detects the encoding from the presence of a utf-8 bom or an encoding
+ cookie as specified in pep-0263. If both a bom and a cookie are present,
+ but disagree, a SyntaxError will be raised. If the encoding cookie is an
+ invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found,
+ 'utf-8-sig' is returned.
+
+ If no encoding is specified, then the default of 'utf-8' will be returned.
+ """
+
+ return detect_encoding_ex(readline)[:2]
+
+
+def detect_encoding_ex(readline):
+ try:
+ filename = readline.__self__.name
+ except AttributeError:
+ filename = None
+ bom_found = False
+ encoding = None
+ default = 'utf-8'
+ def read_or_stop():
+ try:
+ return readline()
+ except StopIteration:
+ return b''
+
+ def find_cookie(line):
+ try:
+ # Decode as UTF-8. Either the line is an encoding declaration,
+ # in which case it should be pure ASCII, or it must be UTF-8
+ # per default encoding.
+ line_string = line.decode('utf-8')
+ except UnicodeDecodeError:
+ msg = "invalid or missing encoding declaration"
+ if filename is not None:
+ msg = '{} for {!r}'.format(msg, filename)
+ raise SyntaxError(msg)
+
+ match = cookie_re.match(line_string)
+ if not match:
+ return None
+ encoding = _get_normal_name(match.group(1))
+ try:
+ codec = lookup(encoding)
+ except LookupError:
+ # This behaviour mimics the Python interpreter
+ if filename is None:
+ msg = "unknown encoding: " + encoding
+ else:
+ msg = "unknown encoding for {!r}: {}".format(filename,
+ encoding)
+ raise SyntaxError(msg)
+
+ if bom_found:
+ if encoding != 'utf-8':
+ # This behaviour mimics the Python interpreter
+ if filename is None:
+ msg = 'encoding problem: utf-8'
+ else:
+ msg = 'encoding problem for {!r}: utf-8'.format(filename)
+ raise SyntaxError(msg)
+ encoding += '-sig'
+ return encoding
+
+ first = read_or_stop()
+ if first.startswith(BOM_UTF8):
+ bom_found = True
+ first = first[3:]
+ default = 'utf-8-sig'
+ if not first:
+ return default, [], False
+
+ encoding = find_cookie(first)
+ if encoding:
+ return encoding, [first], True
+ if not blank_re.match(first):
+ return default, [first], False
+
+ second = read_or_stop()
+ if not second:
+ return default, [first], False
+
+ encoding = find_cookie(second)
+ if encoding:
+ return encoding, [first, second], True
+
+ return default, [first, second], False
+
+
+def open(filename):
+ """Open a file in read only mode using the encoding detected by
+ detect_encoding().
+ """
+ buffer = _builtin_open(filename, 'rb')
+ try:
+ encoding, lines = detect_encoding(buffer.readline)
+ buffer.seek(0)
+ text = TextIOWrapper(buffer, encoding, line_buffering=True)
+ text.mode = 'r'
+ return text
+ except:
+ buffer.close()
+ raise
+
+def read_source_lines(filename):
+ buffer = _builtin_open(filename, 'rb')
+ try:
+ encoding, lines, cookie_present = detect_encoding_ex(buffer.readline)
+ buffer.seek(0)
+ text = TextIOWrapper(buffer, encoding, line_buffering=True)
+ text.mode = 'r'
+ except:
+ buffer.close()
+ raise
+
+ with text:
+ if cookie_present:
+ for i in lines:
+ yield text.readline().replace("coding", "Coding")
+				# so compile() won't complain about encoding declaration in a Unicode string
+ # see 2.7/Python/ast.c:228
+
+ for line in text:
+ yield line
diff --git a/contrib/deprecated/python/win-unicode-console/win_unicode_console/unicode_argv.py b/contrib/deprecated/python/win-unicode-console/win_unicode_console/unicode_argv.py
new file mode 100644
index 0000000000..d23bc05f12
--- /dev/null
+++ b/contrib/deprecated/python/win-unicode-console/win_unicode_console/unicode_argv.py
@@ -0,0 +1,79 @@
+"""Get Unicode argv strings in Python 2 on Windows
+
+get_full_unicode_argv based on
+http://code.activestate.com/recipes/572200/
+
+argv_setter_hook based on
+https://mail.python.org/pipermail/python-list/2016-June/710183.html
+"""
+
+import sys
+from ctypes import WinDLL, c_int, POINTER, byref
+from ctypes.wintypes import LPCWSTR, LPWSTR
+
+kernel32 = WinDLL("kernel32", use_last_error=True)
+shell32 = WinDLL("shell32", use_last_error=True)
+
+GetCommandLineW = kernel32.GetCommandLineW
+GetCommandLineW.argtypes = ()
+GetCommandLineW.restype = LPCWSTR
+
+CommandLineToArgvW = shell32.CommandLineToArgvW
+CommandLineToArgvW.argtypes = (LPCWSTR, POINTER(c_int))
+CommandLineToArgvW.restype = POINTER(LPWSTR)
+
+LocalFree = kernel32.LocalFree
+
+
+def get_full_unicode_argv():
+ cmd = GetCommandLineW()
+ argc = c_int(0)
+ argv = CommandLineToArgvW(cmd, byref(argc))
+ py_argv = [arg for i, arg in zip(range(argc.value), argv)]
+ LocalFree(argv)
+ return py_argv
+
+def get_unicode_argv():
+ if original_argv == [""]:
+ return [u""]
+
+ new_argv = get_full_unicode_argv()[-len(original_argv):]
+
+ if original_argv[0] == "-c":
+ new_argv[0] = u"-c"
+
+ return new_argv
+
+
+original_argv = None
+
+def argv_setter_hook(path):
+ global original_argv
+
+ if original_argv is not None: # already got it
+ raise ImportError
+
+ try:
+ original_argv = sys.argv
+ except AttributeError:
+ pass
+ else:
+ enable()
+ finally:
+ raise ImportError
+
+def enable():
+ global original_argv
+
+ if original_argv is None:
+ try:
+ original_argv = sys.argv
+ except AttributeError: # in sitecustomize in Python 2
+ sys.path_hooks[:0] = [argv_setter_hook]
+ return
+
+ sys.argv = get_unicode_argv()
+
+def disable():
+ if original_argv is not None:
+ sys.argv = original_argv
diff --git a/contrib/deprecated/python/win-unicode-console/ya.make b/contrib/deprecated/python/win-unicode-console/ya.make
new file mode 100644
index 0000000000..b56f61a378
--- /dev/null
+++ b/contrib/deprecated/python/win-unicode-console/ya.make
@@ -0,0 +1,40 @@
+# Generated by devtools/yamaker (pypi).
+
+PY2_LIBRARY()
+
+VERSION(0.5)
+
+LICENSE(MIT)
+
+PEERDIR(
+ library/python/symbols/win_unicode_console
+)
+
+NO_LINT()
+
+NO_CHECK_IMPORTS(
+ win_unicode_console.runner
+)
+
+PY_SRCS(
+ TOP_LEVEL
+ win_unicode_console/__init__.py
+ win_unicode_console/buffer.py
+ win_unicode_console/console.py
+ win_unicode_console/file_object.py
+ win_unicode_console/info.py
+ win_unicode_console/raw_input.py
+ win_unicode_console/readline_hook.py
+ win_unicode_console/runner.py
+ win_unicode_console/streams.py
+ win_unicode_console/tokenize_open.py
+ win_unicode_console/unicode_argv.py
+)
+
+RESOURCE_FILES(
+ PREFIX contrib/deprecated/python/win-unicode-console/
+ .dist-info/METADATA
+ .dist-info/top_level.txt
+)
+
+END()