author     shadchin <shadchin@yandex-team.ru>            2022-02-10 16:44:30 +0300
committer  Daniil Cherednik <dcherednik@yandex-team.ru>  2022-02-10 16:44:30 +0300
commit     2598ef1d0aee359b4b6d5fdd1758916d5907d04f (patch)
tree       012bb94d777798f1f56ac1cec429509766d05181 /contrib/tools/python3/src/Lib
parent     6751af0b0c1b952fede40b19b71da8025b5d8bcf (diff)
download   ydb-2598ef1d0aee359b4b6d5fdd1758916d5907d04f.tar.gz
Restoring authorship annotation for <shadchin@yandex-team.ru>. Commit 1 of 2.
Diffstat (limited to 'contrib/tools/python3/src/Lib')
-rw-r--r--  contrib/tools/python3/src/Lib/__future__.py | 32
-rw-r--r--  contrib/tools/python3/src/Lib/_aix_support.py | 182
-rw-r--r--  contrib/tools/python3/src/Lib/_bootsubprocess.py | 194
-rw-r--r--  contrib/tools/python3/src/Lib/_collections_abc.py | 248
-rw-r--r--  contrib/tools/python3/src/Lib/_osx_support.py | 194
-rw-r--r--  contrib/tools/python3/src/Lib/_py_abc.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/_pydecimal.py | 12
-rw-r--r--  contrib/tools/python3/src/Lib/_pyio.py | 198
-rw-r--r--  contrib/tools/python3/src/Lib/_strptime.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/_threading_local.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/_weakrefset.py | 26
-rw-r--r--  contrib/tools/python3/src/Lib/abc.py | 52
-rw-r--r--  contrib/tools/python3/src/Lib/aifc.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/antigravity.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/argparse.py | 298
-rw-r--r--  contrib/tools/python3/src/Lib/ast.py | 2636
-rw-r--r--  contrib/tools/python3/src/Lib/asynchat.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/__init__.py | 8
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/__main__.py | 250
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/base_events.py | 454
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/base_futures.py | 44
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/base_subprocess.py | 10
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/base_tasks.py | 28
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/coroutines.py | 8
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/events.py | 70
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/exceptions.py | 116
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/futures.py | 102
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/locks.py | 82
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/proactor_events.py | 398
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/protocols.py | 20
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/queues.py | 16
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/runners.py | 12
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/selector_events.py | 332
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/sslproto.py | 38
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/staggered.py | 298
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/streams.py | 192
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/subprocess.py | 50
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/tasks.py | 408
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/threads.py | 50
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/transports.py | 44
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/trsock.py | 412
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/unix_events.py | 778
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/windows_events.py | 166
-rw-r--r--  contrib/tools/python3/src/Lib/asyncio/windows_utils.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/asyncore.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/base64.py | 6
-rw-r--r--  contrib/tools/python3/src/Lib/bdb.py | 14
-rw-r--r--  contrib/tools/python3/src/Lib/binhex.py | 70
-rw-r--r--  contrib/tools/python3/src/Lib/bisect.py | 8
-rw-r--r--  contrib/tools/python3/src/Lib/bz2.py | 26
-rw-r--r--  contrib/tools/python3/src/Lib/cProfile.py | 38
-rw-r--r--  contrib/tools/python3/src/Lib/calendar.py | 12
-rw-r--r--  contrib/tools/python3/src/Lib/cgi.py | 52
-rw-r--r--  contrib/tools/python3/src/Lib/cgitb.py | 12
-rw-r--r--  contrib/tools/python3/src/Lib/code.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/codecs.py | 44
-rw-r--r--  contrib/tools/python3/src/Lib/codeop.py | 54
-rw-r--r--  contrib/tools/python3/src/Lib/collections/__init__.py | 796
-rw-r--r--  contrib/tools/python3/src/Lib/collections/abc.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/compileall.py | 390
-rw-r--r--  contrib/tools/python3/src/Lib/concurrent/futures/__init__.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/concurrent/futures/_base.py | 100
-rw-r--r--  contrib/tools/python3/src/Lib/concurrent/futures/process.py | 644
-rw-r--r--  contrib/tools/python3/src/Lib/concurrent/futures/thread.py | 114
-rw-r--r--  contrib/tools/python3/src/Lib/configparser.py | 22
-rw-r--r--  contrib/tools/python3/src/Lib/contextlib.py | 134
-rw-r--r--  contrib/tools/python3/src/Lib/copy.py | 26
-rw-r--r--  contrib/tools/python3/src/Lib/copyreg.py | 26
-rw-r--r--  contrib/tools/python3/src/Lib/crypt.py | 38
-rw-r--r--  contrib/tools/python3/src/Lib/csv.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/ctypes/__init__.py | 54
-rw-r--r--  contrib/tools/python3/src/Lib/ctypes/_aix.py | 6
-rw-r--r--  contrib/tools/python3/src/Lib/ctypes/macholib/dyld.py | 28
-rw-r--r--  contrib/tools/python3/src/Lib/ctypes/util.py | 50
-rw-r--r--  contrib/tools/python3/src/Lib/curses/__init__.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/dataclasses.py | 178
-rw-r--r--  contrib/tools/python3/src/Lib/datetime.py | 268
-rw-r--r--  contrib/tools/python3/src/Lib/dbm/__init__.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/dbm/dumb.py | 16
-rw-r--r--  contrib/tools/python3/src/Lib/difflib.py | 50
-rw-r--r--  contrib/tools/python3/src/Lib/dis.py | 44
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/_msvccompiler.py | 32
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/archive_util.py | 22
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/bcppcompiler.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/ccompiler.py | 10
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/command/bdist_msi.py | 42
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/command/bdist_rpm.py | 6
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/command/bdist_wininst.py | 98
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/command/build.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/command/build_ext.py | 78
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/command/build_py.py | 6
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/command/check.py | 20
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/command/config.py | 34
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/command/install.py | 8
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/command/sdist.py | 14
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/command/upload.py | 56
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/msvc9compiler.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/msvccompiler.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/spawn.py | 132
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/sysconfig.py | 38
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/unixccompiler.py | 8
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/util.py | 70
-rw-r--r--  contrib/tools/python3/src/Lib/distutils/version.py | 8
-rw-r--r--  contrib/tools/python3/src/Lib/doctest.py | 28
-rw-r--r--  contrib/tools/python3/src/Lib/email/_header_value_parser.py | 454
-rw-r--r--  contrib/tools/python3/src/Lib/email/_parseaddr.py | 28
-rw-r--r--  contrib/tools/python3/src/Lib/email/charset.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/email/contentmanager.py | 18
-rw-r--r--  contrib/tools/python3/src/Lib/email/feedparser.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/email/generator.py | 10
-rw-r--r--  contrib/tools/python3/src/Lib/email/header.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/email/headerregistry.py | 62
-rw-r--r--  contrib/tools/python3/src/Lib/email/message.py | 32
-rw-r--r--  contrib/tools/python3/src/Lib/email/policy.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/email/utils.py | 10
-rw-r--r--  contrib/tools/python3/src/Lib/encodings/__init__.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/encodings/aliases.py | 8
-rw-r--r--  contrib/tools/python3/src/Lib/encodings/punycode.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/encodings/raw_unicode_escape.py | 10
-rw-r--r--  contrib/tools/python3/src/Lib/encodings/unicode_escape.py | 10
-rw-r--r--  contrib/tools/python3/src/Lib/encodings/uu_codec.py | 8
-rw-r--r--  contrib/tools/python3/src/Lib/ensurepip/__init__.py | 60
-rw-r--r--  contrib/tools/python3/src/Lib/enum.py | 592
-rw-r--r--  contrib/tools/python3/src/Lib/filecmp.py | 18
-rw-r--r--  contrib/tools/python3/src/Lib/fileinput.py | 42
-rw-r--r--  contrib/tools/python3/src/Lib/fnmatch.py | 126
-rw-r--r--  contrib/tools/python3/src/Lib/fractions.py | 116
-rw-r--r--  contrib/tools/python3/src/Lib/ftplib.py | 70
-rw-r--r--  contrib/tools/python3/src/Lib/functools.py | 450
-rw-r--r--  contrib/tools/python3/src/Lib/genericpath.py | 20
-rw-r--r--  contrib/tools/python3/src/Lib/getpass.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/gettext.py | 334
-rw-r--r--  contrib/tools/python3/src/Lib/glob.py | 18
-rw-r--r--  contrib/tools/python3/src/Lib/graphlib.py | 492
-rw-r--r--  contrib/tools/python3/src/Lib/gzip.py | 130
-rw-r--r--  contrib/tools/python3/src/Lib/hashlib.py | 54
-rw-r--r--  contrib/tools/python3/src/Lib/heapq.py | 8
-rw-r--r--  contrib/tools/python3/src/Lib/hmac.py | 132
-rw-r--r--  contrib/tools/python3/src/Lib/html/parser.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/http/__init__.py | 26
-rw-r--r--  contrib/tools/python3/src/Lib/http/client.py | 272
-rw-r--r--  contrib/tools/python3/src/Lib/http/cookiejar.py | 36
-rw-r--r--  contrib/tools/python3/src/Lib/http/cookies.py | 10
-rw-r--r--  contrib/tools/python3/src/Lib/http/server.py | 130
-rw-r--r--  contrib/tools/python3/src/Lib/imaplib.py | 122
-rw-r--r--  contrib/tools/python3/src/Lib/imghdr.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/importlib/__init__.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/importlib/_bootstrap.py | 236
-rw-r--r--  contrib/tools/python3/src/Lib/importlib/_bootstrap_external.py | 432
-rw-r--r--  contrib/tools/python3/src/Lib/importlib/_common.py | 124
-rw-r--r--  contrib/tools/python3/src/Lib/importlib/abc.py | 174
-rw-r--r--  contrib/tools/python3/src/Lib/importlib/metadata.py | 1190
-rw-r--r--  contrib/tools/python3/src/Lib/importlib/resources.py | 122
-rw-r--r--  contrib/tools/python3/src/Lib/importlib/util.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/inspect.py | 346
-rw-r--r--  contrib/tools/python3/src/Lib/io.py | 6
-rw-r--r--  contrib/tools/python3/src/Lib/ipaddress.py | 350
-rw-r--r--  contrib/tools/python3/src/Lib/json/__init__.py | 6
-rw-r--r--  contrib/tools/python3/src/Lib/json/encoder.py | 8
-rw-r--r--  contrib/tools/python3/src/Lib/json/tool.py | 106
-rw-r--r--  contrib/tools/python3/src/Lib/keyword.py | 106
-rw-r--r--  contrib/tools/python3/src/Lib/lib2to3/__init__.py | 16
-rw-r--r--  contrib/tools/python3/src/Lib/lib2to3/fixer_util.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/lib2to3/fixes/fix_apply.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/lib2to3/fixes/fix_filter.py | 14
-rw-r--r--  contrib/tools/python3/src/Lib/lib2to3/fixes/fix_intern.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/lib2to3/fixes/fix_metaclass.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/lib2to3/fixes/fix_paren.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/lib2to3/fixes/fix_reload.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/lib2to3/fixes/fix_urllib.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/lib2to3/main.py | 14
-rw-r--r--  contrib/tools/python3/src/Lib/lib2to3/pgen2/grammar.py | 6
-rw-r--r--  contrib/tools/python3/src/Lib/lib2to3/pgen2/parse.py | 6
-rw-r--r--  contrib/tools/python3/src/Lib/lib2to3/pgen2/token.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/lib2to3/pgen2/tokenize.py | 14
-rw-r--r--  contrib/tools/python3/src/Lib/lib2to3/pygram.py | 6
-rw-r--r--  contrib/tools/python3/src/Lib/lib2to3/refactor.py | 24
-rw-r--r--  contrib/tools/python3/src/Lib/linecache.py | 44
-rw-r--r--  contrib/tools/python3/src/Lib/locale.py | 12
-rw-r--r--  contrib/tools/python3/src/Lib/logging/__init__.py | 360
-rw-r--r--  contrib/tools/python3/src/Lib/logging/config.py | 34
-rw-r--r--  contrib/tools/python3/src/Lib/logging/handlers.py | 226
-rw-r--r--  contrib/tools/python3/src/Lib/lzma.py | 24
-rw-r--r--  contrib/tools/python3/src/Lib/mailbox.py | 12
-rw-r--r--  contrib/tools/python3/src/Lib/mailcap.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/mimetypes.py | 286
-rw-r--r--  contrib/tools/python3/src/Lib/modulefinder.py | 158
-rw-r--r--  contrib/tools/python3/src/Lib/msilib/__init__.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/msilib/schema.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/__init__.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/connection.py | 56
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/context.py | 24
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/dummy/__init__.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/forkserver.py | 60
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/heap.py | 182
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/managers.py | 358
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/pool.py | 398
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/popen_fork.py | 16
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/popen_forkserver.py | 10
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/popen_spawn_posix.py | 18
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/popen_spawn_win32.py | 18
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/process.py | 144
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/queues.py | 44
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/reduction.py | 14
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/resource_sharer.py | 6
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/resource_tracker.py | 462
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/shared_memory.py | 1064
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/spawn.py | 34
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/synchronize.py | 10
-rw-r--r--  contrib/tools/python3/src/Lib/multiprocessing/util.py | 146
-rw-r--r--  contrib/tools/python3/src/Lib/nntplib.py | 108
-rw-r--r--  contrib/tools/python3/src/Lib/ntpath.py | 320
-rw-r--r--  contrib/tools/python3/src/Lib/nturl2path.py | 18
-rw-r--r--  contrib/tools/python3/src/Lib/numbers.py | 12
-rw-r--r--  contrib/tools/python3/src/Lib/opcode.py | 40
-rw-r--r--  contrib/tools/python3/src/Lib/operator.py | 12
-rw-r--r--  contrib/tools/python3/src/Lib/optparse.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/os.py | 176
-rw-r--r--  contrib/tools/python3/src/Lib/pathlib.py | 438
-rw-r--r--  contrib/tools/python3/src/Lib/pdb.py | 136
-rw-r--r--  contrib/tools/python3/src/Lib/pickle.py | 452
-rw-r--r--  contrib/tools/python3/src/Lib/pickletools.py | 158
-rw-r--r--  contrib/tools/python3/src/Lib/pkgutil.py | 142
-rw-r--r--  contrib/tools/python3/src/Lib/platform.py | 426
-rw-r--r--  contrib/tools/python3/src/Lib/plistlib.py | 174
-rw-r--r--  contrib/tools/python3/src/Lib/poplib.py | 12
-rw-r--r--  contrib/tools/python3/src/Lib/posixpath.py | 18
-rw-r--r--  contrib/tools/python3/src/Lib/pprint.py | 118
-rw-r--r--  contrib/tools/python3/src/Lib/profile.py | 66
-rw-r--r--  contrib/tools/python3/src/Lib/pstats.py | 122
-rw-r--r--  contrib/tools/python3/src/Lib/pty.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/py_compile.py | 24
-rw-r--r--  contrib/tools/python3/src/Lib/pyclbr.py | 12
-rw-r--r--  contrib/tools/python3/src/Lib/pydoc.py | 350
-rw-r--r--  contrib/tools/python3/src/Lib/pydoc_data/topics.py | 4518
-rw-r--r--  contrib/tools/python3/src/Lib/queue.py | 12
-rw-r--r--  contrib/tools/python3/src/Lib/quopri.py | 8
-rw-r--r--  contrib/tools/python3/src/Lib/random.py | 750
-rw-r--r--  contrib/tools/python3/src/Lib/re.py | 74
-rw-r--r--  contrib/tools/python3/src/Lib/rlcompleter.py | 26
-rw-r--r--  contrib/tools/python3/src/Lib/runpy.py | 16
-rw-r--r--  contrib/tools/python3/src/Lib/secrets.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/selectors.py | 64
-rw-r--r--  contrib/tools/python3/src/Lib/shlex.py | 36
-rw-r--r--  contrib/tools/python3/src/Lib/shutil.py | 802
-rw-r--r--  contrib/tools/python3/src/Lib/signal.py | 42
-rw-r--r--  contrib/tools/python3/src/Lib/site.py | 38
-rw-r--r--  contrib/tools/python3/src/Lib/smtpd.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/smtplib.py | 84
-rw-r--r--  contrib/tools/python3/src/Lib/sndhdr.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/socket.py | 440
-rw-r--r--  contrib/tools/python3/src/Lib/socketserver.py | 82
-rw-r--r--  contrib/tools/python3/src/Lib/sqlite3/__init__.py | 68
-rw-r--r--  contrib/tools/python3/src/Lib/sre_compile.py | 20
-rw-r--r--  contrib/tools/python3/src/Lib/sre_constants.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/sre_parse.py | 104
-rw-r--r--  contrib/tools/python3/src/Lib/ssl.py | 440
-rw-r--r--  contrib/tools/python3/src/Lib/stat.py | 34
-rw-r--r--  contrib/tools/python3/src/Lib/statistics.py | 1054
-rw-r--r--  contrib/tools/python3/src/Lib/string.py | 62
-rw-r--r--  contrib/tools/python3/src/Lib/subprocess.py | 942
-rw-r--r--  contrib/tools/python3/src/Lib/sunau.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/symbol.py | 116
-rw-r--r--  contrib/tools/python3/src/Lib/symtable.py | 54
-rw-r--r--  contrib/tools/python3/src/Lib/sysconfig.py | 60
-rw-r--r--  contrib/tools/python3/src/Lib/tarfile.py | 218
-rw-r--r--  contrib/tools/python3/src/Lib/telnetlib.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/tempfile.py | 146
-rw-r--r--  contrib/tools/python3/src/Lib/textwrap.py | 6
-rw-r--r--  contrib/tools/python3/src/Lib/threading.py | 622
-rw-r--r--  contrib/tools/python3/src/Lib/timeit.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/token.py | 126
-rw-r--r--  contrib/tools/python3/src/Lib/tokenize.py | 44
-rw-r--r--  contrib/tools/python3/src/Lib/trace.py | 76
-rw-r--r--  contrib/tools/python3/src/Lib/traceback.py | 150
-rw-r--r--  contrib/tools/python3/src/Lib/tracemalloc.py | 68
-rw-r--r--  contrib/tools/python3/src/Lib/turtle.py | 34
-rw-r--r--  contrib/tools/python3/src/Lib/types.py | 26
-rw-r--r--  contrib/tools/python3/src/Lib/typing.py | 2336
-rw-r--r--  contrib/tools/python3/src/Lib/unittest/__init__.py | 42
-rw-r--r--  contrib/tools/python3/src/Lib/unittest/_log.py | 138
-rw-r--r--  contrib/tools/python3/src/Lib/unittest/async_case.py | 336
-rw-r--r--  contrib/tools/python3/src/Lib/unittest/case.py | 276
-rw-r--r--  contrib/tools/python3/src/Lib/unittest/mock.py | 1406
-rw-r--r--  contrib/tools/python3/src/Lib/unittest/result.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/unittest/runner.py | 18
-rw-r--r--  contrib/tools/python3/src/Lib/unittest/suite.py | 196
-rw-r--r--  contrib/tools/python3/src/Lib/urllib/parse.py | 394
-rw-r--r--  contrib/tools/python3/src/Lib/urllib/request.py | 292
-rw-r--r--  contrib/tools/python3/src/Lib/urllib/response.py | 8
-rw-r--r--  contrib/tools/python3/src/Lib/urllib/robotparser.py | 40
-rw-r--r--  contrib/tools/python3/src/Lib/uu.py | 14
-rw-r--r--  contrib/tools/python3/src/Lib/uuid.py | 426
-rw-r--r--  contrib/tools/python3/src/Lib/venv/__init__.py | 208
-rw-r--r--  contrib/tools/python3/src/Lib/warnings.py | 30
-rw-r--r--  contrib/tools/python3/src/Lib/wave.py | 16
-rw-r--r--  contrib/tools/python3/src/Lib/weakref.py | 158
-rw-r--r--  contrib/tools/python3/src/Lib/webbrowser.py | 58
-rw-r--r--  contrib/tools/python3/src/Lib/wsgiref/handlers.py | 32
-rw-r--r--  contrib/tools/python3/src/Lib/wsgiref/simple_server.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/wsgiref/util.py | 20
-rw-r--r--  contrib/tools/python3/src/Lib/wsgiref/validate.py | 4
-rw-r--r--  contrib/tools/python3/src/Lib/xml/dom/expatbuilder.py | 8
-rw-r--r--  contrib/tools/python3/src/Lib/xml/dom/minidom.py | 86
-rw-r--r--  contrib/tools/python3/src/Lib/xml/dom/pulldom.py | 14
-rw-r--r--  contrib/tools/python3/src/Lib/xml/etree/ElementInclude.py | 102
-rw-r--r--  contrib/tools/python3/src/Lib/xml/etree/ElementPath.py | 206
-rw-r--r--  contrib/tools/python3/src/Lib/xml/etree/ElementTree.py | 1050
-rw-r--r--  contrib/tools/python3/src/Lib/xml/etree/__init__.py | 2
-rw-r--r--  contrib/tools/python3/src/Lib/xml/sax/__init__.py | 10
-rw-r--r--  contrib/tools/python3/src/Lib/xml/sax/expatreader.py | 14
-rw-r--r--  contrib/tools/python3/src/Lib/xml/sax/saxutils.py | 6
-rw-r--r--  contrib/tools/python3/src/Lib/xmlrpc/client.py | 82
-rw-r--r--  contrib/tools/python3/src/Lib/xmlrpc/server.py | 8
-rw-r--r--  contrib/tools/python3/src/Lib/zipfile.py | 800
-rw-r--r--  contrib/tools/python3/src/Lib/zipimport.py | 1584
-rw-r--r--  contrib/tools/python3/src/Lib/zoneinfo/__init__.py | 62
-rw-r--r--  contrib/tools/python3/src/Lib/zoneinfo/_common.py | 330
-rw-r--r--  contrib/tools/python3/src/Lib/zoneinfo/_tzpath.py | 350
-rw-r--r--  contrib/tools/python3/src/Lib/zoneinfo/_zoneinfo.py | 1504
320 files changed, 25768 insertions, 25768 deletions
diff --git a/contrib/tools/python3/src/Lib/__future__.py b/contrib/tools/python3/src/Lib/__future__.py
index 0e7b555234..9327d3c464 100644
--- a/contrib/tools/python3/src/Lib/__future__.py
+++ b/contrib/tools/python3/src/Lib/__future__.py
@@ -66,20 +66,20 @@ __all__ = ["all_feature_names"] + all_feature_names
# code.h and used by compile.h, so that an editor search will find them here.
# However, they're not exported in __all__, because they don't really belong to
# this module.
-CO_NESTED = 0x0010 # nested_scopes
-CO_GENERATOR_ALLOWED = 0 # generators (obsolete, was 0x1000)
-CO_FUTURE_DIVISION = 0x20000 # division
-CO_FUTURE_ABSOLUTE_IMPORT = 0x40000 # perform absolute imports by default
-CO_FUTURE_WITH_STATEMENT = 0x80000 # with statement
-CO_FUTURE_PRINT_FUNCTION = 0x100000 # print function
-CO_FUTURE_UNICODE_LITERALS = 0x200000 # unicode string literals
-CO_FUTURE_BARRY_AS_BDFL = 0x400000
-CO_FUTURE_GENERATOR_STOP = 0x800000 # StopIteration becomes RuntimeError in generators
-CO_FUTURE_ANNOTATIONS = 0x1000000 # annotations become strings at runtime
-
-
+CO_NESTED = 0x0010 # nested_scopes
+CO_GENERATOR_ALLOWED = 0 # generators (obsolete, was 0x1000)
+CO_FUTURE_DIVISION = 0x20000 # division
+CO_FUTURE_ABSOLUTE_IMPORT = 0x40000 # perform absolute imports by default
+CO_FUTURE_WITH_STATEMENT = 0x80000 # with statement
+CO_FUTURE_PRINT_FUNCTION = 0x100000 # print function
+CO_FUTURE_UNICODE_LITERALS = 0x200000 # unicode string literals
+CO_FUTURE_BARRY_AS_BDFL = 0x400000
+CO_FUTURE_GENERATOR_STOP = 0x800000 # StopIteration becomes RuntimeError in generators
+CO_FUTURE_ANNOTATIONS = 0x1000000 # annotations become strings at runtime
+
+
class _Feature:
-
+
def __init__(self, optionalRelease, mandatoryRelease, compiler_flag):
self.optional = optionalRelease
self.mandatory = mandatoryRelease
@@ -105,7 +105,7 @@ class _Feature:
self.mandatory,
self.compiler_flag))
-
+
nested_scopes = _Feature((2, 1, 0, "beta", 1),
(2, 2, 0, "alpha", 0),
CO_NESTED)
@@ -135,7 +135,7 @@ unicode_literals = _Feature((2, 6, 0, "alpha", 2),
CO_FUTURE_UNICODE_LITERALS)
barry_as_FLUFL = _Feature((3, 1, 0, "alpha", 2),
- (4, 0, 0, "alpha", 0),
+ (4, 0, 0, "alpha", 0),
CO_FUTURE_BARRY_AS_BDFL)
generator_stop = _Feature((3, 5, 0, "beta", 1),
@@ -143,5 +143,5 @@ generator_stop = _Feature((3, 5, 0, "beta", 1),
CO_FUTURE_GENERATOR_STOP)
annotations = _Feature((3, 7, 0, "beta", 1),
- (3, 10, 0, "alpha", 0),
+ (3, 10, 0, "alpha", 0),
CO_FUTURE_ANNOTATIONS)
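
The hunks above only re-indent the _Feature records, but their runtime shape is easy to check; a minimal sketch, not part of the patch, assuming a CPython of the vintage in this diff (the release tuples in the comments match the 3.9-era source shown here):

    import __future__

    feat = __future__.annotations
    print(feat.getOptionalRelease())   # (3, 7, 0, 'beta', 1)
    print(feat.getMandatoryRelease())  # (3, 10, 0, 'alpha', 0)
    print(hex(feat.compiler_flag))     # 0x1000000, i.e. CO_FUTURE_ANNOTATIONS
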
diff --git a/contrib/tools/python3/src/Lib/_aix_support.py b/contrib/tools/python3/src/Lib/_aix_support.py
index d27a1e8735..06b65a253b 100644
--- a/contrib/tools/python3/src/Lib/_aix_support.py
+++ b/contrib/tools/python3/src/Lib/_aix_support.py
@@ -1,91 +1,91 @@
-"""Shared AIX support functions."""
-
-import sys
-import sysconfig
-
-try:
- import subprocess
-except ImportError: # pragma: no cover
- # _aix_support is used in distutils by setup.py to build C extensions,
- # before subprocess dependencies like _posixsubprocess are available.
- import _bootsubprocess as subprocess
-
-
-def _aix_tag(vrtl, bd):
- # type: (List[int], int) -> str
- # Infer the ABI bitwidth from maxsize (assuming 64 bit as the default)
- _sz = 32 if sys.maxsize == (2**31-1) else 64
- _bd = bd if bd != 0 else 9988
- # vrtl[version, release, technology_level]
- return "aix-{:1x}{:1d}{:02d}-{:04d}-{}".format(vrtl[0], vrtl[1], vrtl[2], _bd, _sz)
-
-
-# extract version, release and technology level from a VRMF string
-def _aix_vrtl(vrmf):
- # type: (str) -> List[int]
- v, r, tl = vrmf.split(".")[:3]
- return [int(v[-1]), int(r), int(tl)]
-
-
-def _aix_bos_rte():
- # type: () -> Tuple[str, int]
- """
- Return a Tuple[str, int] e.g., ['7.1.4.34', 1806]
- The fileset bos.rte represents the current AIX run-time level. It's VRMF and
- builddate reflect the current ABI levels of the runtime environment.
- If no builddate is found give a value that will satisfy pep425 related queries
- """
- # All AIX systems to have lslpp installed in this location
- out = subprocess.check_output(["/usr/bin/lslpp", "-Lqc", "bos.rte"])
- out = out.decode("utf-8")
- out = out.strip().split(":") # type: ignore
- _bd = int(out[-1]) if out[-1] != '' else 9988
- return (str(out[2]), _bd)
-
-
-def aix_platform():
- # type: () -> str
- """
- AIX filesets are identified by four decimal values: V.R.M.F.
- V (version) and R (release) can be retreived using ``uname``
- Since 2007, starting with AIX 5.3 TL7, the M value has been
- included with the fileset bos.rte and represents the Technology
- Level (TL) of AIX. The F (Fix) value also increases, but is not
- relevant for comparing releases and binary compatibility.
- For binary compatibility the so-called builddate is needed.
- Again, the builddate of an AIX release is associated with bos.rte.
- AIX ABI compatibility is described as guaranteed at: https://www.ibm.com/\
- support/knowledgecenter/en/ssw_aix_72/install/binary_compatability.html
-
- For pep425 purposes the AIX platform tag becomes:
- "aix-{:1x}{:1d}{:02d}-{:04d}-{}".format(v, r, tl, builddate, bitsize)
- e.g., "aix-6107-1415-32" for AIX 6.1 TL7 bd 1415, 32-bit
- and, "aix-6107-1415-64" for AIX 6.1 TL7 bd 1415, 64-bit
- """
- vrmf, bd = _aix_bos_rte()
- return _aix_tag(_aix_vrtl(vrmf), bd)
-
-
-# extract vrtl from the BUILD_GNU_TYPE as an int
-def _aix_bgt():
- # type: () -> List[int]
- gnu_type = sysconfig.get_config_var("BUILD_GNU_TYPE")
- if not gnu_type:
- raise ValueError("BUILD_GNU_TYPE is not defined")
- return _aix_vrtl(vrmf=gnu_type)
-
-
-def aix_buildtag():
- # type: () -> str
- """
- Return the platform_tag of the system Python was built on.
- """
- # AIX_BUILDDATE is defined by configure with:
- # lslpp -Lcq bos.rte | awk -F: '{ print $NF }'
- build_date = sysconfig.get_config_var("AIX_BUILDDATE")
- try:
- build_date = int(build_date)
- except (ValueError, TypeError):
- raise ValueError(f"AIX_BUILDDATE is not defined or invalid: "
- f"{build_date!r}")
- return _aix_tag(_aix_bgt(), build_date)
+"""Shared AIX support functions."""
+
+import sys
+import sysconfig
+
+try:
+ import subprocess
+except ImportError: # pragma: no cover
+ # _aix_support is used in distutils by setup.py to build C extensions,
+ # before subprocess dependencies like _posixsubprocess are available.
+ import _bootsubprocess as subprocess
+
+
+def _aix_tag(vrtl, bd):
+ # type: (List[int], int) -> str
+ # Infer the ABI bitwidth from maxsize (assuming 64 bit as the default)
+ _sz = 32 if sys.maxsize == (2**31-1) else 64
+ _bd = bd if bd != 0 else 9988
+ # vrtl[version, release, technology_level]
+ return "aix-{:1x}{:1d}{:02d}-{:04d}-{}".format(vrtl[0], vrtl[1], vrtl[2], _bd, _sz)
+
+
+# extract version, release and technology level from a VRMF string
+def _aix_vrtl(vrmf):
+ # type: (str) -> List[int]
+ v, r, tl = vrmf.split(".")[:3]
+ return [int(v[-1]), int(r), int(tl)]
+
+
+def _aix_bos_rte():
+ # type: () -> Tuple[str, int]
+ """
+ Return a Tuple[str, int] e.g., ['7.1.4.34', 1806]
+ The fileset bos.rte represents the current AIX run-time level. It's VRMF and
+ builddate reflect the current ABI levels of the runtime environment.
+ If no builddate is found give a value that will satisfy pep425 related queries
+ """
+ # All AIX systems to have lslpp installed in this location
+ out = subprocess.check_output(["/usr/bin/lslpp", "-Lqc", "bos.rte"])
+ out = out.decode("utf-8")
+ out = out.strip().split(":") # type: ignore
+ _bd = int(out[-1]) if out[-1] != '' else 9988
+ return (str(out[2]), _bd)
+
+
+def aix_platform():
+ # type: () -> str
+ """
+ AIX filesets are identified by four decimal values: V.R.M.F.
+ V (version) and R (release) can be retreived using ``uname``
+ Since 2007, starting with AIX 5.3 TL7, the M value has been
+ included with the fileset bos.rte and represents the Technology
+ Level (TL) of AIX. The F (Fix) value also increases, but is not
+ relevant for comparing releases and binary compatibility.
+ For binary compatibility the so-called builddate is needed.
+ Again, the builddate of an AIX release is associated with bos.rte.
+ AIX ABI compatibility is described as guaranteed at: https://www.ibm.com/\
+ support/knowledgecenter/en/ssw_aix_72/install/binary_compatability.html
+
+ For pep425 purposes the AIX platform tag becomes:
+ "aix-{:1x}{:1d}{:02d}-{:04d}-{}".format(v, r, tl, builddate, bitsize)
+ e.g., "aix-6107-1415-32" for AIX 6.1 TL7 bd 1415, 32-bit
+ and, "aix-6107-1415-64" for AIX 6.1 TL7 bd 1415, 64-bit
+ """
+ vrmf, bd = _aix_bos_rte()
+ return _aix_tag(_aix_vrtl(vrmf), bd)
+
+
+# extract vrtl from the BUILD_GNU_TYPE as an int
+def _aix_bgt():
+ # type: () -> List[int]
+ gnu_type = sysconfig.get_config_var("BUILD_GNU_TYPE")
+ if not gnu_type:
+ raise ValueError("BUILD_GNU_TYPE is not defined")
+ return _aix_vrtl(vrmf=gnu_type)
+
+
+def aix_buildtag():
+ # type: () -> str
+ """
+ Return the platform_tag of the system Python was built on.
+ """
+ # AIX_BUILDDATE is defined by configure with:
+ # lslpp -Lcq bos.rte | awk -F: '{ print $NF }'
+ build_date = sysconfig.get_config_var("AIX_BUILDDATE")
+ try:
+ build_date = int(build_date)
+ except (ValueError, TypeError):
+ raise ValueError(f"AIX_BUILDDATE is not defined or invalid: "
+ f"{build_date!r}")
+ return _aix_tag(_aix_bgt(), build_date)
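
The docstrings above define the pep425-style AIX platform tag; a minimal sketch, not part of the patch, applying the same format string to the example values from the aix_platform() docstring (AIX 6.1 TL7, builddate 1415, 64-bit):

    # Same formula as _aix_tag(); values taken from the docstring example.
    v, r, tl, bd, sz = 6, 1, 7, 1415, 64
    print("aix-{:1x}{:1d}{:02d}-{:04d}-{}".format(v, r, tl, bd, sz))
    # -> aix-6107-1415-64
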
diff --git a/contrib/tools/python3/src/Lib/_bootsubprocess.py b/contrib/tools/python3/src/Lib/_bootsubprocess.py
index 014782f616..d692e2aee1 100644
--- a/contrib/tools/python3/src/Lib/_bootsubprocess.py
+++ b/contrib/tools/python3/src/Lib/_bootsubprocess.py
@@ -1,97 +1,97 @@
-"""
-Basic subprocess implementation for POSIX which only uses os functions. Only
-implement features required by setup.py to build C extension modules when
-subprocess is unavailable. setup.py is not used on Windows.
-"""
-import os
-
-
-# distutils.spawn used by distutils.command.build_ext
-# calls subprocess.Popen().wait()
-class Popen:
- def __init__(self, cmd, env=None):
- self._cmd = cmd
- self._env = env
- self.returncode = None
-
- def wait(self):
- pid = os.fork()
- if pid == 0:
- # Child process
- try:
- if self._env is not None:
- os.execve(self._cmd[0], self._cmd, self._env)
- else:
- os.execv(self._cmd[0], self._cmd)
- finally:
- os._exit(1)
- else:
- # Parent process
- _, status = os.waitpid(pid, 0)
- self.returncode = os.waitstatus_to_exitcode(status)
-
- return self.returncode
-
-
-def _check_cmd(cmd):
- # Use regex [a-zA-Z0-9./-]+: reject empty string, space, etc.
- safe_chars = []
- for first, last in (("a", "z"), ("A", "Z"), ("0", "9")):
- for ch in range(ord(first), ord(last) + 1):
- safe_chars.append(chr(ch))
- safe_chars.append("./-")
- safe_chars = ''.join(safe_chars)
-
- if isinstance(cmd, (tuple, list)):
- check_strs = cmd
- elif isinstance(cmd, str):
- check_strs = [cmd]
- else:
- return False
-
- for arg in check_strs:
- if not isinstance(arg, str):
- return False
- if not arg:
- # reject empty string
- return False
- for ch in arg:
- if ch not in safe_chars:
- return False
-
- return True
-
-
-# _aix_support used by distutil.util calls subprocess.check_output()
-def check_output(cmd, **kwargs):
- if kwargs:
- raise NotImplementedError(repr(kwargs))
-
- if not _check_cmd(cmd):
- raise ValueError(f"unsupported command: {cmd!r}")
-
- tmp_filename = "check_output.tmp"
- if not isinstance(cmd, str):
- cmd = " ".join(cmd)
- cmd = f"{cmd} >{tmp_filename}"
-
- try:
- # system() spawns a shell
- status = os.system(cmd)
- exitcode = os.waitstatus_to_exitcode(status)
- if exitcode:
- raise ValueError(f"Command {cmd!r} returned non-zero "
- f"exit status {exitcode!r}")
-
- try:
- with open(tmp_filename, "rb") as fp:
- stdout = fp.read()
- except FileNotFoundError:
- stdout = b''
- finally:
- try:
- os.unlink(tmp_filename)
- except OSError:
- pass
-
- return stdout
+"""
+Basic subprocess implementation for POSIX which only uses os functions. Only
+implement features required by setup.py to build C extension modules when
+subprocess is unavailable. setup.py is not used on Windows.
+"""
+import os
+
+
+# distutils.spawn used by distutils.command.build_ext
+# calls subprocess.Popen().wait()
+class Popen:
+ def __init__(self, cmd, env=None):
+ self._cmd = cmd
+ self._env = env
+ self.returncode = None
+
+ def wait(self):
+ pid = os.fork()
+ if pid == 0:
+ # Child process
+ try:
+ if self._env is not None:
+ os.execve(self._cmd[0], self._cmd, self._env)
+ else:
+ os.execv(self._cmd[0], self._cmd)
+ finally:
+ os._exit(1)
+ else:
+ # Parent process
+ _, status = os.waitpid(pid, 0)
+ self.returncode = os.waitstatus_to_exitcode(status)
+
+ return self.returncode
+
+
+def _check_cmd(cmd):
+ # Use regex [a-zA-Z0-9./-]+: reject empty string, space, etc.
+ safe_chars = []
+ for first, last in (("a", "z"), ("A", "Z"), ("0", "9")):
+ for ch in range(ord(first), ord(last) + 1):
+ safe_chars.append(chr(ch))
+ safe_chars.append("./-")
+ safe_chars = ''.join(safe_chars)
+
+ if isinstance(cmd, (tuple, list)):
+ check_strs = cmd
+ elif isinstance(cmd, str):
+ check_strs = [cmd]
+ else:
+ return False
+
+ for arg in check_strs:
+ if not isinstance(arg, str):
+ return False
+ if not arg:
+ # reject empty string
+ return False
+ for ch in arg:
+ if ch not in safe_chars:
+ return False
+
+ return True
+
+
+# _aix_support used by distutil.util calls subprocess.check_output()
+def check_output(cmd, **kwargs):
+ if kwargs:
+ raise NotImplementedError(repr(kwargs))
+
+ if not _check_cmd(cmd):
+ raise ValueError(f"unsupported command: {cmd!r}")
+
+ tmp_filename = "check_output.tmp"
+ if not isinstance(cmd, str):
+ cmd = " ".join(cmd)
+ cmd = f"{cmd} >{tmp_filename}"
+
+ try:
+ # system() spawns a shell
+ status = os.system(cmd)
+ exitcode = os.waitstatus_to_exitcode(status)
+ if exitcode:
+ raise ValueError(f"Command {cmd!r} returned non-zero "
+ f"exit status {exitcode!r}")
+
+ try:
+ with open(tmp_filename, "rb") as fp:
+ stdout = fp.read()
+ except FileNotFoundError:
+ stdout = b''
+ finally:
+ try:
+ os.unlink(tmp_filename)
+ except OSError:
+ pass
+
+ return stdout
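
_bootsubprocess deliberately supports only the two distutils call sites named in its comments; a minimal usage sketch, not part of the patch, assuming a POSIX system where this internal module is importable (the binary paths are hypothetical examples that pass _check_cmd's [a-zA-Z0-9./-]+ filter):

    import _bootsubprocess as subprocess

    # check_output() shells out via os.system(), capturing stdout through
    # a temporary file in the current directory.
    print(subprocess.check_output(["/bin/uname"]))   # e.g. b'Linux\n'

    # Popen().wait() is a bare fork() + execv(); returncode is 0 on success.
    print(subprocess.Popen(["/bin/true"]).wait())    # 0
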
diff --git a/contrib/tools/python3/src/Lib/_collections_abc.py b/contrib/tools/python3/src/Lib/_collections_abc.py
index acfaff802f..33e6a0b71f 100644
--- a/contrib/tools/python3/src/Lib/_collections_abc.py
+++ b/contrib/tools/python3/src/Lib/_collections_abc.py
@@ -9,12 +9,12 @@ Unit tests are in test_collections.
from abc import ABCMeta, abstractmethod
import sys
-GenericAlias = type(list[int])
-EllipsisType = type(...)
-def _f(): pass
-FunctionType = type(_f)
-del _f
-
+GenericAlias = type(list[int])
+EllipsisType = type(...)
+def _f(): pass
+FunctionType = type(_f)
+del _f
+
__all__ = ["Awaitable", "Coroutine",
"AsyncIterable", "AsyncIterator", "AsyncGenerator",
"Hashable", "Iterable", "Iterator", "Generator", "Reversible",
@@ -116,9 +116,9 @@ class Awaitable(metaclass=ABCMeta):
return _check_methods(C, "__await__")
return NotImplemented
- __class_getitem__ = classmethod(GenericAlias)
-
+ __class_getitem__ = classmethod(GenericAlias)
+
class Coroutine(Awaitable):
__slots__ = ()
@@ -177,9 +177,9 @@ class AsyncIterable(metaclass=ABCMeta):
return _check_methods(C, "__aiter__")
return NotImplemented
- __class_getitem__ = classmethod(GenericAlias)
-
+ __class_getitem__ = classmethod(GenericAlias)
+
class AsyncIterator(AsyncIterable):
__slots__ = ()
@@ -265,9 +265,9 @@ class Iterable(metaclass=ABCMeta):
return _check_methods(C, "__iter__")
return NotImplemented
- __class_getitem__ = classmethod(GenericAlias)
-
+ __class_getitem__ = classmethod(GenericAlias)
+
class Iterator(Iterable):
__slots__ = ()
@@ -286,7 +286,7 @@ class Iterator(Iterable):
return _check_methods(C, '__iter__', '__next__')
return NotImplemented
-
+
Iterator.register(bytes_iterator)
Iterator.register(bytearray_iterator)
#Iterator.register(callable_iterator)
@@ -366,7 +366,7 @@ class Generator(Iterator):
'send', 'throw', 'close')
return NotImplemented
-
+
Generator.register(generator)
@@ -399,9 +399,9 @@ class Container(metaclass=ABCMeta):
return _check_methods(C, "__contains__")
return NotImplemented
- __class_getitem__ = classmethod(GenericAlias)
-
-
+ __class_getitem__ = classmethod(GenericAlias)
+
+
class Collection(Sized, Iterable, Container):
__slots__ = ()
@@ -412,87 +412,87 @@ class Collection(Sized, Iterable, Container):
return _check_methods(C, "__len__", "__iter__", "__contains__")
return NotImplemented
-
-class _CallableGenericAlias(GenericAlias):
- """ Represent `Callable[argtypes, resulttype]`.
-
- This sets ``__args__`` to a tuple containing the flattened``argtypes``
- followed by ``resulttype``.
-
- Example: ``Callable[[int, str], float]`` sets ``__args__`` to
- ``(int, str, float)``.
- """
-
- __slots__ = ()
-
- def __new__(cls, origin, args):
- try:
- return cls.__create_ga(origin, args)
- except TypeError as exc:
- import warnings
- warnings.warn(f'{str(exc)} '
- f'(This will raise a TypeError in Python 3.10.)',
- DeprecationWarning)
- return GenericAlias(origin, args)
-
- @classmethod
- def __create_ga(cls, origin, args):
- if not isinstance(args, tuple) or len(args) != 2:
- raise TypeError(
- "Callable must be used as Callable[[arg, ...], result].")
- t_args, t_result = args
- if isinstance(t_args, (list, tuple)):
- ga_args = tuple(t_args) + (t_result,)
- # This relaxes what t_args can be on purpose to allow things like
- # PEP 612 ParamSpec. Responsibility for whether a user is using
- # Callable[...] properly is deferred to static type checkers.
- else:
- ga_args = args
- return super().__new__(cls, origin, ga_args)
-
- def __repr__(self):
- if len(self.__args__) == 2 and self.__args__[0] is Ellipsis:
- return super().__repr__()
- return (f'collections.abc.Callable'
- f'[[{", ".join([_type_repr(a) for a in self.__args__[:-1]])}], '
- f'{_type_repr(self.__args__[-1])}]')
-
- def __reduce__(self):
- args = self.__args__
- if not (len(args) == 2 and args[0] is Ellipsis):
- args = list(args[:-1]), args[-1]
- return _CallableGenericAlias, (Callable, args)
-
- def __getitem__(self, item):
- # Called during TypeVar substitution, returns the custom subclass
- # rather than the default types.GenericAlias object.
- ga = super().__getitem__(item)
- args = ga.__args__
- t_result = args[-1]
- t_args = args[:-1]
- args = (t_args, t_result)
- return _CallableGenericAlias(Callable, args)
-
-
-def _type_repr(obj):
- """Return the repr() of an object, special-casing types (internal helper).
-
- Copied from :mod:`typing` since collections.abc
- shouldn't depend on that module.
- """
- if isinstance(obj, GenericAlias):
- return repr(obj)
- if isinstance(obj, type):
- if obj.__module__ == 'builtins':
- return obj.__qualname__
- return f'{obj.__module__}.{obj.__qualname__}'
- if obj is Ellipsis:
- return '...'
- if isinstance(obj, FunctionType):
- return obj.__name__
- return repr(obj)
-
-
+
+class _CallableGenericAlias(GenericAlias):
+ """ Represent `Callable[argtypes, resulttype]`.
+
+ This sets ``__args__`` to a tuple containing the flattened``argtypes``
+ followed by ``resulttype``.
+
+ Example: ``Callable[[int, str], float]`` sets ``__args__`` to
+ ``(int, str, float)``.
+ """
+
+ __slots__ = ()
+
+ def __new__(cls, origin, args):
+ try:
+ return cls.__create_ga(origin, args)
+ except TypeError as exc:
+ import warnings
+ warnings.warn(f'{str(exc)} '
+ f'(This will raise a TypeError in Python 3.10.)',
+ DeprecationWarning)
+ return GenericAlias(origin, args)
+
+ @classmethod
+ def __create_ga(cls, origin, args):
+ if not isinstance(args, tuple) or len(args) != 2:
+ raise TypeError(
+ "Callable must be used as Callable[[arg, ...], result].")
+ t_args, t_result = args
+ if isinstance(t_args, (list, tuple)):
+ ga_args = tuple(t_args) + (t_result,)
+ # This relaxes what t_args can be on purpose to allow things like
+ # PEP 612 ParamSpec. Responsibility for whether a user is using
+ # Callable[...] properly is deferred to static type checkers.
+ else:
+ ga_args = args
+ return super().__new__(cls, origin, ga_args)
+
+ def __repr__(self):
+ if len(self.__args__) == 2 and self.__args__[0] is Ellipsis:
+ return super().__repr__()
+ return (f'collections.abc.Callable'
+ f'[[{", ".join([_type_repr(a) for a in self.__args__[:-1]])}], '
+ f'{_type_repr(self.__args__[-1])}]')
+
+ def __reduce__(self):
+ args = self.__args__
+ if not (len(args) == 2 and args[0] is Ellipsis):
+ args = list(args[:-1]), args[-1]
+ return _CallableGenericAlias, (Callable, args)
+
+ def __getitem__(self, item):
+ # Called during TypeVar substitution, returns the custom subclass
+ # rather than the default types.GenericAlias object.
+ ga = super().__getitem__(item)
+ args = ga.__args__
+ t_result = args[-1]
+ t_args = args[:-1]
+ args = (t_args, t_result)
+ return _CallableGenericAlias(Callable, args)
+
+
+def _type_repr(obj):
+ """Return the repr() of an object, special-casing types (internal helper).
+
+ Copied from :mod:`typing` since collections.abc
+ shouldn't depend on that module.
+ """
+ if isinstance(obj, GenericAlias):
+ return repr(obj)
+ if isinstance(obj, type):
+ if obj.__module__ == 'builtins':
+ return obj.__qualname__
+ return f'{obj.__module__}.{obj.__qualname__}'
+ if obj is Ellipsis:
+ return '...'
+ if isinstance(obj, FunctionType):
+ return obj.__name__
+ return repr(obj)
+
+
class Callable(metaclass=ABCMeta):
__slots__ = ()
@@ -507,9 +507,9 @@ class Callable(metaclass=ABCMeta):
return _check_methods(C, "__call__")
return NotImplemented
- __class_getitem__ = classmethod(_CallableGenericAlias)
-
+ __class_getitem__ = classmethod(_CallableGenericAlias)
+
### SETS ###
@@ -642,7 +642,7 @@ class Set(Collection):
hx = hash(x)
h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167
h &= MASK
- h ^= (h >> 11) ^ (h >> 25)
+ h ^= (h >> 11) ^ (h >> 25)
h = h * 69069 + 907133923
h &= MASK
if h > MAX:
@@ -651,7 +651,7 @@ class Set(Collection):
h = 590923713
return h
-
+
Set.register(frozenset)
@@ -734,7 +734,7 @@ class MutableSet(Set):
self.discard(value)
return self
-
+
MutableSet.register(set)
@@ -791,7 +791,7 @@ class Mapping(Collection):
__reversed__ = None
-
+
Mapping.register(mappingproxy)
@@ -808,9 +808,9 @@ class MappingView(Sized):
def __repr__(self):
return '{0.__class__.__name__}({0._mapping!r})'.format(self)
- __class_getitem__ = classmethod(GenericAlias)
-
+ __class_getitem__ = classmethod(GenericAlias)
+
class KeysView(MappingView, Set):
__slots__ = ()
@@ -825,7 +825,7 @@ class KeysView(MappingView, Set):
def __iter__(self):
yield from self._mapping
-
+
KeysView.register(dict_keys)
@@ -850,7 +850,7 @@ class ItemsView(MappingView, Set):
for key in self._mapping:
yield (key, self._mapping[key])
-
+
ItemsView.register(dict_items)
@@ -869,7 +869,7 @@ class ValuesView(MappingView, Collection):
for key in self._mapping:
yield self._mapping[key]
-
+
ValuesView.register(dict_values)
@@ -930,21 +930,21 @@ class MutableMapping(Mapping):
except KeyError:
pass
- def update(self, other=(), /, **kwds):
+ def update(self, other=(), /, **kwds):
''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F.
If E present and has a .keys() method, does: for k in E: D[k] = E[k]
If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v
In either case, this is followed by: for k, v in F.items(): D[k] = v
'''
- if isinstance(other, Mapping):
- for key in other:
- self[key] = other[key]
- elif hasattr(other, "keys"):
- for key in other.keys():
- self[key] = other[key]
- else:
- for key, value in other:
- self[key] = value
+ if isinstance(other, Mapping):
+ for key in other:
+ self[key] = other[key]
+ elif hasattr(other, "keys"):
+ for key in other.keys():
+ self[key] = other[key]
+ else:
+ for key, value in other:
+ self[key] = value
for key, value in kwds.items():
self[key] = value
@@ -956,7 +956,7 @@ class MutableMapping(Mapping):
self[key] = default
return default
-
+
MutableMapping.register(dict)
@@ -1024,7 +1024,7 @@ class Sequence(Reversible, Collection):
'S.count(value) -> integer -- return number of occurrences of value'
return sum(1 for v in self if v is value or v == value)
-
+
Sequence.register(tuple)
Sequence.register(str)
Sequence.register(range)
@@ -1088,8 +1088,8 @@ class MutableSequence(Sequence):
def extend(self, values):
'S.extend(iterable) -- extend sequence by appending elements from the iterable'
- if values is self:
- values = list(values)
+ if values is self:
+ values = list(values)
for v in values:
self.append(v)
@@ -1111,6 +1111,6 @@ class MutableSequence(Sequence):
self.extend(values)
return self
-
+
MutableSequence.register(list)
MutableSequence.register(bytearray) # Multiply inheriting, see ByteString
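
The _CallableGenericAlias added above flattens the argument list into __args__ exactly as its docstring describes; a minimal demonstration, not part of the patch, on a 3.9-era interpreter:

    from collections.abc import Callable

    alias = Callable[[int, str], float]
    print(alias.__args__)  # (int, str, float): argtypes flattened, result last
    print(alias)           # collections.abc.Callable[[int, str], float]
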
diff --git a/contrib/tools/python3/src/Lib/_osx_support.py b/contrib/tools/python3/src/Lib/_osx_support.py
index 2fc324a1ef..09be87621b 100644
--- a/contrib/tools/python3/src/Lib/_osx_support.py
+++ b/contrib/tools/python3/src/Lib/_osx_support.py
@@ -52,7 +52,7 @@ def _find_executable(executable, path=None):
return executable
-def _read_output(commandstring, capture_stderr=False):
+def _read_output(commandstring, capture_stderr=False):
"""Output from successful command execution or None"""
# Similar to os.popen(commandstring, "r").read(),
# but without actually using os.popen because that
@@ -67,10 +67,10 @@ def _read_output(commandstring, capture_stderr=False):
os.getpid(),), "w+b")
with contextlib.closing(fp) as fp:
- if capture_stderr:
- cmd = "%s >'%s' 2>&1" % (commandstring, fp.name)
- else:
- cmd = "%s 2>/dev/null >'%s'" % (commandstring, fp.name)
+ if capture_stderr:
+ cmd = "%s >'%s' 2>&1" % (commandstring, fp.name)
+ else:
+ cmd = "%s 2>/dev/null >'%s'" % (commandstring, fp.name)
return fp.read().decode('utf-8').strip() if not os.system(cmd) else None
@@ -113,26 +113,26 @@ def _get_system_version():
return _SYSTEM_VERSION
-_SYSTEM_VERSION_TUPLE = None
-def _get_system_version_tuple():
- """
- Return the macOS system version as a tuple
-
- The return value is safe to use to compare
- two version numbers.
- """
- global _SYSTEM_VERSION_TUPLE
- if _SYSTEM_VERSION_TUPLE is None:
- osx_version = _get_system_version()
- if osx_version:
- try:
- _SYSTEM_VERSION_TUPLE = tuple(int(i) for i in osx_version.split('.'))
- except ValueError:
- _SYSTEM_VERSION_TUPLE = ()
-
- return _SYSTEM_VERSION_TUPLE
-
-
+_SYSTEM_VERSION_TUPLE = None
+def _get_system_version_tuple():
+ """
+ Return the macOS system version as a tuple
+
+ The return value is safe to use to compare
+ two version numbers.
+ """
+ global _SYSTEM_VERSION_TUPLE
+ if _SYSTEM_VERSION_TUPLE is None:
+ osx_version = _get_system_version()
+ if osx_version:
+ try:
+ _SYSTEM_VERSION_TUPLE = tuple(int(i) for i in osx_version.split('.'))
+ except ValueError:
+ _SYSTEM_VERSION_TUPLE = ()
+
+ return _SYSTEM_VERSION_TUPLE
+
+
def _remove_original_values(_config_vars):
"""Remove original unmodified values for testing"""
# This is needed for higher-level cross-platform tests of get_platform.
@@ -148,33 +148,33 @@ def _save_modified_value(_config_vars, cv, newvalue):
_config_vars[_INITPRE + cv] = oldvalue
_config_vars[cv] = newvalue
-
-_cache_default_sysroot = None
-def _default_sysroot(cc):
- """ Returns the root of the default SDK for this system, or '/' """
- global _cache_default_sysroot
-
- if _cache_default_sysroot is not None:
- return _cache_default_sysroot
-
- contents = _read_output('%s -c -E -v - </dev/null' % (cc,), True)
- in_incdirs = False
- for line in contents.splitlines():
- if line.startswith("#include <...>"):
- in_incdirs = True
- elif line.startswith("End of search list"):
- in_incdirs = False
- elif in_incdirs:
- line = line.strip()
- if line == '/usr/include':
- _cache_default_sysroot = '/'
- elif line.endswith(".sdk/usr/include"):
- _cache_default_sysroot = line[:-12]
- if _cache_default_sysroot is None:
- _cache_default_sysroot = '/'
-
- return _cache_default_sysroot
-
+
+_cache_default_sysroot = None
+def _default_sysroot(cc):
+ """ Returns the root of the default SDK for this system, or '/' """
+ global _cache_default_sysroot
+
+ if _cache_default_sysroot is not None:
+ return _cache_default_sysroot
+
+ contents = _read_output('%s -c -E -v - </dev/null' % (cc,), True)
+ in_incdirs = False
+ for line in contents.splitlines():
+ if line.startswith("#include <...>"):
+ in_incdirs = True
+ elif line.startswith("End of search list"):
+ in_incdirs = False
+ elif in_incdirs:
+ line = line.strip()
+ if line == '/usr/include':
+ _cache_default_sysroot = '/'
+ elif line.endswith(".sdk/usr/include"):
+ _cache_default_sysroot = line[:-12]
+ if _cache_default_sysroot is None:
+ _cache_default_sysroot = '/'
+
+ return _cache_default_sysroot
+
def _supports_universal_builds():
"""Returns True if universal builds are supported on this system"""
# As an approximation, we assume that if we are running on 10.4 or above,
@@ -182,19 +182,19 @@ def _supports_universal_builds():
# builds, in particular -isysroot and -arch arguments to the compiler. This
# is in support of allowing 10.4 universal builds to run on 10.3.x systems.
- osx_version = _get_system_version_tuple()
+ osx_version = _get_system_version_tuple()
return bool(osx_version >= (10, 4)) if osx_version else False
-def _supports_arm64_builds():
- """Returns True if arm64 builds are supported on this system"""
- # There are two sets of systems supporting macOS/arm64 builds:
- # 1. macOS 11 and later, unconditionally
- # 2. macOS 10.15 with Xcode 12.2 or later
- # For now the second category is ignored.
- osx_version = _get_system_version_tuple()
- return osx_version >= (11, 0) if osx_version else False
-
+def _supports_arm64_builds():
+ """Returns True if arm64 builds are supported on this system"""
+ # There are two sets of systems supporting macOS/arm64 builds:
+ # 1. macOS 11 and later, unconditionally
+ # 2. macOS 10.15 with Xcode 12.2 or later
+ # For now the second category is ignored.
+ osx_version = _get_system_version_tuple()
+ return osx_version >= (11, 0) if osx_version else False
+
def _find_appropriate_compiler(_config_vars):
"""Find appropriate C compiler for extension module builds"""
@@ -265,7 +265,7 @@ def _remove_universal_flags(_config_vars):
if cv in _config_vars and cv not in os.environ:
flags = _config_vars[cv]
flags = re.sub(r'-arch\s+\w+\s', ' ', flags, flags=re.ASCII)
- flags = re.sub(r'-isysroot\s*\S+', ' ', flags)
+ flags = re.sub(r'-isysroot\s*\S+', ' ', flags)
_save_modified_value(_config_vars, cv, flags)
return _config_vars
@@ -341,7 +341,7 @@ def _check_for_unavailable_sdk(_config_vars):
# to /usr and /System/Library by either a standalone CLT
# package or the CLT component within Xcode.
cflags = _config_vars.get('CFLAGS', '')
- m = re.search(r'-isysroot\s*(\S+)', cflags)
+ m = re.search(r'-isysroot\s*(\S+)', cflags)
if m is not None:
sdk = m.group(1)
if not os.path.exists(sdk):
@@ -349,7 +349,7 @@ def _check_for_unavailable_sdk(_config_vars):
# Do not alter a config var explicitly overridden by env var
if cv in _config_vars and cv not in os.environ:
flags = _config_vars[cv]
- flags = re.sub(r'-isysroot\s*\S+(?:\s|$)', ' ', flags)
+ flags = re.sub(r'-isysroot\s*\S+(?:\s|$)', ' ', flags)
_save_modified_value(_config_vars, cv, flags)
return _config_vars
@@ -374,7 +374,7 @@ def compiler_fixup(compiler_so, cc_args):
stripArch = stripSysroot = True
else:
stripArch = '-arch' in cc_args
- stripSysroot = any(arg for arg in cc_args if arg.startswith('-isysroot'))
+ stripSysroot = any(arg for arg in cc_args if arg.startswith('-isysroot'))
if stripArch or 'ARCHFLAGS' in os.environ:
while True:
@@ -385,12 +385,12 @@ def compiler_fixup(compiler_so, cc_args):
except ValueError:
break
- elif not _supports_arm64_builds():
- # Look for "-arch arm64" and drop that
- for idx in reversed(range(len(compiler_so))):
- if compiler_so[idx] == '-arch' and compiler_so[idx+1] == "arm64":
- del compiler_so[idx:idx+2]
-
+ elif not _supports_arm64_builds():
+ # Look for "-arch arm64" and drop that
+ for idx in reversed(range(len(compiler_so))):
+ if compiler_so[idx] == '-arch' and compiler_so[idx+1] == "arm64":
+ del compiler_so[idx:idx+2]
+
if 'ARCHFLAGS' in os.environ and not stripArch:
# User specified different -arch flags in the environ,
# see also distutils.sysconfig
@@ -398,35 +398,35 @@ def compiler_fixup(compiler_so, cc_args):
if stripSysroot:
while True:
- indices = [i for i,x in enumerate(compiler_so) if x.startswith('-isysroot')]
- if not indices:
- break
- index = indices[0]
- if compiler_so[index] == '-isysroot':
+ indices = [i for i,x in enumerate(compiler_so) if x.startswith('-isysroot')]
+ if not indices:
+ break
+ index = indices[0]
+ if compiler_so[index] == '-isysroot':
# Strip this argument and the next one:
del compiler_so[index:index+2]
- else:
- # It's '-isysroot/some/path' in one arg
- del compiler_so[index:index+1]
+ else:
+ # It's '-isysroot/some/path' in one arg
+ del compiler_so[index:index+1]
# Check if the SDK that is used during compilation actually exists,
# the universal build requires the usage of a universal SDK and not all
# users have that installed by default.
sysroot = None
- argvar = cc_args
- indices = [i for i,x in enumerate(cc_args) if x.startswith('-isysroot')]
- if not indices:
- argvar = compiler_so
- indices = [i for i,x in enumerate(compiler_so) if x.startswith('-isysroot')]
-
- for idx in indices:
- if argvar[idx] == '-isysroot':
- sysroot = argvar[idx+1]
- break
- else:
- sysroot = argvar[idx][len('-isysroot'):]
- break
-
+ argvar = cc_args
+ indices = [i for i,x in enumerate(cc_args) if x.startswith('-isysroot')]
+ if not indices:
+ argvar = compiler_so
+ indices = [i for i,x in enumerate(compiler_so) if x.startswith('-isysroot')]
+
+ for idx in indices:
+ if argvar[idx] == '-isysroot':
+ sysroot = argvar[idx+1]
+ break
+ else:
+ sysroot = argvar[idx][len('-isysroot'):]
+ break
+
if sysroot and not os.path.isdir(sysroot):
from distutils import log
log.warn("Compiling with an SDK that doesn't seem to exist: %s",
@@ -482,7 +482,7 @@ def customize_compiler(_config_vars):
This customization is performed when the first
extension module build is requested
- in distutils.sysconfig.customize_compiler.
+ in distutils.sysconfig.customize_compiler.
"""
# Find a compiler to use for extension module builds
@@ -541,8 +541,8 @@ def get_platform_osx(_config_vars, osname, release, machine):
if len(archs) == 1:
machine = archs[0]
- elif archs == ('arm64', 'x86_64'):
- machine = 'universal2'
+ elif archs == ('arm64', 'x86_64'):
+ machine = 'universal2'
elif archs == ('i386', 'ppc'):
machine = 'fat'
elif archs == ('i386', 'x86_64'):
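
The -isysroot handling touched above strips SDK flags whether they arrive split ('-isysroot /path') or fused ('-isysroot/path'), since \s* allows zero spaces; a minimal sketch, not part of the patch, with a hypothetical SDK path:

    import re

    cflags = "-arch x86_64 -isysroot /Apple/SDKs/MacOSX11.sdk -O2"
    print(re.sub(r'-isysroot\s*\S+', ' ', cflags))   # '-arch x86_64   -O2'
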
diff --git a/contrib/tools/python3/src/Lib/_py_abc.py b/contrib/tools/python3/src/Lib/_py_abc.py
index c870ae9048..fd1493f440 100644
--- a/contrib/tools/python3/src/Lib/_py_abc.py
+++ b/contrib/tools/python3/src/Lib/_py_abc.py
@@ -32,7 +32,7 @@ class ABCMeta(type):
# external code.
_abc_invalidation_counter = 0
- def __new__(mcls, name, bases, namespace, /, **kwargs):
+ def __new__(mcls, name, bases, namespace, /, **kwargs):
cls = super().__new__(mcls, name, bases, namespace, **kwargs)
# Compute set of abstract method names
abstracts = {name
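
The only change to _py_abc is whitespace around __new__'s signature, but the '/' marker in it is worth noting: everything before it is positional-only, so class keyword arguments cannot collide with the parameter names. A minimal sketch, not part of the patch, with a hypothetical helper (Python 3.8+ syntax):

    def new(mcls, name, /, **kwargs):
        # 'mcls' and 'name' are positional-only, so keyword arguments named
        # 'mcls' or 'name' land in **kwargs instead of raising a TypeError.
        return kwargs

    print(new(type, "A", name="keyword"))   # {'name': 'keyword'}
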
diff --git a/contrib/tools/python3/src/Lib/_pydecimal.py b/contrib/tools/python3/src/Lib/_pydecimal.py
index ab989e5206..942cfa405a 100644
--- a/contrib/tools/python3/src/Lib/_pydecimal.py
+++ b/contrib/tools/python3/src/Lib/_pydecimal.py
@@ -140,11 +140,11 @@ __all__ = [
# Limits for the C version for compatibility
'MAX_PREC', 'MAX_EMAX', 'MIN_EMIN', 'MIN_ETINY',
- # C version: compile time choice that enables the thread local context (deprecated, now always true)
- 'HAVE_THREADS',
-
- # C version: compile time choice that enables the coroutine local context
- 'HAVE_CONTEXTVAR'
+ # C version: compile time choice that enables the thread local context (deprecated, now always true)
+ 'HAVE_THREADS',
+
+ # C version: compile time choice that enables the coroutine local context
+ 'HAVE_CONTEXTVAR'
]
__xname__ = __name__ # sys.modules lookup (--without-threads)
@@ -175,7 +175,7 @@ ROUND_05UP = 'ROUND_05UP'
# Compatibility with the C version
HAVE_THREADS = True
-HAVE_CONTEXTVAR = True
+HAVE_CONTEXTVAR = True
if sys.maxsize == 2**63-1:
MAX_PREC = 999999999999999999
MAX_EMAX = 999999999999999999
diff --git a/contrib/tools/python3/src/Lib/_pyio.py b/contrib/tools/python3/src/Lib/_pyio.py
index 4804ed27cd..51390f507b 100644
--- a/contrib/tools/python3/src/Lib/_pyio.py
+++ b/contrib/tools/python3/src/Lib/_pyio.py
@@ -33,13 +33,13 @@ DEFAULT_BUFFER_SIZE = 8 * 1024 # bytes
# Rebind for compatibility
BlockingIOError = BlockingIOError
-# Does io.IOBase finalizer log the exception if the close() method fails?
-# The exception is ignored silently by default in release build.
-_IOBASE_EMITS_UNRAISABLE = (hasattr(sys, "gettotalrefcount") or sys.flags.dev_mode)
-# Does open() check its 'errors' argument?
-_CHECK_ERRORS = _IOBASE_EMITS_UNRAISABLE
-
+# Does io.IOBase finalizer log the exception if the close() method fails?
+# The exception is ignored silently by default in release build.
+_IOBASE_EMITS_UNRAISABLE = (hasattr(sys, "gettotalrefcount") or sys.flags.dev_mode)
+# Does open() check its 'errors' argument?
+_CHECK_ERRORS = _IOBASE_EMITS_UNRAISABLE
+
def open(file, mode="r", buffering=-1, encoding=None, errors=None,
newline=None, closefd=True, opener=None):
@@ -204,11 +204,11 @@ def open(file, mode="r", buffering=-1, encoding=None, errors=None,
raise ValueError("binary mode doesn't take an errors argument")
if binary and newline is not None:
raise ValueError("binary mode doesn't take a newline argument")
- if binary and buffering == 1:
- import warnings
- warnings.warn("line buffering (buffering=1) isn't supported in binary "
- "mode, the default buffer size will be used",
- RuntimeWarning, 2)
+ if binary and buffering == 1:
+ import warnings
+ warnings.warn("line buffering (buffering=1) isn't supported in binary "
+ "mode, the default buffer size will be used",
+ RuntimeWarning, 2)
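# Side note, not part of the patch: since Python 3.8 both io.open() and this
# pure-Python port emit the warning above instead of silently ignoring the
# request. A minimal check, assuming a writable temp directory:
import io, tempfile, warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    with tempfile.TemporaryDirectory() as d:
        io.open(d + "/demo.bin", "wb", buffering=1).close()  # binary + line buffering
assert any(issubclass(w.category, RuntimeWarning) for w in caught)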
raw = FileIO(file,
(creating and "x" or "") +
(reading and "r" or "") +
@@ -256,34 +256,34 @@ def open(file, mode="r", buffering=-1, encoding=None, errors=None,
result.close()
raise
-# Define a default pure-Python implementation for open_code()
-# that does not allow hooks. Warn on first use. Defined for tests.
-def _open_code_with_warning(path):
- """Opens the provided file with mode ``'rb'``. This function
- should be used when the intent is to treat the contents as
- executable code.
-
- ``path`` should be an absolute path.
-
- When supported by the runtime, this function can be hooked
- in order to allow embedders more control over code files.
- This functionality is not supported on the current runtime.
- """
- import warnings
- warnings.warn("_pyio.open_code() may not be using hooks",
- RuntimeWarning, 2)
- return open(path, "rb")
-
-try:
- open_code = io.open_code
-except AttributeError:
- open_code = _open_code_with_warning
-
-
+# Define a default pure-Python implementation for open_code()
+# that does not allow hooks. Warn on first use. Defined for tests.
+def _open_code_with_warning(path):
+ """Opens the provided file with mode ``'rb'``. This function
+ should be used when the intent is to treat the contents as
+ executable code.
+
+ ``path`` should be an absolute path.
+
+ When supported by the runtime, this function can be hooked
+ in order to allow embedders more control over code files.
+ This functionality is not supported on the current runtime.
+ """
+ import warnings
+ warnings.warn("_pyio.open_code() may not be using hooks",
+ RuntimeWarning, 2)
+ return open(path, "rb")
+
+try:
+ open_code = io.open_code
+except AttributeError:
+ open_code = _open_code_with_warning
+
+
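# Side note, not part of the patch: io.open_code() (Python 3.8+) is the
# hookable entry point this fallback emulates; it always opens binary:
import io

with io.open_code(__file__) as f:      # ideally given an absolute path
    magic = f.read(16)                 # bytes, suitable for code loading
assert isinstance(magic, bytes)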
class DocDescriptor:
"""Helper for builtins.open.__doc__
"""
- def __get__(self, obj, typ=None):
+ def __get__(self, obj, typ=None):
return (
"open(file, mode='r', buffering=-1, encoding=None, "
"errors=None, newline=None, closefd=True)\n\n" +
@@ -321,15 +321,15 @@ class IOBase(metaclass=abc.ABCMeta):
derived classes can override selectively; the default implementations
represent a file that cannot be read, written or seeked.
- Even though IOBase does not declare read or write because
+ Even though IOBase does not declare read or write because
their signatures will vary, implementations and clients should
consider those methods part of the interface. Also, implementations
may raise UnsupportedOperation when operations they do not support are
called.
The basic type used for binary data read from or written to a file is
- bytes. Other bytes-like objects are accepted as method arguments too.
- Text I/O classes work with str data.
+ bytes. Other bytes-like objects are accepted as method arguments too.
+ Text I/O classes work with str data.
Note that calling any method (even inquiries) on a closed stream is
undefined. Implementations may raise OSError in this case.
@@ -408,27 +408,27 @@ class IOBase(metaclass=abc.ABCMeta):
def __del__(self):
"""Destructor. Calls close()."""
try:
- closed = self.closed
- except AttributeError:
- # If getting closed fails, then the object is probably
- # in an unusable state, so ignore.
- return
-
- if closed:
- return
-
- if _IOBASE_EMITS_UNRAISABLE:
+ closed = self.closed
+ except AttributeError:
+ # If getting closed fails, then the object is probably
+ # in an unusable state, so ignore.
+ return
+
+ if closed:
+ return
+
+ if _IOBASE_EMITS_UNRAISABLE:
self.close()
- else:
- # The try/except block is in case this is called at program
- # exit time, when it's possible that globals have already been
- # deleted, and then the close() call might fail. Since
- # there's nothing we can do about such failures and they annoy
- # the end users, we suppress the traceback.
- try:
- self.close()
- except:
- pass
+ else:
+ # The try/except block is in case this is called at program
+ # exit time, when it's possible that globals have already been
+ # deleted, and then the close() call might fail. Since
+ # there's nothing we can do about such failures and they annoy
+ # the end users, we suppress the traceback.
+ try:
+ self.close()
+ except:
+ pass
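# Side note, not part of the patch: under -X dev (or a debug build) the branch
# above lets a failing close() surface via sys.unraisablehook; release builds
# stay silent. 'Flaky' is a hypothetical class for demonstration:
import _pyio

class Flaky(_pyio.RawIOBase):
    def close(self):
        raise OSError("simulated failure during close")

f = Flaky()
del f   # silent in release builds; reported via sys.unraisablehook in dev mode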
### Inquiries ###
@@ -593,11 +593,11 @@ class IOBase(metaclass=abc.ABCMeta):
return lines
def writelines(self, lines):
- """Write a list of lines to the stream.
-
- Line separators are not added, so it is usual for each of the lines
- provided to have a line separator at the end.
- """
+ """Write a list of lines to the stream.
+
+ Line separators are not added, so it is usual for each of the lines
+ provided to have a line separator at the end.
+ """
self._checkClosed()
for line in lines:
self.write(line)
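# Side note, not part of the patch: because writelines() adds no separators,
# callers append their own newlines:
import io

buf = io.StringIO()
buf.writelines(line + "\n" for line in ["alpha", "beta"])
assert buf.getvalue() == "alpha\nbeta\n"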
@@ -804,9 +804,9 @@ class _BufferedIOMixin(BufferedIOBase):
return pos
def truncate(self, pos=None):
- self._checkClosed()
- self._checkWritable()
-
+ self._checkClosed()
+ self._checkWritable()
+
# Flush the stream. We're mixing buffered I/O with lower-level I/O,
# and a flush may be necessary to synch both views of the current
# file state.
@@ -863,14 +863,14 @@ class _BufferedIOMixin(BufferedIOBase):
return self.raw.mode
def __getstate__(self):
- raise TypeError(f"cannot pickle {self.__class__.__name__!r} object")
+ raise TypeError(f"cannot pickle {self.__class__.__name__!r} object")
def __repr__(self):
modname = self.__class__.__module__
clsname = self.__class__.__qualname__
try:
name = self.name
- except AttributeError:
+ except AttributeError:
return "<{}.{}>".format(modname, clsname)
else:
return "<{}.{} name={!r}>".format(modname, clsname, name)
@@ -888,10 +888,10 @@ class BytesIO(BufferedIOBase):
"""Buffered I/O implementation using an in-memory bytes buffer."""
- # Initialize _buffer as soon as possible since it's used by __del__()
- # which calls close()
- _buffer = None
-
+ # Initialize _buffer as soon as possible since it's used by __del__()
+ # which calls close()
+ _buffer = None
+
def __init__(self, initial_bytes=None):
buf = bytearray()
if initial_bytes is not None:
@@ -919,8 +919,8 @@ class BytesIO(BufferedIOBase):
return memoryview(self._buffer)
def close(self):
- if self._buffer is not None:
- self._buffer.clear()
+ if self._buffer is not None:
+ self._buffer.clear()
super().close()
def read(self, size=-1):
@@ -1576,7 +1576,7 @@ class FileIO(RawIOBase):
raise IsADirectoryError(errno.EISDIR,
os.strerror(errno.EISDIR), file)
except AttributeError:
- # Ignore the AttributeError if stat.S_ISDIR or errno.EISDIR
+ # Ignore the AttributeError if stat.S_ISDIR or errno.EISDIR
# don't exist.
pass
self._blksize = getattr(fdfstat, 'st_blksize', 0)
@@ -1592,11 +1592,11 @@ class FileIO(RawIOBase):
# For consistent behaviour, we explicitly seek to the
# end of file (otherwise, it might be done only on the
# first write()).
- try:
- os.lseek(fd, 0, SEEK_END)
- except OSError as e:
- if e.errno != errno.ESPIPE:
- raise
+ try:
+ os.lseek(fd, 0, SEEK_END)
+ except OSError as e:
+ if e.errno != errno.ESPIPE:
+ raise
except:
if owned_fd is not None:
os.close(owned_fd)
@@ -1611,7 +1611,7 @@ class FileIO(RawIOBase):
self.close()
def __getstate__(self):
- raise TypeError(f"cannot pickle {self.__class__.__name__!r} object")
+ raise TypeError(f"cannot pickle {self.__class__.__name__!r} object")
def __repr__(self):
class_name = '%s.%s' % (self.__class__.__module__,
@@ -1821,7 +1821,7 @@ class TextIOBase(IOBase):
"""Base class for text I/O.
This class provides a character and line based interface to stream
- I/O. There is no public constructor.
+ I/O. There is no public constructor.
"""
def read(self, size=-1):
@@ -1994,10 +1994,10 @@ class TextIOWrapper(TextIOBase):
_CHUNK_SIZE = 2048
- # Initialize _buffer as soon as possible since it's used by __del__()
- # which calls close()
- _buffer = None
-
+ # Initialize _buffer as soon as possible since it's used by __del__()
+ # which calls close()
+ _buffer = None
+
# The write_through argument has no effect here since this
# implementation always writes through. The argument is present only
# so that the signature can match the signature of the C version.
@@ -2031,8 +2031,8 @@ class TextIOWrapper(TextIOBase):
else:
if not isinstance(errors, str):
raise ValueError("invalid errors: %r" % errors)
- if _CHECK_ERRORS:
- codecs.lookup_error(errors)
+ if _CHECK_ERRORS:
+ codecs.lookup_error(errors)
self._buffer = buffer
self._decoded_chars = '' # buffer for text returned from decoder
@@ -2090,13 +2090,13 @@ class TextIOWrapper(TextIOBase):
self.__class__.__qualname__)
try:
name = self.name
- except AttributeError:
+ except AttributeError:
pass
else:
result += " name={0!r}".format(name)
try:
mode = self.mode
- except AttributeError:
+ except AttributeError:
pass
else:
result += " mode={0!r}".format(mode)
@@ -2302,7 +2302,7 @@ class TextIOWrapper(TextIOBase):
return not eof
def _pack_cookie(self, position, dec_flags=0,
- bytes_to_feed=0, need_eof=False, chars_to_skip=0):
+ bytes_to_feed=0, need_eof=False, chars_to_skip=0):
# The meaning of a tell() cookie is: seek to position, set the
# decoder flags to dec_flags, read bytes_to_feed bytes, feed them
# into the decoder with need_eof as the EOF flag, then skip
@@ -2316,7 +2316,7 @@ class TextIOWrapper(TextIOBase):
rest, dec_flags = divmod(rest, 1<<64)
rest, bytes_to_feed = divmod(rest, 1<<64)
need_eof, chars_to_skip = divmod(rest, 1<<64)
- return position, dec_flags, bytes_to_feed, bool(need_eof), chars_to_skip
+ return position, dec_flags, bytes_to_feed, bool(need_eof), chars_to_skip
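# Side note, not part of the patch: the cookie is five fields multiplexed into
# one integer in base 2**64, and divmod peels them back off. A standalone
# round-trip of the same arithmetic:
def pack(position, dec_flags=0, bytes_to_feed=0, need_eof=False, chars_to_skip=0):
    return (position | dec_flags << 64 | bytes_to_feed << 128
            | chars_to_skip << 192 | bool(need_eof) << 256)

def unpack(cookie):
    rest, position = divmod(cookie, 1 << 64)
    rest, dec_flags = divmod(rest, 1 << 64)
    rest, bytes_to_feed = divmod(rest, 1 << 64)
    need_eof, chars_to_skip = divmod(rest, 1 << 64)
    return position, dec_flags, bytes_to_feed, bool(need_eof), chars_to_skip

assert unpack(pack(10, 1, 3, True, 2)) == (10, 1, 3, True, 2)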
def tell(self):
if not self._seekable:
@@ -2350,7 +2350,7 @@ class TextIOWrapper(TextIOBase):
# current pos.
# Rationale: calling decoder.decode() has a large overhead
# regardless of chunk size; we want the number of such calls to
- # be O(1) in most situations (common decoders, sensible input).
+ # be O(1) in most situations (common decoders, sensible input).
# Actually, it will be exactly 1 for fixed-size codecs (all
# 8-bit codecs, also UTF-16 and UTF-32).
skip_bytes = int(self._b2cratio * chars_to_skip)
@@ -2390,7 +2390,7 @@ class TextIOWrapper(TextIOBase):
# (a point where the decoder has nothing buffered, so seek()
# can safely start from there and advance to this location).
bytes_fed = 0
- need_eof = False
+ need_eof = False
# Chars decoded since `start_pos`
chars_decoded = 0
for i in range(skip_bytes, len(next_input)):
@@ -2407,7 +2407,7 @@ class TextIOWrapper(TextIOBase):
else:
# We didn't get enough decoded data; signal EOF to get more.
chars_decoded += len(decoder.decode(b'', final=True))
- need_eof = True
+ need_eof = True
if chars_decoded < chars_to_skip:
raise OSError("can't reconstruct logical file position")
@@ -2449,18 +2449,18 @@ class TextIOWrapper(TextIOBase):
raise ValueError("tell on closed file")
if not self._seekable:
raise UnsupportedOperation("underlying stream is not seekable")
- if whence == SEEK_CUR:
+ if whence == SEEK_CUR:
if cookie != 0:
raise UnsupportedOperation("can't do nonzero cur-relative seeks")
# Seeking to the current position should attempt to
# sync the underlying buffer with the current position.
whence = 0
cookie = self.tell()
- elif whence == SEEK_END:
+ elif whence == SEEK_END:
if cookie != 0:
raise UnsupportedOperation("can't do nonzero end-relative seeks")
self.flush()
- position = self.buffer.seek(0, whence)
+ position = self.buffer.seek(0, whence)
self._set_decoded_chars('')
self._snapshot = None
if self._decoder:
diff --git a/contrib/tools/python3/src/Lib/_strptime.py b/contrib/tools/python3/src/Lib/_strptime.py
index b97dfcce1e..8e259d20fb 100644
--- a/contrib/tools/python3/src/Lib/_strptime.py
+++ b/contrib/tools/python3/src/Lib/_strptime.py
@@ -182,7 +182,7 @@ class TimeRE(dict):
self.locale_time = LocaleTime()
base = super()
base.__init__({
- # The " [1-9]" part of the regex is to make %c from ANSI C work
+ # The " [1-9]" part of the regex is to make %c from ANSI C work
'd': r"(?P<d>3[0-1]|[1-2]\d|0[1-9]|[1-9]| [1-9])",
'f': r"(?P<f>[0-9]{1,6})",
'H': r"(?P<H>2[0-3]|[0-1]\d|\d)",
@@ -201,7 +201,7 @@ class TimeRE(dict):
#XXX: Does 'Y' need to worry about having less or more than
# 4 digits?
'Y': r"(?P<Y>\d\d\d\d)",
- 'z': r"(?P<z>[+-]\d\d:?[0-5]\d(:?[0-5]\d(\.\d{1,6})?)?|(?-i:Z))",
+ 'z': r"(?P<z>[+-]\d\d:?[0-5]\d(:?[0-5]\d(\.\d{1,6})?)?|(?-i:Z))",
'A': self.__seqToRE(self.locale_time.f_weekday, 'A'),
'a': self.__seqToRE(self.locale_time.a_weekday, 'a'),
'B': self.__seqToRE(self.locale_time.f_month[1:], 'B'),
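# Side note, not part of the patch: the extended %z pattern above accepts an
# optional colon and a literal 'Z', so ISO-8601 style offsets parse directly:
from datetime import datetime, timezone

dt = datetime.strptime("2021-06-01T12:00:00Z", "%Y-%m-%dT%H:%M:%S%z")
assert dt.tzinfo == timezone.utc
dt = datetime.strptime("2021-06-01T12:00:00+05:30", "%Y-%m-%dT%H:%M:%S%z")
assert dt.utcoffset().total_seconds() == 5.5 * 3600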
diff --git a/contrib/tools/python3/src/Lib/_threading_local.py b/contrib/tools/python3/src/Lib/_threading_local.py
index b006d76c4e..efdbff8ff4 100644
--- a/contrib/tools/python3/src/Lib/_threading_local.py
+++ b/contrib/tools/python3/src/Lib/_threading_local.py
@@ -56,7 +56,7 @@ You can create custom local objects by subclassing the local class:
>>> class MyLocal(local):
... number = 2
- ... def __init__(self, /, **kw):
+ ... def __init__(self, /, **kw):
... self.__dict__.update(kw)
... def squared(self):
... return self.number ** 2
@@ -204,7 +204,7 @@ def _patch(self):
class local:
__slots__ = '_local__impl', '__dict__'
- def __new__(cls, /, *args, **kw):
+ def __new__(cls, /, *args, **kw):
if (args or kw) and (cls.__init__ is object.__init__):
raise TypeError("Initialization arguments are not supported")
self = object.__new__(cls)
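# Side note, not part of the patch: the '/' marker (Python 3.8) makes the
# first parameter positional-only, so **kw may legitimately contain keys named
# 'cls' or 'self'. 'make' is a hypothetical stand-in for __new__:
def make(cls, /, **kw):
    return cls, kw

assert make(dict, cls="not a clash") == (dict, {"cls": "not a clash"})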
diff --git a/contrib/tools/python3/src/Lib/_weakrefset.py b/contrib/tools/python3/src/Lib/_weakrefset.py
index 2a27684324..a3b2d0c28e 100644
--- a/contrib/tools/python3/src/Lib/_weakrefset.py
+++ b/contrib/tools/python3/src/Lib/_weakrefset.py
@@ -3,7 +3,7 @@
# by abc.py to load everything else at startup.
from _weakref import ref
-from types import GenericAlias
+from types import GenericAlias
__all__ = ['WeakSet']
@@ -51,14 +51,14 @@ class WeakSet:
self.update(data)
def _commit_removals(self):
- pop = self._pending_removals.pop
+ pop = self._pending_removals.pop
discard = self.data.discard
- while True:
- try:
- item = pop()
- except IndexError:
- return
- discard(item)
+ while True:
+ try:
+ item = pop()
+ except IndexError:
+ return
+ discard(item)
def __iter__(self):
with _IterationGuard(self):
@@ -199,8 +199,8 @@ class WeakSet:
def isdisjoint(self, other):
return len(self.intersection(other)) == 0
-
- def __repr__(self):
- return repr(self.data)
-
- __class_getitem__ = classmethod(GenericAlias)
+
+ def __repr__(self):
+ return repr(self.data)
+
+ __class_getitem__ = classmethod(GenericAlias)
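# Side note, not part of the patch: __class_getitem__ (PEP 585, Python 3.9)
# lets WeakSet be subscripted in annotations without going through typing:
from weakref import WeakSet

class Node: ...

live: WeakSet[Node] = WeakSet()   # WeakSet[Node] is a types.GenericAlias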
diff --git a/contrib/tools/python3/src/Lib/abc.py b/contrib/tools/python3/src/Lib/abc.py
index 9de128e236..38498eeef4 100644
--- a/contrib/tools/python3/src/Lib/abc.py
+++ b/contrib/tools/python3/src/Lib/abc.py
@@ -11,8 +11,8 @@ def abstractmethod(funcobj):
class that has a metaclass derived from ABCMeta cannot be
instantiated unless all of its abstract methods are overridden.
The abstract methods can be called using any of the normal
- 'super' call mechanisms. abstractmethod() may be used to declare
- abstract methods for properties and descriptors.
+ 'super' call mechanisms. abstractmethod() may be used to declare
+ abstract methods for properties and descriptors.
Usage:
@@ -28,14 +28,14 @@ def abstractmethod(funcobj):
class abstractclassmethod(classmethod):
"""A decorator indicating abstract classmethods.
- Deprecated, use 'classmethod' with 'abstractmethod' instead:
-
- class C(ABC):
- @classmethod
- @abstractmethod
- def my_abstract_classmethod(cls, ...):
- ...
-
+ Deprecated, use 'classmethod' with 'abstractmethod' instead:
+
+ class C(ABC):
+ @classmethod
+ @abstractmethod
+ def my_abstract_classmethod(cls, ...):
+ ...
+
"""
__isabstractmethod__ = True
@@ -48,14 +48,14 @@ class abstractclassmethod(classmethod):
class abstractstaticmethod(staticmethod):
"""A decorator indicating abstract staticmethods.
- Deprecated, use 'staticmethod' with 'abstractmethod' instead:
-
- class C(ABC):
- @staticmethod
- @abstractmethod
- def my_abstract_staticmethod(...):
- ...
-
+ Deprecated, use 'staticmethod' with 'abstractmethod' instead:
+
+ class C(ABC):
+ @staticmethod
+ @abstractmethod
+ def my_abstract_staticmethod(...):
+ ...
+
"""
__isabstractmethod__ = True
@@ -68,14 +68,14 @@ class abstractstaticmethod(staticmethod):
class abstractproperty(property):
"""A decorator indicating abstract properties.
- Deprecated, use 'property' with 'abstractmethod' instead:
-
- class C(ABC):
- @property
- @abstractmethod
- def my_abstract_property(self):
- ...
-
+ Deprecated, use 'property' with 'abstractmethod' instead:
+
+ class C(ABC):
+ @property
+ @abstractmethod
+ def my_abstract_property(self):
+ ...
+
"""
__isabstractmethod__ = True
diff --git a/contrib/tools/python3/src/Lib/aifc.py b/contrib/tools/python3/src/Lib/aifc.py
index ed5da7d893..d35a9bb36a 100644
--- a/contrib/tools/python3/src/Lib/aifc.py
+++ b/contrib/tools/python3/src/Lib/aifc.py
@@ -138,7 +138,7 @@ import struct
import builtins
import warnings
-__all__ = ["Error", "open"]
+__all__ = ["Error", "open"]
class Error(Exception):
pass
diff --git a/contrib/tools/python3/src/Lib/antigravity.py b/contrib/tools/python3/src/Lib/antigravity.py
index 6dc5207335..43ed91f689 100644
--- a/contrib/tools/python3/src/Lib/antigravity.py
+++ b/contrib/tools/python3/src/Lib/antigravity.py
@@ -12,6 +12,6 @@ def geohash(latitude, longitude, datedow):
'''
# https://xkcd.com/426/
- h = hashlib.md5(datedow, usedforsecurity=False).hexdigest()
+ h = hashlib.md5(datedow, usedforsecurity=False).hexdigest()
p, q = [('%f' % float.fromhex('0.' + x)) for x in (h[:16], h[16:32])]
print('%d%s %d%s' % (latitude, p[1:], longitude, q[1:]))
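# Side note, not part of the patch: usedforsecurity=False (hashlib, Python
# 3.9+) marks this MD5 use as non-cryptographic so it keeps working on
# FIPS-restricted builds:
import hashlib

h = hashlib.md5(b"2005-05-26-10458.68", usedforsecurity=False).hexdigest()
assert len(h) == 32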
diff --git a/contrib/tools/python3/src/Lib/argparse.py b/contrib/tools/python3/src/Lib/argparse.py
index 40569437ac..f9b0dd4aa4 100644
--- a/contrib/tools/python3/src/Lib/argparse.py
+++ b/contrib/tools/python3/src/Lib/argparse.py
@@ -1,5 +1,5 @@
# Author: Steven J. Bethard <steven.bethard@gmail.com>.
-# New maintainer as of 29 August 2019: Raymond Hettinger <raymond.hettinger@gmail.com>
+# New maintainer as of 29 August 2019: Raymond Hettinger <raymond.hettinger@gmail.com>
"""Command-line parsing library
@@ -67,7 +67,7 @@ __all__ = [
'ArgumentParser',
'ArgumentError',
'ArgumentTypeError',
- 'BooleanOptionalAction',
+ 'BooleanOptionalAction',
'FileType',
'HelpFormatter',
'ArgumentDefaultsHelpFormatter',
@@ -129,7 +129,7 @@ class _AttributeHolder(object):
return '%s(%s)' % (type_name, ', '.join(arg_strings))
def _get_kwargs(self):
- return list(self.__dict__.items())
+ return list(self.__dict__.items())
def _get_args(self):
return []
@@ -166,8 +166,8 @@ class HelpFormatter(object):
# default setting for width
if width is None:
- import shutil
- width = shutil.get_terminal_size().columns
+ import shutil
+ width = shutil.get_terminal_size().columns
width -= 2
self._prog = prog
@@ -264,7 +264,7 @@ class HelpFormatter(object):
invocations.append(get_invocation(subaction))
# update the maximum item length
- invocation_length = max(map(len, invocations))
+ invocation_length = max(map(len, invocations))
action_length = invocation_length + self._current_indent
self._action_max_length = max(self._action_max_length,
action_length)
@@ -392,9 +392,9 @@ class HelpFormatter(object):
group_actions = set()
inserts = {}
for group in groups:
- if not group._group_actions:
- raise ValueError(f'empty group {group}')
-
+ if not group._group_actions:
+ raise ValueError(f'empty group {group}')
+
try:
start = actions.index(group._group_actions[0])
except ValueError:
@@ -409,19 +409,19 @@ class HelpFormatter(object):
inserts[start] += ' ['
else:
inserts[start] = '['
- if end in inserts:
- inserts[end] += ']'
- else:
- inserts[end] = ']'
+ if end in inserts:
+ inserts[end] += ']'
+ else:
+ inserts[end] = ']'
else:
if start in inserts:
inserts[start] += ' ('
else:
inserts[start] = '('
- if end in inserts:
- inserts[end] += ')'
- else:
- inserts[end] = ')'
+ if end in inserts:
+ inserts[end] += ')'
+ else:
+ inserts[end] = ')'
for i in range(start + 1, end):
inserts[i] = '|'
@@ -458,7 +458,7 @@ class HelpFormatter(object):
# if the Optional doesn't take a value, format is:
# -s or --long
if action.nargs == 0:
- part = action.format_usage()
+ part = action.format_usage()
# if the Optional takes a value, format is:
# -s ARGS or --long ARGS
@@ -529,13 +529,13 @@ class HelpFormatter(object):
parts = [action_header]
# if there was help for the action, add lines of help text
- if action.help and action.help.strip():
+ if action.help and action.help.strip():
help_text = self._expand_help(action)
- if help_text:
- help_lines = self._split_lines(help_text, help_width)
- parts.append('%*s%s\n' % (indent_first, '', help_lines[0]))
- for line in help_lines[1:]:
- parts.append('%*s%s\n' % (help_position, '', line))
+ if help_text:
+ help_lines = self._split_lines(help_text, help_width)
+ parts.append('%*s%s\n' % (indent_first, '', help_lines[0]))
+ for line in help_lines[1:]:
+ parts.append('%*s%s\n' % (help_position, '', line))
# or add a newline if the description doesn't end with one
elif not action_header.endswith('\n'):
@@ -595,11 +595,11 @@ class HelpFormatter(object):
elif action.nargs == OPTIONAL:
result = '[%s]' % get_metavar(1)
elif action.nargs == ZERO_OR_MORE:
- metavar = get_metavar(1)
- if len(metavar) == 2:
- result = '[%s [%s ...]]' % metavar
- else:
- result = '[%s ...]' % metavar
+ metavar = get_metavar(1)
+ if len(metavar) == 2:
+ result = '[%s [%s ...]]' % metavar
+ else:
+ result = '[%s ...]' % metavar
elif action.nargs == ONE_OR_MORE:
result = '%s [%s ...]' % get_metavar(2)
elif action.nargs == REMAINDER:
@@ -609,10 +609,10 @@ class HelpFormatter(object):
elif action.nargs == SUPPRESS:
result = ''
else:
- try:
- formats = ['%s' for _ in range(action.nargs)]
- except TypeError:
- raise ValueError("invalid nargs value") from None
+ try:
+ formats = ['%s' for _ in range(action.nargs)]
+ except TypeError:
+ raise ValueError("invalid nargs value") from None
result = ' '.join(formats) % get_metavar(action.nargs)
return result
@@ -731,8 +731,8 @@ def _get_action_name(argument):
return argument.metavar
elif argument.dest not in (None, SUPPRESS):
return argument.dest
- elif argument.choices:
- return '{' + ','.join(argument.choices) + '}'
+ elif argument.choices:
+ return '{' + ','.join(argument.choices) + '}'
else:
return None
@@ -853,53 +853,53 @@ class Action(_AttributeHolder):
]
return [(name, getattr(self, name)) for name in names]
- def format_usage(self):
- return self.option_strings[0]
-
+ def format_usage(self):
+ return self.option_strings[0]
+
def __call__(self, parser, namespace, values, option_string=None):
raise NotImplementedError(_('.__call__() not defined'))
-class BooleanOptionalAction(Action):
- def __init__(self,
- option_strings,
- dest,
- default=None,
- type=None,
- choices=None,
- required=False,
- help=None,
- metavar=None):
-
- _option_strings = []
- for option_string in option_strings:
- _option_strings.append(option_string)
-
- if option_string.startswith('--'):
- option_string = '--no-' + option_string[2:]
- _option_strings.append(option_string)
-
- if help is not None and default is not None:
- help += " (default: %(default)s)"
-
- super().__init__(
- option_strings=_option_strings,
- dest=dest,
- nargs=0,
- default=default,
- type=type,
- choices=choices,
- required=required,
- help=help,
- metavar=metavar)
-
- def __call__(self, parser, namespace, values, option_string=None):
- if option_string in self.option_strings:
- setattr(namespace, self.dest, not option_string.startswith('--no-'))
-
- def format_usage(self):
- return ' | '.join(self.option_strings)
-
-
+class BooleanOptionalAction(Action):
+ def __init__(self,
+ option_strings,
+ dest,
+ default=None,
+ type=None,
+ choices=None,
+ required=False,
+ help=None,
+ metavar=None):
+
+ _option_strings = []
+ for option_string in option_strings:
+ _option_strings.append(option_string)
+
+ if option_string.startswith('--'):
+ option_string = '--no-' + option_string[2:]
+ _option_strings.append(option_string)
+
+ if help is not None and default is not None:
+ help += " (default: %(default)s)"
+
+ super().__init__(
+ option_strings=_option_strings,
+ dest=dest,
+ nargs=0,
+ default=default,
+ type=type,
+ choices=choices,
+ required=required,
+ help=help,
+ metavar=metavar)
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ if option_string in self.option_strings:
+ setattr(namespace, self.dest, not option_string.startswith('--no-'))
+
+ def format_usage(self):
+ return ' | '.join(self.option_strings)
+
+
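# Side note, not part of the patch: BooleanOptionalAction (new in Python 3.9)
# derives a matching --no- flag for every --flag it is given:
import argparse

p = argparse.ArgumentParser()
p.add_argument("--cache", action=argparse.BooleanOptionalAction, default=True)
assert p.parse_args([]).cache is True
assert p.parse_args(["--no-cache"]).cache is False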
class _StoreAction(Action):
def __init__(self,
@@ -914,7 +914,7 @@ class _StoreAction(Action):
help=None,
metavar=None):
if nargs == 0:
- raise ValueError('nargs for store actions must be != 0; if you '
+ raise ValueError('nargs for store actions must be != 0; if you '
'have nothing to store, actions such as store '
'true or store const may be more appropriate')
if const is not None and nargs != OPTIONAL:
@@ -1006,7 +1006,7 @@ class _AppendAction(Action):
help=None,
metavar=None):
if nargs == 0:
- raise ValueError('nargs for append actions must be != 0; if arg '
+ raise ValueError('nargs for append actions must be != 0; if arg '
'strings are not supplying the value to append, '
'the append const action may be more appropriate')
if const is not None and nargs != OPTIONAL:
@@ -1212,18 +1212,18 @@ class _SubParsersAction(Action):
# namespace for the relevant parts.
subnamespace, arg_strings = parser.parse_known_args(arg_strings, None)
for key, value in vars(subnamespace).items():
- setattr(namespace, key, value)
+ setattr(namespace, key, value)
if arg_strings:
vars(namespace).setdefault(_UNRECOGNIZED_ARGS_ATTR, [])
getattr(namespace, _UNRECOGNIZED_ARGS_ATTR).extend(arg_strings)
-class _ExtendAction(_AppendAction):
- def __call__(self, parser, namespace, values, option_string=None):
- items = getattr(namespace, self.dest, None)
- items = _copy_items(items)
- items.extend(values)
- setattr(namespace, self.dest, items)
+class _ExtendAction(_AppendAction):
+ def __call__(self, parser, namespace, values, option_string=None):
+ items = getattr(namespace, self.dest, None)
+ items = _copy_items(items)
+ items.extend(values)
+ setattr(namespace, self.dest, items)
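# Side note, not part of the patch: the 'extend' action (registered further
# below, new in Python 3.8) flattens repeated multi-value options into one list:
import argparse

p = argparse.ArgumentParser()
p.add_argument("--tag", action="extend", nargs="+", default=[])
assert p.parse_args(["--tag", "a", "b", "--tag", "c"]).tag == ["a", "b", "c"]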
# ==============
# Type classes
@@ -1268,9 +1268,9 @@ class FileType(object):
return open(string, self._mode, self._bufsize, self._encoding,
self._errors)
except OSError as e:
- args = {'filename': string, 'error': e}
- message = _("can't open '%(filename)s': %(error)s")
- raise ArgumentTypeError(message % args)
+ args = {'filename': string, 'error': e}
+ message = _("can't open '%(filename)s': %(error)s")
+ raise ArgumentTypeError(message % args)
def __repr__(self):
args = self._mode, self._bufsize
@@ -1333,7 +1333,7 @@ class _ActionsContainer(object):
self.register('action', 'help', _HelpAction)
self.register('action', 'version', _VersionAction)
self.register('action', 'parsers', _SubParsersAction)
- self.register('action', 'extend', _ExtendAction)
+ self.register('action', 'extend', _ExtendAction)
# raise an exception if the conflict handler is invalid
self._get_handler()
@@ -1426,10 +1426,10 @@ class _ActionsContainer(object):
if not callable(type_func):
raise ValueError('%r is not callable' % (type_func,))
- if type_func is FileType:
- raise ValueError('%r is a FileType class object, instance of it'
- ' must be passed' % (type_func,))
-
+ if type_func is FileType:
+ raise ValueError('%r is a FileType class object, instance of it'
+ ' must be passed' % (type_func,))
+
# raise an error if the metavar does not match the type
if hasattr(self, "_get_formatter"):
try:
@@ -1544,8 +1544,8 @@ class _ActionsContainer(object):
# strings starting with two prefix characters are long options
option_strings.append(option_string)
- if len(option_string) > 1 and option_string[1] in self.prefix_chars:
- long_option_strings.append(option_string)
+ if len(option_string) > 1 and option_string[1] in self.prefix_chars:
+ long_option_strings.append(option_string)
# infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'
dest = kwargs.pop('dest', None)
@@ -1685,8 +1685,8 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
- conflict_handler -- String indicating how to handle conflicts
- add_help -- Add a -h/-help option
- allow_abbrev -- Allow long options to be abbreviated unambiguously
- - exit_on_error -- Determines whether or not ArgumentParser exits with
- error info when an error occurs
+ - exit_on_error -- Determines whether or not ArgumentParser exits with
+ error info when an error occurs
"""
def __init__(self,
@@ -1701,8 +1701,8 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
argument_default=None,
conflict_handler='error',
add_help=True,
- allow_abbrev=True,
- exit_on_error=True):
+ allow_abbrev=True,
+ exit_on_error=True):
superinit = super(ArgumentParser, self).__init__
superinit(description=description,
@@ -1721,7 +1721,7 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
self.fromfile_prefix_chars = fromfile_prefix_chars
self.add_help = add_help
self.allow_abbrev = allow_abbrev
- self.exit_on_error = exit_on_error
+ self.exit_on_error = exit_on_error
add_group = self.add_argument_group
self._positionals = add_group(_('positional arguments'))
@@ -1846,26 +1846,26 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
if action.default is not SUPPRESS:
setattr(namespace, action.dest, action.default)
- # add any parser defaults that aren't present
- for dest in self._defaults:
- if not hasattr(namespace, dest):
- setattr(namespace, dest, self._defaults[dest])
-
+ # add any parser defaults that aren't present
+ for dest in self._defaults:
+ if not hasattr(namespace, dest):
+ setattr(namespace, dest, self._defaults[dest])
+
# parse the arguments and exit if there are any errors
- if self.exit_on_error:
- try:
- namespace, args = self._parse_known_args(args, namespace)
- except ArgumentError:
- err = _sys.exc_info()[1]
- self.error(str(err))
- else:
+ if self.exit_on_error:
+ try:
+ namespace, args = self._parse_known_args(args, namespace)
+ except ArgumentError:
+ err = _sys.exc_info()[1]
+ self.error(str(err))
+ else:
namespace, args = self._parse_known_args(args, namespace)
- if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR):
- args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR))
- delattr(namespace, _UNRECOGNIZED_ARGS_ATTR)
- return namespace, args
-
+ if hasattr(namespace, _UNRECOGNIZED_ARGS_ATTR):
+ args.extend(getattr(namespace, _UNRECOGNIZED_ARGS_ATTR))
+ delattr(namespace, _UNRECOGNIZED_ARGS_ATTR)
+ return namespace, args
+
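# Side note, not part of the patch: with exit_on_error=False (Python 3.9) the
# branch above lets ArgumentError propagate instead of calling error(), so
# parse failures become catchable:
import argparse

p = argparse.ArgumentParser(exit_on_error=False)
p.add_argument("--n", type=int)
try:
    p.parse_args(["--n", "oops"])
except argparse.ArgumentError as err:
    print("recovered:", err)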
def _parse_known_args(self, arg_strings, namespace):
# replace arg strings that are file references
if self.fromfile_prefix_chars is not None:
@@ -2153,9 +2153,9 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
OPTIONAL: _('expected at most one argument'),
ONE_OR_MORE: _('expected at least one argument'),
}
- msg = nargs_errors.get(action.nargs)
- if msg is None:
- msg = ngettext('expected %s argument',
+ msg = nargs_errors.get(action.nargs)
+ if msg is None:
+ msg = ngettext('expected %s argument',
'expected %s arguments',
action.nargs) % action.nargs
raise ArgumentError(action, msg)
@@ -2204,23 +2204,23 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
action = self._option_string_actions[option_string]
return action, option_string, explicit_arg
- # search through all possible prefixes of the option string
- # and all actions in the parser for possible interpretations
- option_tuples = self._get_option_tuples(arg_string)
+ # search through all possible prefixes of the option string
+ # and all actions in the parser for possible interpretations
+ option_tuples = self._get_option_tuples(arg_string)
- # if multiple actions match, the option string was ambiguous
- if len(option_tuples) > 1:
- options = ', '.join([option_string
- for action, option_string, explicit_arg in option_tuples])
- args = {'option': arg_string, 'matches': options}
- msg = _('ambiguous option: %(option)s could match %(matches)s')
- self.error(msg % args)
+ # if multiple actions match, the option string was ambiguous
+ if len(option_tuples) > 1:
+ options = ', '.join([option_string
+ for action, option_string, explicit_arg in option_tuples])
+ args = {'option': arg_string, 'matches': options}
+ msg = _('ambiguous option: %(option)s could match %(matches)s')
+ self.error(msg % args)
- # if exactly one action matched, this segmentation is good,
- # so return the parsed action
- elif len(option_tuples) == 1:
- option_tuple, = option_tuples
- return option_tuple
+ # if exactly one action matched, this segmentation is good,
+ # so return the parsed action
+ elif len(option_tuples) == 1:
+ option_tuple, = option_tuples
+ return option_tuple
# if it was not found as an option, but it looks like a negative
# number, it was meant to be positional
@@ -2244,17 +2244,17 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
# split at the '='
chars = self.prefix_chars
if option_string[0] in chars and option_string[1] in chars:
- if self.allow_abbrev:
- if '=' in option_string:
- option_prefix, explicit_arg = option_string.split('=', 1)
- else:
- option_prefix = option_string
- explicit_arg = None
- for option_string in self._option_string_actions:
- if option_string.startswith(option_prefix):
- action = self._option_string_actions[option_string]
- tup = action, option_string, explicit_arg
- result.append(tup)
+ if self.allow_abbrev:
+ if '=' in option_string:
+ option_prefix, explicit_arg = option_string.split('=', 1)
+ else:
+ option_prefix = option_string
+ explicit_arg = None
+ for option_string in self._option_string_actions:
+ if option_string.startswith(option_prefix):
+ action = self._option_string_actions[option_string]
+ tup = action, option_string, explicit_arg
+ result.append(tup)
# single character options can be concatenated with their arguments
# but multiple character options always have to have their argument
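# Side note, not part of the patch: the guard above runs prefix matching only
# when abbreviations are enabled:
import argparse

p = argparse.ArgumentParser()
p.add_argument("--verbose", action="store_true")
assert p.parse_args(["--verb"]).verbose        # unambiguous prefix matches

strict = argparse.ArgumentParser(allow_abbrev=False)
strict.add_argument("--verbose", action="store_true")
# strict.parse_args(["--verb"]) now fails with "unrecognized arguments"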
diff --git a/contrib/tools/python3/src/Lib/ast.py b/contrib/tools/python3/src/Lib/ast.py
index 396eea1830..0426c11766 100644
--- a/contrib/tools/python3/src/Lib/ast.py
+++ b/contrib/tools/python3/src/Lib/ast.py
@@ -24,31 +24,31 @@
:copyright: Copyright 2008 by Armin Ronacher.
:license: Python License.
"""
-import sys
+import sys
from _ast import *
-from contextlib import contextmanager, nullcontext
-from enum import IntEnum, auto
+from contextlib import contextmanager, nullcontext
+from enum import IntEnum, auto
-def parse(source, filename='<unknown>', mode='exec', *,
- type_comments=False, feature_version=None):
+def parse(source, filename='<unknown>', mode='exec', *,
+ type_comments=False, feature_version=None):
"""
Parse the source into an AST node.
Equivalent to compile(source, filename, mode, PyCF_ONLY_AST).
- Pass type_comments=True to get back type comments where the syntax allows.
+ Pass type_comments=True to get back type comments where the syntax allows.
"""
- flags = PyCF_ONLY_AST
- if type_comments:
- flags |= PyCF_TYPE_COMMENTS
- if isinstance(feature_version, tuple):
- major, minor = feature_version # Should be a 2-tuple.
- assert major == 3
- feature_version = minor
- elif feature_version is None:
- feature_version = -1
- # Else it should be an int giving the minor version for 3.x.
- return compile(source, filename, mode, flags,
- _feature_version=feature_version)
+ flags = PyCF_ONLY_AST
+ if type_comments:
+ flags |= PyCF_TYPE_COMMENTS
+ if isinstance(feature_version, tuple):
+ major, minor = feature_version # Should be a 2-tuple.
+ assert major == 3
+ feature_version = minor
+ elif feature_version is None:
+ feature_version = -1
+ # Else it should be an int giving the minor version for 3.x.
+ return compile(source, filename, mode, flags,
+ _feature_version=feature_version)
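# Side note, not part of the patch: type_comments=True (Python 3.8) keeps
# '# type:' annotations on the returned tree:
import ast

tree = ast.parse("x = []  # type: list[int]", type_comments=True)
assert tree.body[0].type_comment == "list[int]"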
def literal_eval(node_or_string):
@@ -62,12 +62,12 @@ def literal_eval(node_or_string):
node_or_string = parse(node_or_string, mode='eval')
if isinstance(node_or_string, Expression):
node_or_string = node_or_string.body
- def _raise_malformed_node(node):
- raise ValueError(f'malformed node or string: {node!r}')
+ def _raise_malformed_node(node):
+ raise ValueError(f'malformed node or string: {node!r}')
def _convert_num(node):
- if not isinstance(node, Constant) or type(node.value) not in (int, float, complex):
- _raise_malformed_node(node)
- return node.value
+ if not isinstance(node, Constant) or type(node.value) not in (int, float, complex):
+ _raise_malformed_node(node)
+ return node.value
def _convert_signed_num(node):
if isinstance(node, UnaryOp) and isinstance(node.op, (UAdd, USub)):
operand = _convert_num(node.operand)
@@ -85,12 +85,12 @@ def literal_eval(node_or_string):
return list(map(_convert, node.elts))
elif isinstance(node, Set):
return set(map(_convert, node.elts))
- elif (isinstance(node, Call) and isinstance(node.func, Name) and
- node.func.id == 'set' and node.args == node.keywords == []):
- return set()
+ elif (isinstance(node, Call) and isinstance(node.func, Name) and
+ node.func.id == 'set' and node.args == node.keywords == []):
+ return set()
elif isinstance(node, Dict):
- if len(node.keys) != len(node.values):
- _raise_malformed_node(node)
+ if len(node.keys) != len(node.values):
+ _raise_malformed_node(node)
return dict(zip(map(_convert, node.keys),
map(_convert, node.values)))
elif isinstance(node, BinOp) and isinstance(node.op, (Add, Sub)):
@@ -105,87 +105,87 @@ def literal_eval(node_or_string):
return _convert(node_or_string)
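# Side note, not part of the patch: the hunks above special-case the empty set
# (which has no literal form) and reject malformed nodes up front:
import ast

assert ast.literal_eval("set()") == set()
assert ast.literal_eval("-2+3j") == (-2 + 3j)
try:
    ast.literal_eval("set([1])")   # arbitrary calls are still not literals
except ValueError as err:
    print(err)                     # malformed node or string: ...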
-def dump(node, annotate_fields=True, include_attributes=False, *, indent=None):
+def dump(node, annotate_fields=True, include_attributes=False, *, indent=None):
"""
- Return a formatted dump of the tree in node. This is mainly useful for
- debugging purposes. If annotate_fields is true (by default),
- the returned string will show the names and the values for fields.
- If annotate_fields is false, the result string will be more compact by
- omitting unambiguous field names. Attributes such as line
+ Return a formatted dump of the tree in node. This is mainly useful for
+ debugging purposes. If annotate_fields is true (by default),
+ the returned string will show the names and the values for fields.
+ If annotate_fields is false, the result string will be more compact by
+ omitting unambiguous field names. Attributes such as line
numbers and column offsets are not dumped by default. If this is wanted,
- include_attributes can be set to true. If indent is a non-negative
- integer or string, then the tree will be pretty-printed with that indent
- level. None (the default) selects the single line representation.
+ include_attributes can be set to true. If indent is a non-negative
+ integer or string, then the tree will be pretty-printed with that indent
+ level. None (the default) selects the single line representation.
"""
- def _format(node, level=0):
- if indent is not None:
- level += 1
- prefix = '\n' + indent * level
- sep = ',\n' + indent * level
- else:
- prefix = ''
- sep = ', '
+ def _format(node, level=0):
+ if indent is not None:
+ level += 1
+ prefix = '\n' + indent * level
+ sep = ',\n' + indent * level
+ else:
+ prefix = ''
+ sep = ', '
if isinstance(node, AST):
- cls = type(node)
- args = []
- allsimple = True
- keywords = annotate_fields
- for name in node._fields:
- try:
- value = getattr(node, name)
- except AttributeError:
- keywords = True
- continue
- if value is None and getattr(cls, name, ...) is None:
- keywords = True
- continue
- value, simple = _format(value, level)
- allsimple = allsimple and simple
- if keywords:
- args.append('%s=%s' % (name, value))
- else:
- args.append(value)
+ cls = type(node)
+ args = []
+ allsimple = True
+ keywords = annotate_fields
+ for name in node._fields:
+ try:
+ value = getattr(node, name)
+ except AttributeError:
+ keywords = True
+ continue
+ if value is None and getattr(cls, name, ...) is None:
+ keywords = True
+ continue
+ value, simple = _format(value, level)
+ allsimple = allsimple and simple
+ if keywords:
+ args.append('%s=%s' % (name, value))
+ else:
+ args.append(value)
if include_attributes and node._attributes:
- for name in node._attributes:
- try:
- value = getattr(node, name)
- except AttributeError:
- continue
- if value is None and getattr(cls, name, ...) is None:
- continue
- value, simple = _format(value, level)
- allsimple = allsimple and simple
- args.append('%s=%s' % (name, value))
- if allsimple and len(args) <= 3:
- return '%s(%s)' % (node.__class__.__name__, ', '.join(args)), not args
- return '%s(%s%s)' % (node.__class__.__name__, prefix, sep.join(args)), False
+ for name in node._attributes:
+ try:
+ value = getattr(node, name)
+ except AttributeError:
+ continue
+ if value is None and getattr(cls, name, ...) is None:
+ continue
+ value, simple = _format(value, level)
+ allsimple = allsimple and simple
+ args.append('%s=%s' % (name, value))
+ if allsimple and len(args) <= 3:
+ return '%s(%s)' % (node.__class__.__name__, ', '.join(args)), not args
+ return '%s(%s%s)' % (node.__class__.__name__, prefix, sep.join(args)), False
elif isinstance(node, list):
- if not node:
- return '[]', True
- return '[%s%s]' % (prefix, sep.join(_format(x, level)[0] for x in node)), False
- return repr(node), True
-
+ if not node:
+ return '[]', True
+ return '[%s%s]' % (prefix, sep.join(_format(x, level)[0] for x in node)), False
+ return repr(node), True
+
if not isinstance(node, AST):
raise TypeError('expected AST, got %r' % node.__class__.__name__)
- if indent is not None and not isinstance(indent, str):
- indent = ' ' * indent
- return _format(node)[0]
+ if indent is not None and not isinstance(indent, str):
+ indent = ' ' * indent
+ return _format(node)[0]
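# Side note, not part of the patch: the indent parameter (Python 3.9) turns
# the single-line dump into a pretty-printed tree, roughly:
import ast

print(ast.dump(ast.parse("x = 1"), indent=4))
# Module(
#     body=[
#         Assign(
#             targets=[Name(id='x', ctx=Store())],
#             value=Constant(value=1))], ...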
def copy_location(new_node, old_node):
"""
- Copy source location (`lineno`, `col_offset`, `end_lineno`, and `end_col_offset`
- attributes) from *old_node* to *new_node* if possible, and return *new_node*.
+ Copy source location (`lineno`, `col_offset`, `end_lineno`, and `end_col_offset`
+ attributes) from *old_node* to *new_node* if possible, and return *new_node*.
"""
- for attr in 'lineno', 'col_offset', 'end_lineno', 'end_col_offset':
- if attr in old_node._attributes and attr in new_node._attributes:
- value = getattr(old_node, attr, None)
- # end_lineno and end_col_offset are optional attributes, and they
- # should be copied whether the value is None or not.
- if value is not None or (
- hasattr(old_node, attr) and attr.startswith("end_")
- ):
- setattr(new_node, attr, value)
+ for attr in 'lineno', 'col_offset', 'end_lineno', 'end_col_offset':
+ if attr in old_node._attributes and attr in new_node._attributes:
+ value = getattr(old_node, attr, None)
+ # end_lineno and end_col_offset are optional attributes, and they
+ # should be copied whether the value is None or not.
+ if value is not None or (
+ hasattr(old_node, attr) and attr.startswith("end_")
+ ):
+ setattr(new_node, attr, value)
return new_node
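# Side note, not part of the patch: a typical use is stamping a replacement
# node with the location of the node it replaces so compile() accepts it:
import ast

old = ast.parse("answer").body[0].value          # Name node with locations
new = ast.copy_location(ast.Constant(value=42), old)
assert (new.lineno, new.col_offset) == (1, 0)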
@@ -197,47 +197,47 @@ def fix_missing_locations(node):
recursively where not already set, by setting them to the values of the
parent node. It works recursively starting at *node*.
"""
- def _fix(node, lineno, col_offset, end_lineno, end_col_offset):
+ def _fix(node, lineno, col_offset, end_lineno, end_col_offset):
if 'lineno' in node._attributes:
if not hasattr(node, 'lineno'):
node.lineno = lineno
else:
lineno = node.lineno
- if 'end_lineno' in node._attributes:
- if getattr(node, 'end_lineno', None) is None:
- node.end_lineno = end_lineno
- else:
- end_lineno = node.end_lineno
+ if 'end_lineno' in node._attributes:
+ if getattr(node, 'end_lineno', None) is None:
+ node.end_lineno = end_lineno
+ else:
+ end_lineno = node.end_lineno
if 'col_offset' in node._attributes:
if not hasattr(node, 'col_offset'):
node.col_offset = col_offset
else:
col_offset = node.col_offset
- if 'end_col_offset' in node._attributes:
- if getattr(node, 'end_col_offset', None) is None:
- node.end_col_offset = end_col_offset
- else:
- end_col_offset = node.end_col_offset
+ if 'end_col_offset' in node._attributes:
+ if getattr(node, 'end_col_offset', None) is None:
+ node.end_col_offset = end_col_offset
+ else:
+ end_col_offset = node.end_col_offset
for child in iter_child_nodes(node):
- _fix(child, lineno, col_offset, end_lineno, end_col_offset)
- _fix(node, 1, 0, 1, 0)
+ _fix(child, lineno, col_offset, end_lineno, end_col_offset)
+ _fix(node, 1, 0, 1, 0)
return node
def increment_lineno(node, n=1):
"""
- Increment the line number and end line number of each node in the tree
- starting at *node* by *n*. This is useful to "move code" to a different
- location in a file.
+ Increment the line number and end line number of each node in the tree
+ starting at *node* by *n*. This is useful to "move code" to a different
+ location in a file.
"""
for child in walk(node):
if 'lineno' in child._attributes:
child.lineno = getattr(child, 'lineno', 0) + n
- if (
- "end_lineno" in child._attributes
- and (end_lineno := getattr(child, "end_lineno", 0)) is not None
- ):
- child.end_lineno = end_lineno + n
+ if (
+ "end_lineno" in child._attributes
+ and (end_lineno := getattr(child, "end_lineno", 0)) is not None
+ ):
+ child.end_lineno = end_lineno + n
return node
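# Side note, not part of the patch: since end positions exist (Python 3.8),
# shifting a snippet moves both lineno and end_lineno:
import ast

tree = ast.parse("a = 1")
ast.increment_lineno(tree, n=3)
assert (tree.body[0].lineno, tree.body[0].end_lineno) == (4, 4)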
@@ -293,79 +293,79 @@ def get_docstring(node, clean=True):
return text
-def _splitlines_no_ff(source):
- """Split a string into lines ignoring form feed and other chars.
-
- This mimics how the Python parser splits source code.
- """
- idx = 0
- lines = []
- next_line = ''
- while idx < len(source):
- c = source[idx]
- next_line += c
- idx += 1
- # Keep \r\n together
- if c == '\r' and idx < len(source) and source[idx] == '\n':
- next_line += '\n'
- idx += 1
- if c in '\r\n':
- lines.append(next_line)
- next_line = ''
-
- if next_line:
- lines.append(next_line)
- return lines
-
-
-def _pad_whitespace(source):
- r"""Replace all chars except '\f\t' in a line with spaces."""
- result = ''
- for c in source:
- if c in '\f\t':
- result += c
- else:
- result += ' '
- return result
-
-
-def get_source_segment(source, node, *, padded=False):
- """Get source code segment of the *source* that generated *node*.
-
- If some location information (`lineno`, `end_lineno`, `col_offset`,
- or `end_col_offset`) is missing, return None.
-
- If *padded* is `True`, the first line of a multi-line statement will
- be padded with spaces to match its original position.
- """
- try:
- if node.end_lineno is None or node.end_col_offset is None:
- return None
- lineno = node.lineno - 1
- end_lineno = node.end_lineno - 1
- col_offset = node.col_offset
- end_col_offset = node.end_col_offset
- except AttributeError:
- return None
-
- lines = _splitlines_no_ff(source)
- if end_lineno == lineno:
- return lines[lineno].encode()[col_offset:end_col_offset].decode()
-
- if padded:
- padding = _pad_whitespace(lines[lineno].encode()[:col_offset].decode())
- else:
- padding = ''
-
- first = padding + lines[lineno].encode()[col_offset:].decode()
- last = lines[end_lineno].encode()[:end_col_offset].decode()
- lines = lines[lineno+1:end_lineno]
-
- lines.insert(0, first)
- lines.append(last)
- return ''.join(lines)
-
-
+def _splitlines_no_ff(source):
+ """Split a string into lines ignoring form feed and other chars.
+
+ This mimics how the Python parser splits source code.
+ """
+ idx = 0
+ lines = []
+ next_line = ''
+ while idx < len(source):
+ c = source[idx]
+ next_line += c
+ idx += 1
+ # Keep \r\n together
+ if c == '\r' and idx < len(source) and source[idx] == '\n':
+ next_line += '\n'
+ idx += 1
+ if c in '\r\n':
+ lines.append(next_line)
+ next_line = ''
+
+ if next_line:
+ lines.append(next_line)
+ return lines
+
+
+def _pad_whitespace(source):
+ r"""Replace all chars except '\f\t' in a line with spaces."""
+ result = ''
+ for c in source:
+ if c in '\f\t':
+ result += c
+ else:
+ result += ' '
+ return result
+
+
+def get_source_segment(source, node, *, padded=False):
+ """Get source code segment of the *source* that generated *node*.
+
+ If some location information (`lineno`, `end_lineno`, `col_offset`,
+ or `end_col_offset`) is missing, return None.
+
+ If *padded* is `True`, the first line of a multi-line statement will
+ be padded with spaces to match its original position.
+ """
+ try:
+ if node.end_lineno is None or node.end_col_offset is None:
+ return None
+ lineno = node.lineno - 1
+ end_lineno = node.end_lineno - 1
+ col_offset = node.col_offset
+ end_col_offset = node.end_col_offset
+ except AttributeError:
+ return None
+
+ lines = _splitlines_no_ff(source)
+ if end_lineno == lineno:
+ return lines[lineno].encode()[col_offset:end_col_offset].decode()
+
+ if padded:
+ padding = _pad_whitespace(lines[lineno].encode()[:col_offset].decode())
+ else:
+ padding = ''
+
+ first = padding + lines[lineno].encode()[col_offset:].decode()
+ last = lines[end_lineno].encode()[:end_col_offset].decode()
+ lines = lines[lineno+1:end_lineno]
+
+ lines.insert(0, first)
+ lines.append(last)
+ return ''.join(lines)
+
+
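# Side note, not part of the patch: with the helpers above, any located node
# can be sliced back out of the original text, even across lines:
import ast

source = "result = func(a,\n              b)"
call = ast.parse(source).body[0].value
assert ast.get_source_segment(source, call) == "func(a,\n              b)"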
def walk(node):
"""
Recursively yield all descendant nodes in the tree starting at *node*
@@ -416,28 +416,28 @@ class NodeVisitor(object):
elif isinstance(value, AST):
self.visit(value)
- def visit_Constant(self, node):
- value = node.value
- type_name = _const_node_type_names.get(type(value))
- if type_name is None:
- for cls, name in _const_node_type_names.items():
- if isinstance(value, cls):
- type_name = name
- break
- if type_name is not None:
- method = 'visit_' + type_name
- try:
- visitor = getattr(self, method)
- except AttributeError:
- pass
- else:
- import warnings
- warnings.warn(f"{method} is deprecated; add visit_Constant",
- DeprecationWarning, 2)
- return visitor(node)
- return self.generic_visit(node)
-
-
+ def visit_Constant(self, node):
+ value = node.value
+ type_name = _const_node_type_names.get(type(value))
+ if type_name is None:
+ for cls, name in _const_node_type_names.items():
+ if isinstance(value, cls):
+ type_name = name
+ break
+ if type_name is not None:
+ method = 'visit_' + type_name
+ try:
+ visitor = getattr(self, method)
+ except AttributeError:
+ pass
+ else:
+ import warnings
+ warnings.warn(f"{method} is deprecated; add visit_Constant",
+ DeprecationWarning, 2)
+ return visitor(node)
+ return self.generic_visit(node)
+
+
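# Side note, not part of the patch: visitors that still define visit_Num,
# visit_Str, etc. keep working through the shim above, at the cost of a
# DeprecationWarning per dispatch:
import ast, warnings

class Old(ast.NodeVisitor):
    def visit_Num(self, node):          # legacy name; node is now a Constant
        print("number:", node.value)

with warnings.catch_warnings():
    warnings.simplefilter("ignore", DeprecationWarning)
    Old().visit(ast.parse("1 + 2"))     # number: 1 / number: 2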
class NodeTransformer(NodeVisitor):
"""
A :class:`NodeVisitor` subclass that walks the abstract syntax tree and
@@ -455,11 +455,11 @@ class NodeTransformer(NodeVisitor):
class RewriteName(NodeTransformer):
def visit_Name(self, node):
- return Subscript(
+ return Subscript(
value=Name(id='data', ctx=Load()),
- slice=Constant(value=node.id),
+ slice=Constant(value=node.id),
ctx=node.ctx
- )
+ )
Keep in mind that if the node you're operating on has child nodes you must
either transform the child nodes yourself or call the :meth:`generic_visit`
@@ -495,1106 +495,1106 @@ class NodeTransformer(NodeVisitor):
else:
setattr(node, field, new_node)
return node
-
-
-# If the ast module is loaded more than once, only add deprecated methods once
-if not hasattr(Constant, 'n'):
- # The following code is for backward compatibility.
- # It will be removed in future.
-
- def _getter(self):
- """Deprecated. Use value instead."""
- return self.value
-
- def _setter(self, value):
- self.value = value
-
- Constant.n = property(_getter, _setter)
- Constant.s = property(_getter, _setter)
-
-class _ABC(type):
-
- def __init__(cls, *args):
- cls.__doc__ = """Deprecated AST node class. Use ast.Constant instead"""
-
- def __instancecheck__(cls, inst):
- if not isinstance(inst, Constant):
- return False
- if cls in _const_types:
- try:
- value = inst.value
- except AttributeError:
- return False
- else:
- return (
- isinstance(value, _const_types[cls]) and
- not isinstance(value, _const_types_not.get(cls, ()))
- )
- return type.__instancecheck__(cls, inst)
-
-def _new(cls, *args, **kwargs):
- for key in kwargs:
- if key not in cls._fields:
- # arbitrary keyword arguments are accepted
- continue
- pos = cls._fields.index(key)
- if pos < len(args):
- raise TypeError(f"{cls.__name__} got multiple values for argument {key!r}")
- if cls in _const_types:
- return Constant(*args, **kwargs)
- return Constant.__new__(cls, *args, **kwargs)
-
-class Num(Constant, metaclass=_ABC):
- _fields = ('n',)
- __new__ = _new
-
-class Str(Constant, metaclass=_ABC):
- _fields = ('s',)
- __new__ = _new
-
-class Bytes(Constant, metaclass=_ABC):
- _fields = ('s',)
- __new__ = _new
-
-class NameConstant(Constant, metaclass=_ABC):
- __new__ = _new
-
-class Ellipsis(Constant, metaclass=_ABC):
- _fields = ()
-
- def __new__(cls, *args, **kwargs):
- if cls is Ellipsis:
- return Constant(..., *args, **kwargs)
- return Constant.__new__(cls, *args, **kwargs)
-
-_const_types = {
- Num: (int, float, complex),
- Str: (str,),
- Bytes: (bytes,),
- NameConstant: (type(None), bool),
- Ellipsis: (type(...),),
-}
-_const_types_not = {
- Num: (bool,),
-}
-
-_const_node_type_names = {
- bool: 'NameConstant', # should be before int
- type(None): 'NameConstant',
- int: 'Num',
- float: 'Num',
- complex: 'Num',
- str: 'Str',
- bytes: 'Bytes',
- type(...): 'Ellipsis',
-}
-
-class slice(AST):
- """Deprecated AST node class."""
-
-class Index(slice):
- """Deprecated AST node class. Use the index value directly instead."""
- def __new__(cls, value, **kwargs):
- return value
-
-class ExtSlice(slice):
- """Deprecated AST node class. Use ast.Tuple instead."""
- def __new__(cls, dims=(), **kwargs):
- return Tuple(list(dims), Load(), **kwargs)
-
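# Side note, not part of the patch: on interpreters that ship the shims above
# (roughly 3.9-3.11; they were removed later), old-style checks keep working
# on top of ast.Constant:
import ast

node = ast.parse("42").body[0].value
assert isinstance(node, ast.Constant) and isinstance(node, ast.Num)
assert node.n == node.value == 42      # .n/.s are properties on Constant
assert ast.Index(node) is node         # Index now just returns its value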
-# If the ast module is loaded more than once, only add deprecated methods once
-if not hasattr(Tuple, 'dims'):
- # The following code is for backward compatibility.
- # It will be removed in future.
-
- def _dims_getter(self):
- """Deprecated. Use elts instead."""
- return self.elts
-
- def _dims_setter(self, value):
- self.elts = value
-
- Tuple.dims = property(_dims_getter, _dims_setter)
-
-class Suite(mod):
- """Deprecated AST node class. Unused in Python 3."""
-
-class AugLoad(expr_context):
- """Deprecated AST node class. Unused in Python 3."""
-
-class AugStore(expr_context):
- """Deprecated AST node class. Unused in Python 3."""
-
-class Param(expr_context):
- """Deprecated AST node class. Unused in Python 3."""
-
-
-# Large float and imaginary literals get turned into infinities in the AST.
-# We unparse those infinities to INFSTR.
-_INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
-
-class _Precedence(IntEnum):
- """Precedence table that originated from python grammar."""
-
- TUPLE = auto()
- YIELD = auto() # 'yield', 'yield from'
- TEST = auto() # 'if'-'else', 'lambda'
- OR = auto() # 'or'
- AND = auto() # 'and'
- NOT = auto() # 'not'
- CMP = auto() # '<', '>', '==', '>=', '<=', '!=',
- # 'in', 'not in', 'is', 'is not'
- EXPR = auto()
- BOR = EXPR # '|'
- BXOR = auto() # '^'
- BAND = auto() # '&'
- SHIFT = auto() # '<<', '>>'
- ARITH = auto() # '+', '-'
- TERM = auto() # '*', '@', '/', '%', '//'
- FACTOR = auto() # unary '+', '-', '~'
- POWER = auto() # '**'
- AWAIT = auto() # 'await'
- ATOM = auto()
-
- def next(self):
- try:
- return self.__class__(self + 1)
- except ValueError:
- return self
-
-
-_SINGLE_QUOTES = ("'", '"')
-_MULTI_QUOTES = ('"""', "'''")
-_ALL_QUOTES = (*_SINGLE_QUOTES, *_MULTI_QUOTES)
-
-class _Unparser(NodeVisitor):
- """Methods in this class recursively traverse an AST and
- output source code for the abstract syntax; original formatting
- is disregarded."""
-
- def __init__(self, *, _avoid_backslashes=False):
- self._source = []
- self._buffer = []
- self._precedences = {}
- self._type_ignores = {}
- self._indent = 0
- self._avoid_backslashes = _avoid_backslashes
-
- def interleave(self, inter, f, seq):
- """Call f on each item in seq, calling inter() in between."""
- seq = iter(seq)
- try:
- f(next(seq))
- except StopIteration:
- pass
- else:
- for x in seq:
- inter()
- f(x)
-
- def items_view(self, traverser, items):
- """Traverse and separate the given *items* with a comma and append it to
- the buffer. If *items* is a single item sequence, a trailing comma
- will be added."""
- if len(items) == 1:
- traverser(items[0])
- self.write(",")
- else:
- self.interleave(lambda: self.write(", "), traverser, items)
-
- def maybe_newline(self):
- """Adds a newline if it isn't the start of generated source"""
- if self._source:
- self.write("\n")
-
- def fill(self, text=""):
- """Indent a piece of text and append it, according to the current
- indentation level"""
- self.maybe_newline()
- self.write(" " * self._indent + text)
-
- def write(self, text):
- """Append a piece of text"""
- self._source.append(text)
-
- def buffer_writer(self, text):
- self._buffer.append(text)
-
- @property
- def buffer(self):
- value = "".join(self._buffer)
- self._buffer.clear()
- return value
-
- @contextmanager
- def block(self, *, extra = None):
- """A context manager for preparing the source for blocks. It adds
- the character ':', increases the indentation on enter, and decreases
- the indentation on exit. If *extra* is given, it will be directly
- appended after the colon character.
- """
- self.write(":")
- if extra:
- self.write(extra)
- self._indent += 1
- yield
- self._indent -= 1
-
- @contextmanager
- def delimit(self, start, end):
- """A context manager for preparing the source for expressions. It adds
- *start* to the buffer and enters, after exit it adds *end*."""
-
- self.write(start)
- yield
- self.write(end)
-
- def delimit_if(self, start, end, condition):
- if condition:
- return self.delimit(start, end)
- else:
- return nullcontext()
-
- def require_parens(self, precedence, node):
- """Shortcut to adding precedence related parens"""
- return self.delimit_if("(", ")", self.get_precedence(node) > precedence)
-
- def get_precedence(self, node):
- return self._precedences.get(node, _Precedence.TEST)
-
- def set_precedence(self, precedence, *nodes):
- for node in nodes:
- self._precedences[node] = precedence
-
- def get_raw_docstring(self, node):
- """If a docstring node is found in the body of the *node* parameter,
- return that docstring node, None otherwise.
-
- Logic mirrored from ``_PyAST_GetDocString``."""
- if not isinstance(
- node, (AsyncFunctionDef, FunctionDef, ClassDef, Module)
- ) or len(node.body) < 1:
- return None
- node = node.body[0]
- if not isinstance(node, Expr):
- return None
- node = node.value
- if isinstance(node, Constant) and isinstance(node.value, str):
- return node
-
- def get_type_comment(self, node):
- comment = self._type_ignores.get(node.lineno) or node.type_comment
- if comment is not None:
- return f" # type: {comment}"
-
- def traverse(self, node):
- if isinstance(node, list):
- for item in node:
- self.traverse(item)
- else:
- super().visit(node)
-
- def visit(self, node):
- """Outputs a source code string that, if converted back to an ast
- (using ast.parse) will generate an AST equivalent to *node*"""
- self._source = []
- self.traverse(node)
- return "".join(self._source)
-
- def _write_docstring_and_traverse_body(self, node):
- if (docstring := self.get_raw_docstring(node)):
- self._write_docstring(docstring)
- self.traverse(node.body[1:])
- else:
- self.traverse(node.body)
-
- def visit_Module(self, node):
- self._type_ignores = {
- ignore.lineno: f"ignore{ignore.tag}"
- for ignore in node.type_ignores
- }
- self._write_docstring_and_traverse_body(node)
- self._type_ignores.clear()
-
- def visit_FunctionType(self, node):
- with self.delimit("(", ")"):
- self.interleave(
- lambda: self.write(", "), self.traverse, node.argtypes
- )
-
- self.write(" -> ")
- self.traverse(node.returns)
-
- def visit_Expr(self, node):
- self.fill()
- self.set_precedence(_Precedence.YIELD, node.value)
- self.traverse(node.value)
-
- def visit_NamedExpr(self, node):
- with self.require_parens(_Precedence.TUPLE, node):
- self.set_precedence(_Precedence.ATOM, node.target, node.value)
- self.traverse(node.target)
- self.write(" := ")
- self.traverse(node.value)
-
- def visit_Import(self, node):
- self.fill("import ")
- self.interleave(lambda: self.write(", "), self.traverse, node.names)
-
- def visit_ImportFrom(self, node):
- self.fill("from ")
- self.write("." * node.level)
- if node.module:
- self.write(node.module)
- self.write(" import ")
- self.interleave(lambda: self.write(", "), self.traverse, node.names)
-
- def visit_Assign(self, node):
- self.fill()
- for target in node.targets:
- self.traverse(target)
- self.write(" = ")
- self.traverse(node.value)
- if type_comment := self.get_type_comment(node):
- self.write(type_comment)
-
- def visit_AugAssign(self, node):
- self.fill()
- self.traverse(node.target)
- self.write(" " + self.binop[node.op.__class__.__name__] + "= ")
- self.traverse(node.value)
-
- def visit_AnnAssign(self, node):
- self.fill()
- with self.delimit_if("(", ")", not node.simple and isinstance(node.target, Name)):
- self.traverse(node.target)
- self.write(": ")
- self.traverse(node.annotation)
- if node.value:
- self.write(" = ")
- self.traverse(node.value)
-
- def visit_Return(self, node):
- self.fill("return")
- if node.value:
- self.write(" ")
- self.traverse(node.value)
-
- def visit_Pass(self, node):
- self.fill("pass")
-
- def visit_Break(self, node):
- self.fill("break")
-
- def visit_Continue(self, node):
- self.fill("continue")
-
- def visit_Delete(self, node):
- self.fill("del ")
- self.interleave(lambda: self.write(", "), self.traverse, node.targets)
-
- def visit_Assert(self, node):
- self.fill("assert ")
- self.traverse(node.test)
- if node.msg:
- self.write(", ")
- self.traverse(node.msg)
-
- def visit_Global(self, node):
- self.fill("global ")
- self.interleave(lambda: self.write(", "), self.write, node.names)
-
- def visit_Nonlocal(self, node):
- self.fill("nonlocal ")
- self.interleave(lambda: self.write(", "), self.write, node.names)
-
- def visit_Await(self, node):
- with self.require_parens(_Precedence.AWAIT, node):
- self.write("await")
- if node.value:
- self.write(" ")
- self.set_precedence(_Precedence.ATOM, node.value)
- self.traverse(node.value)
-
- def visit_Yield(self, node):
- with self.require_parens(_Precedence.YIELD, node):
- self.write("yield")
- if node.value:
- self.write(" ")
- self.set_precedence(_Precedence.ATOM, node.value)
- self.traverse(node.value)
-
- def visit_YieldFrom(self, node):
- with self.require_parens(_Precedence.YIELD, node):
- self.write("yield from ")
- if not node.value:
- raise ValueError("Node can't be used without a value attribute.")
- self.set_precedence(_Precedence.ATOM, node.value)
- self.traverse(node.value)
-
- def visit_Raise(self, node):
- self.fill("raise")
- if not node.exc:
- if node.cause:
- raise ValueError(f"Node can't use cause without an exception.")
- return
- self.write(" ")
- self.traverse(node.exc)
- if node.cause:
- self.write(" from ")
- self.traverse(node.cause)
-
- def visit_Try(self, node):
- self.fill("try")
- with self.block():
- self.traverse(node.body)
- for ex in node.handlers:
- self.traverse(ex)
- if node.orelse:
- self.fill("else")
- with self.block():
- self.traverse(node.orelse)
- if node.finalbody:
- self.fill("finally")
- with self.block():
- self.traverse(node.finalbody)
-
- def visit_ExceptHandler(self, node):
- self.fill("except")
- if node.type:
- self.write(" ")
- self.traverse(node.type)
- if node.name:
- self.write(" as ")
- self.write(node.name)
- with self.block():
- self.traverse(node.body)
-
- def visit_ClassDef(self, node):
- self.maybe_newline()
- for deco in node.decorator_list:
- self.fill("@")
- self.traverse(deco)
- self.fill("class " + node.name)
- with self.delimit_if("(", ")", condition = node.bases or node.keywords):
- comma = False
- for e in node.bases:
- if comma:
- self.write(", ")
- else:
- comma = True
- self.traverse(e)
- for e in node.keywords:
- if comma:
- self.write(", ")
- else:
- comma = True
- self.traverse(e)
-
- with self.block():
- self._write_docstring_and_traverse_body(node)
-
- def visit_FunctionDef(self, node):
- self._function_helper(node, "def")
-
- def visit_AsyncFunctionDef(self, node):
- self._function_helper(node, "async def")
-
- def _function_helper(self, node, fill_suffix):
- self.maybe_newline()
- for deco in node.decorator_list:
- self.fill("@")
- self.traverse(deco)
- def_str = fill_suffix + " " + node.name
- self.fill(def_str)
- with self.delimit("(", ")"):
- self.traverse(node.args)
- if node.returns:
- self.write(" -> ")
- self.traverse(node.returns)
- with self.block(extra=self.get_type_comment(node)):
- self._write_docstring_and_traverse_body(node)
-
- def visit_For(self, node):
- self._for_helper("for ", node)
-
- def visit_AsyncFor(self, node):
- self._for_helper("async for ", node)
-
- def _for_helper(self, fill, node):
- self.fill(fill)
- self.traverse(node.target)
- self.write(" in ")
- self.traverse(node.iter)
- with self.block(extra=self.get_type_comment(node)):
- self.traverse(node.body)
- if node.orelse:
- self.fill("else")
- with self.block():
- self.traverse(node.orelse)
-
- def visit_If(self, node):
- self.fill("if ")
- self.traverse(node.test)
- with self.block():
- self.traverse(node.body)
- # collapse nested ifs into equivalent elifs.
- while node.orelse and len(node.orelse) == 1 and isinstance(node.orelse[0], If):
- node = node.orelse[0]
- self.fill("elif ")
- self.traverse(node.test)
- with self.block():
- self.traverse(node.body)
- # final else
- if node.orelse:
- self.fill("else")
- with self.block():
- self.traverse(node.orelse)
-
- def visit_While(self, node):
- self.fill("while ")
- self.traverse(node.test)
- with self.block():
- self.traverse(node.body)
- if node.orelse:
- self.fill("else")
- with self.block():
- self.traverse(node.orelse)
-
- def visit_With(self, node):
- self.fill("with ")
- self.interleave(lambda: self.write(", "), self.traverse, node.items)
- with self.block(extra=self.get_type_comment(node)):
- self.traverse(node.body)
-
- def visit_AsyncWith(self, node):
- self.fill("async with ")
- self.interleave(lambda: self.write(", "), self.traverse, node.items)
- with self.block(extra=self.get_type_comment(node)):
- self.traverse(node.body)
-
- def _str_literal_helper(
- self, string, *, quote_types=_ALL_QUOTES, escape_special_whitespace=False
- ):
- """Helper for writing string literals, minimizing escapes.
- Returns the tuple (string literal to write, possible quote types).
- """
- def escape_char(c):
- # \n and \t are non-printable, but we only escape them if
- # escape_special_whitespace is True
- if not escape_special_whitespace and c in "\n\t":
- return c
- # Always escape backslashes and other non-printable characters
- if c == "\\" or not c.isprintable():
- return c.encode("unicode_escape").decode("ascii")
- return c
-
- escaped_string = "".join(map(escape_char, string))
- possible_quotes = quote_types
- if "\n" in escaped_string:
- possible_quotes = [q for q in possible_quotes if q in _MULTI_QUOTES]
- possible_quotes = [q for q in possible_quotes if q not in escaped_string]
- if not possible_quotes:
- # If there aren't any possible_quotes, fall back to using repr
- # on the original string. Try to use a quote from quote_types,
- # e.g., so that we use triple quotes for docstrings.
- string = repr(string)
- quote = next((q for q in quote_types if string[0] in q), string[0])
- return string[1:-1], [quote]
- if escaped_string:
- # Sort so that we prefer '''"''' over """\""""
- possible_quotes.sort(key=lambda q: q[0] == escaped_string[-1])
- # If we're using triple quotes and we'd need to escape a final
- # quote, escape it
- if possible_quotes[0][0] == escaped_string[-1]:
- assert len(possible_quotes[0]) == 3
- escaped_string = escaped_string[:-1] + "\\" + escaped_string[-1]
- return escaped_string, possible_quotes
-
- def _write_str_avoiding_backslashes(self, string, *, quote_types=_ALL_QUOTES):
- """Write string literal value with a best effort attempt to avoid backslashes."""
- string, quote_types = self._str_literal_helper(string, quote_types=quote_types)
- quote_type = quote_types[0]
- self.write(f"{quote_type}{string}{quote_type}")
-
- def visit_JoinedStr(self, node):
- self.write("f")
- if self._avoid_backslashes:
- self._fstring_JoinedStr(node, self.buffer_writer)
- self._write_str_avoiding_backslashes(self.buffer)
- return
-
- # If we don't need to avoid backslashes globally (i.e., we only need
- # to avoid them inside FormattedValues), it's cosmetically preferred
- # to use escaped whitespace. That is, it's preferred to use backslashes
- # for cases like: f"{x}\n". To accomplish this, we keep track of what
- # in our buffer corresponds to FormattedValues and what corresponds to
- # Constant parts of the f-string, and allow escapes accordingly.
- buffer = []
- for value in node.values:
- meth = getattr(self, "_fstring_" + type(value).__name__)
- meth(value, self.buffer_writer)
- buffer.append((self.buffer, isinstance(value, Constant)))
- new_buffer = []
- quote_types = _ALL_QUOTES
- for value, is_constant in buffer:
- # Repeatedly narrow down the list of possible quote_types
- value, quote_types = self._str_literal_helper(
- value, quote_types=quote_types,
- escape_special_whitespace=is_constant
- )
- new_buffer.append(value)
- value = "".join(new_buffer)
- quote_type = quote_types[0]
- self.write(f"{quote_type}{value}{quote_type}")
-
- def visit_FormattedValue(self, node):
- self.write("f")
- self._fstring_FormattedValue(node, self.buffer_writer)
- self._write_str_avoiding_backslashes(self.buffer)
-
- def _fstring_JoinedStr(self, node, write):
- for value in node.values:
- meth = getattr(self, "_fstring_" + type(value).__name__)
- meth(value, write)
-
- def _fstring_Constant(self, node, write):
- if not isinstance(node.value, str):
- raise ValueError("Constants inside JoinedStr should be a string.")
- value = node.value.replace("{", "{{").replace("}", "}}")
- write(value)
-
- def _fstring_FormattedValue(self, node, write):
- write("{")
- unparser = type(self)(_avoid_backslashes=True)
- unparser.set_precedence(_Precedence.TEST.next(), node.value)
- expr = unparser.visit(node.value)
- if expr.startswith("{"):
- write(" ") # Separate pair of opening brackets as "{ {"
- if "\\" in expr:
- raise ValueError("Unable to avoid backslash in f-string expression part")
- write(expr)
- if node.conversion != -1:
- conversion = chr(node.conversion)
- if conversion not in "sra":
- raise ValueError("Unknown f-string conversion.")
- write(f"!{conversion}")
- if node.format_spec:
- write(":")
- meth = getattr(self, "_fstring_" + type(node.format_spec).__name__)
- meth(node.format_spec, write)
- write("}")
-
- def visit_Name(self, node):
- self.write(node.id)
-
- def _write_docstring(self, node):
- self.fill()
- if node.kind == "u":
- self.write("u")
- self._write_str_avoiding_backslashes(node.value, quote_types=_MULTI_QUOTES)
-
- def _write_constant(self, value):
- if isinstance(value, (float, complex)):
- # Substitute overflowing decimal literal for AST infinities,
- # and inf - inf for NaNs.
- self.write(
- repr(value)
- .replace("inf", _INFSTR)
- .replace("nan", f"({_INFSTR}-{_INFSTR})")
- )
- elif self._avoid_backslashes and isinstance(value, str):
- self._write_str_avoiding_backslashes(value)
- else:
- self.write(repr(value))
-
- def visit_Constant(self, node):
- value = node.value
- if isinstance(value, tuple):
- with self.delimit("(", ")"):
- self.items_view(self._write_constant, value)
- elif value is ...:
- self.write("...")
- else:
- if node.kind == "u":
- self.write("u")
- self._write_constant(node.value)
-
- def visit_List(self, node):
- with self.delimit("[", "]"):
- self.interleave(lambda: self.write(", "), self.traverse, node.elts)
-
- def visit_ListComp(self, node):
- with self.delimit("[", "]"):
- self.traverse(node.elt)
- for gen in node.generators:
- self.traverse(gen)
-
- def visit_GeneratorExp(self, node):
- with self.delimit("(", ")"):
- self.traverse(node.elt)
- for gen in node.generators:
- self.traverse(gen)
-
- def visit_SetComp(self, node):
- with self.delimit("{", "}"):
- self.traverse(node.elt)
- for gen in node.generators:
- self.traverse(gen)
-
- def visit_DictComp(self, node):
- with self.delimit("{", "}"):
- self.traverse(node.key)
- self.write(": ")
- self.traverse(node.value)
- for gen in node.generators:
- self.traverse(gen)
-
- def visit_comprehension(self, node):
- if node.is_async:
- self.write(" async for ")
- else:
- self.write(" for ")
- self.set_precedence(_Precedence.TUPLE, node.target)
- self.traverse(node.target)
- self.write(" in ")
- self.set_precedence(_Precedence.TEST.next(), node.iter, *node.ifs)
- self.traverse(node.iter)
- for if_clause in node.ifs:
- self.write(" if ")
- self.traverse(if_clause)
-
- def visit_IfExp(self, node):
- with self.require_parens(_Precedence.TEST, node):
- self.set_precedence(_Precedence.TEST.next(), node.body, node.test)
- self.traverse(node.body)
- self.write(" if ")
- self.traverse(node.test)
- self.write(" else ")
- self.set_precedence(_Precedence.TEST, node.orelse)
- self.traverse(node.orelse)
-
- def visit_Set(self, node):
- if node.elts:
- with self.delimit("{", "}"):
- self.interleave(lambda: self.write(", "), self.traverse, node.elts)
- else:
- # `{}` would be interpreted as a dictionary literal, and
- # `set` might be shadowed. Thus:
- self.write('{*()}')
-
- def visit_Dict(self, node):
- def write_key_value_pair(k, v):
- self.traverse(k)
- self.write(": ")
- self.traverse(v)
-
- def write_item(item):
- k, v = item
- if k is None:
- # for dictionary unpacking operator in dicts {**{'y': 2}}
- # see PEP 448 for details
- self.write("**")
- self.set_precedence(_Precedence.EXPR, v)
- self.traverse(v)
- else:
- write_key_value_pair(k, v)
-
- with self.delimit("{", "}"):
- self.interleave(
- lambda: self.write(", "), write_item, zip(node.keys, node.values)
- )
-
- def visit_Tuple(self, node):
- with self.delimit("(", ")"):
- self.items_view(self.traverse, node.elts)
-
- unop = {"Invert": "~", "Not": "not", "UAdd": "+", "USub": "-"}
- unop_precedence = {
- "not": _Precedence.NOT,
- "~": _Precedence.FACTOR,
- "+": _Precedence.FACTOR,
- "-": _Precedence.FACTOR,
- }
-
- def visit_UnaryOp(self, node):
- operator = self.unop[node.op.__class__.__name__]
- operator_precedence = self.unop_precedence[operator]
- with self.require_parens(operator_precedence, node):
- self.write(operator)
- # factor prefixes (+, -, ~) shouldn't be separated
- # from the value they belong to (e.g., +1 instead of + 1)
- if operator_precedence is not _Precedence.FACTOR:
- self.write(" ")
- self.set_precedence(operator_precedence, node.operand)
- self.traverse(node.operand)
-
- binop = {
- "Add": "+",
- "Sub": "-",
- "Mult": "*",
- "MatMult": "@",
- "Div": "/",
- "Mod": "%",
- "LShift": "<<",
- "RShift": ">>",
- "BitOr": "|",
- "BitXor": "^",
- "BitAnd": "&",
- "FloorDiv": "//",
- "Pow": "**",
- }
-
- binop_precedence = {
- "+": _Precedence.ARITH,
- "-": _Precedence.ARITH,
- "*": _Precedence.TERM,
- "@": _Precedence.TERM,
- "/": _Precedence.TERM,
- "%": _Precedence.TERM,
- "<<": _Precedence.SHIFT,
- ">>": _Precedence.SHIFT,
- "|": _Precedence.BOR,
- "^": _Precedence.BXOR,
- "&": _Precedence.BAND,
- "//": _Precedence.TERM,
- "**": _Precedence.POWER,
- }
-
- binop_rassoc = frozenset(("**",))
- def visit_BinOp(self, node):
- operator = self.binop[node.op.__class__.__name__]
- operator_precedence = self.binop_precedence[operator]
- with self.require_parens(operator_precedence, node):
- if operator in self.binop_rassoc:
- left_precedence = operator_precedence.next()
- right_precedence = operator_precedence
- else:
- left_precedence = operator_precedence
- right_precedence = operator_precedence.next()
-
- self.set_precedence(left_precedence, node.left)
- self.traverse(node.left)
- self.write(f" {operator} ")
- self.set_precedence(right_precedence, node.right)
- self.traverse(node.right)
-
- cmpops = {
- "Eq": "==",
- "NotEq": "!=",
- "Lt": "<",
- "LtE": "<=",
- "Gt": ">",
- "GtE": ">=",
- "Is": "is",
- "IsNot": "is not",
- "In": "in",
- "NotIn": "not in",
- }
-
- def visit_Compare(self, node):
- with self.require_parens(_Precedence.CMP, node):
- self.set_precedence(_Precedence.CMP.next(), node.left, *node.comparators)
- self.traverse(node.left)
- for o, e in zip(node.ops, node.comparators):
- self.write(" " + self.cmpops[o.__class__.__name__] + " ")
- self.traverse(e)
-
- boolops = {"And": "and", "Or": "or"}
- boolop_precedence = {"and": _Precedence.AND, "or": _Precedence.OR}
-
- def visit_BoolOp(self, node):
- operator = self.boolops[node.op.__class__.__name__]
- operator_precedence = self.boolop_precedence[operator]
-
- def increasing_level_traverse(node):
- nonlocal operator_precedence
- operator_precedence = operator_precedence.next()
- self.set_precedence(operator_precedence, node)
- self.traverse(node)
-
- with self.require_parens(operator_precedence, node):
- s = f" {operator} "
- self.interleave(lambda: self.write(s), increasing_level_traverse, node.values)
-
- def visit_Attribute(self, node):
- self.set_precedence(_Precedence.ATOM, node.value)
- self.traverse(node.value)
- # Special case: 3.__abs__() is a syntax error, so if node.value
- # is an integer literal then we need to either parenthesize
- # it or add an extra space to get 3 .__abs__().
- if isinstance(node.value, Constant) and isinstance(node.value.value, int):
- self.write(" ")
- self.write(".")
- self.write(node.attr)
-
- def visit_Call(self, node):
- self.set_precedence(_Precedence.ATOM, node.func)
- self.traverse(node.func)
- with self.delimit("(", ")"):
- comma = False
- for e in node.args:
- if comma:
- self.write(", ")
- else:
- comma = True
- self.traverse(e)
- for e in node.keywords:
- if comma:
- self.write(", ")
- else:
- comma = True
- self.traverse(e)
-
- def visit_Subscript(self, node):
- def is_simple_tuple(slice_value):
- # when unparsing a non-empty tuple, the parentheses can be safely
- # omitted if there aren't any elements that explicitly require
- # parentheses (such as starred expressions).
- return (
- isinstance(slice_value, Tuple)
- and slice_value.elts
- and not any(isinstance(elt, Starred) for elt in slice_value.elts)
- )
-
- self.set_precedence(_Precedence.ATOM, node.value)
- self.traverse(node.value)
- with self.delimit("[", "]"):
- if is_simple_tuple(node.slice):
- self.items_view(self.traverse, node.slice.elts)
- else:
- self.traverse(node.slice)
-
- def visit_Starred(self, node):
- self.write("*")
- self.set_precedence(_Precedence.EXPR, node.value)
- self.traverse(node.value)
-
- def visit_Ellipsis(self, node):
- self.write("...")
-
- def visit_Slice(self, node):
- if node.lower:
- self.traverse(node.lower)
- self.write(":")
- if node.upper:
- self.traverse(node.upper)
- if node.step:
- self.write(":")
- self.traverse(node.step)
-
- def visit_arg(self, node):
- self.write(node.arg)
- if node.annotation:
- self.write(": ")
- self.traverse(node.annotation)
-
- def visit_arguments(self, node):
- first = True
- # normal arguments
- all_args = node.posonlyargs + node.args
- defaults = [None] * (len(all_args) - len(node.defaults)) + node.defaults
- for index, elements in enumerate(zip(all_args, defaults), 1):
- a, d = elements
- if first:
- first = False
- else:
- self.write(", ")
- self.traverse(a)
- if d:
- self.write("=")
- self.traverse(d)
- if index == len(node.posonlyargs):
- self.write(", /")
-
- # varargs, or bare '*' if no varargs but keyword-only arguments present
- if node.vararg or node.kwonlyargs:
- if first:
- first = False
- else:
- self.write(", ")
- self.write("*")
- if node.vararg:
- self.write(node.vararg.arg)
- if node.vararg.annotation:
- self.write(": ")
- self.traverse(node.vararg.annotation)
-
- # keyword-only arguments
- if node.kwonlyargs:
- for a, d in zip(node.kwonlyargs, node.kw_defaults):
- self.write(", ")
- self.traverse(a)
- if d:
- self.write("=")
- self.traverse(d)
-
- # kwargs
- if node.kwarg:
- if first:
- first = False
- else:
- self.write(", ")
- self.write("**" + node.kwarg.arg)
- if node.kwarg.annotation:
- self.write(": ")
- self.traverse(node.kwarg.annotation)
-
- def visit_keyword(self, node):
- if node.arg is None:
- self.write("**")
- else:
- self.write(node.arg)
- self.write("=")
- self.traverse(node.value)
-
- def visit_Lambda(self, node):
- with self.require_parens(_Precedence.TEST, node):
- self.write("lambda ")
- self.traverse(node.args)
- self.write(": ")
- self.set_precedence(_Precedence.TEST, node.body)
- self.traverse(node.body)
-
- def visit_alias(self, node):
- self.write(node.name)
- if node.asname:
- self.write(" as " + node.asname)
-
- def visit_withitem(self, node):
- self.traverse(node.context_expr)
- if node.optional_vars:
- self.write(" as ")
- self.traverse(node.optional_vars)
-
-def unparse(ast_obj):
- unparser = _Unparser()
- return unparser.visit(ast_obj)
-
-
-def main():
- import argparse
-
- parser = argparse.ArgumentParser(prog='python -m ast')
- parser.add_argument('infile', type=argparse.FileType(mode='rb'), nargs='?',
- default='-',
- help='the file to parse; defaults to stdin')
- parser.add_argument('-m', '--mode', default='exec',
- choices=('exec', 'single', 'eval', 'func_type'),
- help='specify what kind of code must be parsed')
- parser.add_argument('--no-type-comments', default=True, action='store_false',
- help="don't add information about type comments")
- parser.add_argument('-a', '--include-attributes', action='store_true',
- help='include attributes such as line numbers and '
- 'column offsets')
- parser.add_argument('-i', '--indent', type=int, default=3,
- help='indentation of nodes (number of spaces)')
- args = parser.parse_args()
-
- with args.infile as infile:
- source = infile.read()
- tree = parse(source, args.infile.name, args.mode, type_comments=args.no_type_comments)
- print(dump(tree, include_attributes=args.include_attributes, indent=args.indent))
-
-if __name__ == '__main__':
- main()
+
+
+# If the ast module is loaded more than once, only add deprecated methods once
+if not hasattr(Constant, 'n'):
+ # The following code is for backward compatibility.
+ # It will be removed in a future version.
+
+ def _getter(self):
+ """Deprecated. Use value instead."""
+ return self.value
+
+ def _setter(self, value):
+ self.value = value
+
+ Constant.n = property(_getter, _setter)
+ Constant.s = property(_getter, _setter)
+
+class _ABC(type):
+
+ def __init__(cls, *args):
+ cls.__doc__ = """Deprecated AST node class. Use ast.Constant instead"""
+
+ def __instancecheck__(cls, inst):
+ if not isinstance(inst, Constant):
+ return False
+ if cls in _const_types:
+ try:
+ value = inst.value
+ except AttributeError:
+ return False
+ else:
+ return (
+ isinstance(value, _const_types[cls]) and
+ not isinstance(value, _const_types_not.get(cls, ()))
+ )
+ return type.__instancecheck__(cls, inst)
+
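+# Shared constructor for the deprecated node classes below: constructing
+# one of them (e.g. Num(42)) actually returns an equivalent ast.Constant.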
+def _new(cls, *args, **kwargs):
+ for key in kwargs:
+ if key not in cls._fields:
+ # arbitrary keyword arguments are accepted
+ continue
+ pos = cls._fields.index(key)
+ if pos < len(args):
+ raise TypeError(f"{cls.__name__} got multiple values for argument {key!r}")
+ if cls in _const_types:
+ return Constant(*args, **kwargs)
+ return Constant.__new__(cls, *args, **kwargs)
+
+class Num(Constant, metaclass=_ABC):
+ _fields = ('n',)
+ __new__ = _new
+
+class Str(Constant, metaclass=_ABC):
+ _fields = ('s',)
+ __new__ = _new
+
+class Bytes(Constant, metaclass=_ABC):
+ _fields = ('s',)
+ __new__ = _new
+
+class NameConstant(Constant, metaclass=_ABC):
+ __new__ = _new
+
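+# Note: this class deliberately shadows the builtin Ellipsis inside this
+# module; instantiating it yields Constant(...).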
+class Ellipsis(Constant, metaclass=_ABC):
+ _fields = ()
+
+ def __new__(cls, *args, **kwargs):
+ if cls is Ellipsis:
+ return Constant(..., *args, **kwargs)
+ return Constant.__new__(cls, *args, **kwargs)
+
+_const_types = {
+ Num: (int, float, complex),
+ Str: (str,),
+ Bytes: (bytes,),
+ NameConstant: (type(None), bool),
+ Ellipsis: (type(...),),
+}
+_const_types_not = {
+ Num: (bool,),
+}
+
+_const_node_type_names = {
+ bool: 'NameConstant', # should be before int
+ type(None): 'NameConstant',
+ int: 'Num',
+ float: 'Num',
+ complex: 'Num',
+ str: 'Str',
+ bytes: 'Bytes',
+ type(...): 'Ellipsis',
+}
+
+class slice(AST):
+ """Deprecated AST node class."""
+
+class Index(slice):
+ """Deprecated AST node class. Use the index value directly instead."""
+ def __new__(cls, value, **kwargs):
+ return value
+
+class ExtSlice(slice):
+ """Deprecated AST node class. Use ast.Tuple instead."""
+ def __new__(cls, dims=(), **kwargs):
+ return Tuple(list(dims), Load(), **kwargs)
+
+# If the ast module is loaded more than once, only add deprecated methods once
+if not hasattr(Tuple, 'dims'):
+ # The following code is for backward compatibility.
+ # It will be removed in a future version.
+
+ def _dims_getter(self):
+ """Deprecated. Use elts instead."""
+ return self.elts
+
+ def _dims_setter(self, value):
+ self.elts = value
+
+ Tuple.dims = property(_dims_getter, _dims_setter)
+
+class Suite(mod):
+ """Deprecated AST node class. Unused in Python 3."""
+
+class AugLoad(expr_context):
+ """Deprecated AST node class. Unused in Python 3."""
+
+class AugStore(expr_context):
+ """Deprecated AST node class. Unused in Python 3."""
+
+class Param(expr_context):
+ """Deprecated AST node class. Unused in Python 3."""
+
+
+# Large float and imaginary literals get turned into infinities in the AST.
+# We unparse those infinities to INFSTR.
+_INFSTR = "1e" + repr(sys.float_info.max_10_exp + 1)
+
+class _Precedence(IntEnum):
+ """Precedence table that originated from python grammar."""
+
+ TUPLE = auto()
+ YIELD = auto() # 'yield', 'yield from'
+ TEST = auto() # 'if'-'else', 'lambda'
+ OR = auto() # 'or'
+ AND = auto() # 'and'
+ NOT = auto() # 'not'
+ CMP = auto() # '<', '>', '==', '>=', '<=', '!=',
+ # 'in', 'not in', 'is', 'is not'
+ EXPR = auto()
+ BOR = EXPR # '|'
+ BXOR = auto() # '^'
+ BAND = auto() # '&'
+ SHIFT = auto() # '<<', '>>'
+ ARITH = auto() # '+', '-'
+ TERM = auto() # '*', '@', '/', '%', '//'
+ FACTOR = auto() # unary '+', '-', '~'
+ POWER = auto() # '**'
+ AWAIT = auto() # 'await'
+ ATOM = auto()
+
+ def next(self):
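+ # Return the next (tighter-binding) precedence level; saturate at ATOM.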
+ try:
+ return self.__class__(self + 1)
+ except ValueError:
+ return self
+
+
+_SINGLE_QUOTES = ("'", '"')
+_MULTI_QUOTES = ('"""', "'''")
+_ALL_QUOTES = (*_SINGLE_QUOTES, *_MULTI_QUOTES)
+
+class _Unparser(NodeVisitor):
+ """Methods in this class recursively traverse an AST and
+ output source code for the abstract syntax; original formatting
+ is disregarded."""
+
+ def __init__(self, *, _avoid_backslashes=False):
+ self._source = []
+ self._buffer = []
+ self._precedences = {}
+ self._type_ignores = {}
+ self._indent = 0
+ self._avoid_backslashes = _avoid_backslashes
+
+ def interleave(self, inter, f, seq):
+ """Call f on each item in seq, calling inter() in between."""
+ seq = iter(seq)
+ try:
+ f(next(seq))
+ except StopIteration:
+ pass
+ else:
+ for x in seq:
+ inter()
+ f(x)
+
+ def items_view(self, traverser, items):
+ """Traverse and separate the given *items* with a comma and append it to
+ the buffer. If *items* is a single item sequence, a trailing comma
+ will be added."""
+ if len(items) == 1:
+ traverser(items[0])
+ self.write(",")
+ else:
+ self.interleave(lambda: self.write(", "), traverser, items)
+
+ def maybe_newline(self):
+ """Adds a newline if it isn't the start of generated source"""
+ if self._source:
+ self.write("\n")
+
+ def fill(self, text=""):
+ """Indent a piece of text and append it, according to the current
+ indentation level"""
+ self.maybe_newline()
+ self.write(" " * self._indent + text)
+
+ def write(self, text):
+ """Append a piece of text"""
+ self._source.append(text)
+
+ def buffer_writer(self, text):
+ self._buffer.append(text)
+
+ @property
+ def buffer(self):
+ value = "".join(self._buffer)
+ self._buffer.clear()
+ return value
+
+ @contextmanager
+ def block(self, *, extra = None):
+ """A context manager for preparing the source for blocks. It adds
+ the character ':', increases the indentation on enter, and decreases
+ the indentation on exit. If *extra* is given, it will be directly
+ appended after the colon character.
+ """
+ self.write(":")
+ if extra:
+ self.write(extra)
+ self._indent += 1
+ yield
+ self._indent -= 1
+
+ @contextmanager
+ def delimit(self, start, end):
+ """A context manager for preparing the source for expressions. It adds
+ *start* to the buffer and enters, after exit it adds *end*."""
+
+ self.write(start)
+ yield
+ self.write(end)
+
+ def delimit_if(self, start, end, condition):
+ if condition:
+ return self.delimit(start, end)
+ else:
+ return nullcontext()
+
+ def require_parens(self, precedence, node):
+ """Shortcut to adding precedence related parens"""
+ return self.delimit_if("(", ")", self.get_precedence(node) > precedence)
+
+ def get_precedence(self, node):
+ return self._precedences.get(node, _Precedence.TEST)
+
+ def set_precedence(self, precedence, *nodes):
+ for node in nodes:
+ self._precedences[node] = precedence
+
+ def get_raw_docstring(self, node):
+ """If a docstring node is found in the body of the *node* parameter,
+ return that docstring node, None otherwise.
+
+ Logic mirrored from ``_PyAST_GetDocString``."""
+ if not isinstance(
+ node, (AsyncFunctionDef, FunctionDef, ClassDef, Module)
+ ) or len(node.body) < 1:
+ return None
+ node = node.body[0]
+ if not isinstance(node, Expr):
+ return None
+ node = node.value
+ if isinstance(node, Constant) and isinstance(node.value, str):
+ return node
+
+ def get_type_comment(self, node):
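+ # A "type: ignore<tag>" recorded for this line takes precedence
+ # over the node's own type_comment.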
+ comment = self._type_ignores.get(node.lineno) or node.type_comment
+ if comment is not None:
+ return f" # type: {comment}"
+
+ def traverse(self, node):
+ if isinstance(node, list):
+ for item in node:
+ self.traverse(item)
+ else:
+ super().visit(node)
+
+ def visit(self, node):
+ """Outputs a source code string that, if converted back to an ast
+ (using ast.parse) will generate an AST equivalent to *node*"""
+ self._source = []
+ self.traverse(node)
+ return "".join(self._source)
+
+ def _write_docstring_and_traverse_body(self, node):
+ if (docstring := self.get_raw_docstring(node)):
+ self._write_docstring(docstring)
+ self.traverse(node.body[1:])
+ else:
+ self.traverse(node.body)
+
+ def visit_Module(self, node):
+ self._type_ignores = {
+ ignore.lineno: f"ignore{ignore.tag}"
+ for ignore in node.type_ignores
+ }
+ self._write_docstring_and_traverse_body(node)
+ self._type_ignores.clear()
+
+ def visit_FunctionType(self, node):
+ with self.delimit("(", ")"):
+ self.interleave(
+ lambda: self.write(", "), self.traverse, node.argtypes
+ )
+
+ self.write(" -> ")
+ self.traverse(node.returns)
+
+ def visit_Expr(self, node):
+ self.fill()
+ self.set_precedence(_Precedence.YIELD, node.value)
+ self.traverse(node.value)
+
+ def visit_NamedExpr(self, node):
+ with self.require_parens(_Precedence.TUPLE, node):
+ self.set_precedence(_Precedence.ATOM, node.target, node.value)
+ self.traverse(node.target)
+ self.write(" := ")
+ self.traverse(node.value)
+
+ def visit_Import(self, node):
+ self.fill("import ")
+ self.interleave(lambda: self.write(", "), self.traverse, node.names)
+
+ def visit_ImportFrom(self, node):
+ self.fill("from ")
+ self.write("." * node.level)
+ if node.module:
+ self.write(node.module)
+ self.write(" import ")
+ self.interleave(lambda: self.write(", "), self.traverse, node.names)
+
+ def visit_Assign(self, node):
+ self.fill()
+ for target in node.targets:
+ self.traverse(target)
+ self.write(" = ")
+ self.traverse(node.value)
+ if type_comment := self.get_type_comment(node):
+ self.write(type_comment)
+
+ def visit_AugAssign(self, node):
+ self.fill()
+ self.traverse(node.target)
+ self.write(" " + self.binop[node.op.__class__.__name__] + "= ")
+ self.traverse(node.value)
+
+ def visit_AnnAssign(self, node):
+ self.fill()
+ with self.delimit_if("(", ")", not node.simple and isinstance(node.target, Name)):
+ self.traverse(node.target)
+ self.write(": ")
+ self.traverse(node.annotation)
+ if node.value:
+ self.write(" = ")
+ self.traverse(node.value)
+
+ def visit_Return(self, node):
+ self.fill("return")
+ if node.value:
+ self.write(" ")
+ self.traverse(node.value)
+
+ def visit_Pass(self, node):
+ self.fill("pass")
+
+ def visit_Break(self, node):
+ self.fill("break")
+
+ def visit_Continue(self, node):
+ self.fill("continue")
+
+ def visit_Delete(self, node):
+ self.fill("del ")
+ self.interleave(lambda: self.write(", "), self.traverse, node.targets)
+
+ def visit_Assert(self, node):
+ self.fill("assert ")
+ self.traverse(node.test)
+ if node.msg:
+ self.write(", ")
+ self.traverse(node.msg)
+
+ def visit_Global(self, node):
+ self.fill("global ")
+ self.interleave(lambda: self.write(", "), self.write, node.names)
+
+ def visit_Nonlocal(self, node):
+ self.fill("nonlocal ")
+ self.interleave(lambda: self.write(", "), self.write, node.names)
+
+ def visit_Await(self, node):
+ with self.require_parens(_Precedence.AWAIT, node):
+ self.write("await")
+ if node.value:
+ self.write(" ")
+ self.set_precedence(_Precedence.ATOM, node.value)
+ self.traverse(node.value)
+
+ def visit_Yield(self, node):
+ with self.require_parens(_Precedence.YIELD, node):
+ self.write("yield")
+ if node.value:
+ self.write(" ")
+ self.set_precedence(_Precedence.ATOM, node.value)
+ self.traverse(node.value)
+
+ def visit_YieldFrom(self, node):
+ with self.require_parens(_Precedence.YIELD, node):
+ self.write("yield from ")
+ if not node.value:
+ raise ValueError("Node can't be used without a value attribute.")
+ self.set_precedence(_Precedence.ATOM, node.value)
+ self.traverse(node.value)
+
+ def visit_Raise(self, node):
+ self.fill("raise")
+ if not node.exc:
+ if node.cause:
+ raise ValueError(f"Node can't use cause without an exception.")
+ return
+ self.write(" ")
+ self.traverse(node.exc)
+ if node.cause:
+ self.write(" from ")
+ self.traverse(node.cause)
+
+ def visit_Try(self, node):
+ self.fill("try")
+ with self.block():
+ self.traverse(node.body)
+ for ex in node.handlers:
+ self.traverse(ex)
+ if node.orelse:
+ self.fill("else")
+ with self.block():
+ self.traverse(node.orelse)
+ if node.finalbody:
+ self.fill("finally")
+ with self.block():
+ self.traverse(node.finalbody)
+
+ def visit_ExceptHandler(self, node):
+ self.fill("except")
+ if node.type:
+ self.write(" ")
+ self.traverse(node.type)
+ if node.name:
+ self.write(" as ")
+ self.write(node.name)
+ with self.block():
+ self.traverse(node.body)
+
+ def visit_ClassDef(self, node):
+ self.maybe_newline()
+ for deco in node.decorator_list:
+ self.fill("@")
+ self.traverse(deco)
+ self.fill("class " + node.name)
+ with self.delimit_if("(", ")", condition = node.bases or node.keywords):
+ comma = False
+ for e in node.bases:
+ if comma:
+ self.write(", ")
+ else:
+ comma = True
+ self.traverse(e)
+ for e in node.keywords:
+ if comma:
+ self.write(", ")
+ else:
+ comma = True
+ self.traverse(e)
+
+ with self.block():
+ self._write_docstring_and_traverse_body(node)
+
+ def visit_FunctionDef(self, node):
+ self._function_helper(node, "def")
+
+ def visit_AsyncFunctionDef(self, node):
+ self._function_helper(node, "async def")
+
+ def _function_helper(self, node, fill_suffix):
+ self.maybe_newline()
+ for deco in node.decorator_list:
+ self.fill("@")
+ self.traverse(deco)
+ def_str = fill_suffix + " " + node.name
+ self.fill(def_str)
+ with self.delimit("(", ")"):
+ self.traverse(node.args)
+ if node.returns:
+ self.write(" -> ")
+ self.traverse(node.returns)
+ with self.block(extra=self.get_type_comment(node)):
+ self._write_docstring_and_traverse_body(node)
+
+ def visit_For(self, node):
+ self._for_helper("for ", node)
+
+ def visit_AsyncFor(self, node):
+ self._for_helper("async for ", node)
+
+ def _for_helper(self, fill, node):
+ self.fill(fill)
+ self.traverse(node.target)
+ self.write(" in ")
+ self.traverse(node.iter)
+ with self.block(extra=self.get_type_comment(node)):
+ self.traverse(node.body)
+ if node.orelse:
+ self.fill("else")
+ with self.block():
+ self.traverse(node.orelse)
+
+ def visit_If(self, node):
+ self.fill("if ")
+ self.traverse(node.test)
+ with self.block():
+ self.traverse(node.body)
+ # collapse nested ifs into equivalent elifs.
+ while node.orelse and len(node.orelse) == 1 and isinstance(node.orelse[0], If):
+ node = node.orelse[0]
+ self.fill("elif ")
+ self.traverse(node.test)
+ with self.block():
+ self.traverse(node.body)
+ # final else
+ if node.orelse:
+ self.fill("else")
+ with self.block():
+ self.traverse(node.orelse)
+
+ def visit_While(self, node):
+ self.fill("while ")
+ self.traverse(node.test)
+ with self.block():
+ self.traverse(node.body)
+ if node.orelse:
+ self.fill("else")
+ with self.block():
+ self.traverse(node.orelse)
+
+ def visit_With(self, node):
+ self.fill("with ")
+ self.interleave(lambda: self.write(", "), self.traverse, node.items)
+ with self.block(extra=self.get_type_comment(node)):
+ self.traverse(node.body)
+
+ def visit_AsyncWith(self, node):
+ self.fill("async with ")
+ self.interleave(lambda: self.write(", "), self.traverse, node.items)
+ with self.block(extra=self.get_type_comment(node)):
+ self.traverse(node.body)
+
+ def _str_literal_helper(
+ self, string, *, quote_types=_ALL_QUOTES, escape_special_whitespace=False
+ ):
+ """Helper for writing string literals, minimizing escapes.
+ Returns the tuple (string literal to write, possible quote types).
+ """
+ def escape_char(c):
+ # \n and \t are non-printable, but we only escape them if
+ # escape_special_whitespace is True
+ if not escape_special_whitespace and c in "\n\t":
+ return c
+ # Always escape backslashes and other non-printable characters
+ if c == "\\" or not c.isprintable():
+ return c.encode("unicode_escape").decode("ascii")
+ return c
+
+ escaped_string = "".join(map(escape_char, string))
+ possible_quotes = quote_types
+ if "\n" in escaped_string:
+ possible_quotes = [q for q in possible_quotes if q in _MULTI_QUOTES]
+ possible_quotes = [q for q in possible_quotes if q not in escaped_string]
+ if not possible_quotes:
+ # If there aren't any possible_quotes, fall back to using repr
+ # on the original string. Try to use a quote from quote_types,
+ # e.g., so that we use triple quotes for docstrings.
+ string = repr(string)
+ quote = next((q for q in quote_types if string[0] in q), string[0])
+ return string[1:-1], [quote]
+ if escaped_string:
+ # Sort so that we prefer '''"''' over """\""""
+ possible_quotes.sort(key=lambda q: q[0] == escaped_string[-1])
+ # If we're using triple quotes and we'd need to escape a final
+ # quote, escape it
+ if possible_quotes[0][0] == escaped_string[-1]:
+ assert len(possible_quotes[0]) == 3
+ escaped_string = escaped_string[:-1] + "\\" + escaped_string[-1]
+ return escaped_string, possible_quotes
+
+ def _write_str_avoiding_backslashes(self, string, *, quote_types=_ALL_QUOTES):
+ """Write string literal value with a best effort attempt to avoid backslashes."""
+ string, quote_types = self._str_literal_helper(string, quote_types=quote_types)
+ quote_type = quote_types[0]
+ self.write(f"{quote_type}{string}{quote_type}")
+
+ def visit_JoinedStr(self, node):
+ self.write("f")
+ if self._avoid_backslashes:
+ self._fstring_JoinedStr(node, self.buffer_writer)
+ self._write_str_avoiding_backslashes(self.buffer)
+ return
+
+ # If we don't need to avoid backslashes globally (i.e., we only need
+ # to avoid them inside FormattedValues), it's cosmetically preferred
+ # to use escaped whitespace. That is, it's preferred to use backslashes
+ # for cases like: f"{x}\n". To accomplish this, we keep track of what
+ # in our buffer corresponds to FormattedValues and what corresponds to
+ # Constant parts of the f-string, and allow escapes accordingly.
+ buffer = []
+ for value in node.values:
+ meth = getattr(self, "_fstring_" + type(value).__name__)
+ meth(value, self.buffer_writer)
+ buffer.append((self.buffer, isinstance(value, Constant)))
+ new_buffer = []
+ quote_types = _ALL_QUOTES
+ for value, is_constant in buffer:
+ # Repeatedly narrow down the list of possible quote_types
+ value, quote_types = self._str_literal_helper(
+ value, quote_types=quote_types,
+ escape_special_whitespace=is_constant
+ )
+ new_buffer.append(value)
+ value = "".join(new_buffer)
+ quote_type = quote_types[0]
+ self.write(f"{quote_type}{value}{quote_type}")
+
+ def visit_FormattedValue(self, node):
+ self.write("f")
+ self._fstring_FormattedValue(node, self.buffer_writer)
+ self._write_str_avoiding_backslashes(self.buffer)
+
+ def _fstring_JoinedStr(self, node, write):
+ for value in node.values:
+ meth = getattr(self, "_fstring_" + type(value).__name__)
+ meth(value, write)
+
+ def _fstring_Constant(self, node, write):
+ if not isinstance(node.value, str):
+ raise ValueError("Constants inside JoinedStr should be a string.")
+ value = node.value.replace("{", "{{").replace("}", "}}")
+ write(value)
+
+ def _fstring_FormattedValue(self, node, write):
+ write("{")
+ unparser = type(self)(_avoid_backslashes=True)
+ unparser.set_precedence(_Precedence.TEST.next(), node.value)
+ expr = unparser.visit(node.value)
+ if expr.startswith("{"):
+ write(" ") # Separate pair of opening brackets as "{ {"
+ if "\\" in expr:
+ raise ValueError("Unable to avoid backslash in f-string expression part")
+ write(expr)
+ if node.conversion != -1:
+ conversion = chr(node.conversion)
+ if conversion not in "sra":
+ raise ValueError("Unknown f-string conversion.")
+ write(f"!{conversion}")
+ if node.format_spec:
+ write(":")
+ meth = getattr(self, "_fstring_" + type(node.format_spec).__name__)
+ meth(node.format_spec, write)
+ write("}")
+
+ def visit_Name(self, node):
+ self.write(node.id)
+
+ def _write_docstring(self, node):
+ self.fill()
+ if node.kind == "u":
+ self.write("u")
+ self._write_str_avoiding_backslashes(node.value, quote_types=_MULTI_QUOTES)
+
+ def _write_constant(self, value):
+ if isinstance(value, (float, complex)):
+ # Substitute overflowing decimal literal for AST infinities,
+ # and inf - inf for NaNs.
+ self.write(
+ repr(value)
+ .replace("inf", _INFSTR)
+ .replace("nan", f"({_INFSTR}-{_INFSTR})")
+ )
+ elif self._avoid_backslashes and isinstance(value, str):
+ self._write_str_avoiding_backslashes(value)
+ else:
+ self.write(repr(value))
+
+ def visit_Constant(self, node):
+ value = node.value
+ if isinstance(value, tuple):
+ with self.delimit("(", ")"):
+ self.items_view(self._write_constant, value)
+ elif value is ...:
+ self.write("...")
+ else:
+ if node.kind == "u":
+ self.write("u")
+ self._write_constant(node.value)
+
+ def visit_List(self, node):
+ with self.delimit("[", "]"):
+ self.interleave(lambda: self.write(", "), self.traverse, node.elts)
+
+ def visit_ListComp(self, node):
+ with self.delimit("[", "]"):
+ self.traverse(node.elt)
+ for gen in node.generators:
+ self.traverse(gen)
+
+ def visit_GeneratorExp(self, node):
+ with self.delimit("(", ")"):
+ self.traverse(node.elt)
+ for gen in node.generators:
+ self.traverse(gen)
+
+ def visit_SetComp(self, node):
+ with self.delimit("{", "}"):
+ self.traverse(node.elt)
+ for gen in node.generators:
+ self.traverse(gen)
+
+ def visit_DictComp(self, node):
+ with self.delimit("{", "}"):
+ self.traverse(node.key)
+ self.write(": ")
+ self.traverse(node.value)
+ for gen in node.generators:
+ self.traverse(gen)
+
+ def visit_comprehension(self, node):
+ if node.is_async:
+ self.write(" async for ")
+ else:
+ self.write(" for ")
+ self.set_precedence(_Precedence.TUPLE, node.target)
+ self.traverse(node.target)
+ self.write(" in ")
+ self.set_precedence(_Precedence.TEST.next(), node.iter, *node.ifs)
+ self.traverse(node.iter)
+ for if_clause in node.ifs:
+ self.write(" if ")
+ self.traverse(if_clause)
+
+ def visit_IfExp(self, node):
+ with self.require_parens(_Precedence.TEST, node):
+ self.set_precedence(_Precedence.TEST.next(), node.body, node.test)
+ self.traverse(node.body)
+ self.write(" if ")
+ self.traverse(node.test)
+ self.write(" else ")
+ self.set_precedence(_Precedence.TEST, node.orelse)
+ self.traverse(node.orelse)
+
+ def visit_Set(self, node):
+ if node.elts:
+ with self.delimit("{", "}"):
+ self.interleave(lambda: self.write(", "), self.traverse, node.elts)
+ else:
+ # `{}` would be interpreted as a dictionary literal, and
+ # `set` might be shadowed. Thus:
+ self.write('{*()}')
+
+ def visit_Dict(self, node):
+ def write_key_value_pair(k, v):
+ self.traverse(k)
+ self.write(": ")
+ self.traverse(v)
+
+ def write_item(item):
+ k, v = item
+ if k is None:
+ # for dictionary unpacking operator in dicts {**{'y': 2}}
+ # see PEP 448 for details
+ self.write("**")
+ self.set_precedence(_Precedence.EXPR, v)
+ self.traverse(v)
+ else:
+ write_key_value_pair(k, v)
+
+ with self.delimit("{", "}"):
+ self.interleave(
+ lambda: self.write(", "), write_item, zip(node.keys, node.values)
+ )
+
+ def visit_Tuple(self, node):
+ with self.delimit("(", ")"):
+ self.items_view(self.traverse, node.elts)
+
+ unop = {"Invert": "~", "Not": "not", "UAdd": "+", "USub": "-"}
+ unop_precedence = {
+ "not": _Precedence.NOT,
+ "~": _Precedence.FACTOR,
+ "+": _Precedence.FACTOR,
+ "-": _Precedence.FACTOR,
+ }
+
+ def visit_UnaryOp(self, node):
+ operator = self.unop[node.op.__class__.__name__]
+ operator_precedence = self.unop_precedence[operator]
+ with self.require_parens(operator_precedence, node):
+ self.write(operator)
+ # factor prefixes (+, -, ~) shouldn't be separated
+ # from the value they belong to (e.g., +1 instead of + 1)
+ if operator_precedence is not _Precedence.FACTOR:
+ self.write(" ")
+ self.set_precedence(operator_precedence, node.operand)
+ self.traverse(node.operand)
+
+ binop = {
+ "Add": "+",
+ "Sub": "-",
+ "Mult": "*",
+ "MatMult": "@",
+ "Div": "/",
+ "Mod": "%",
+ "LShift": "<<",
+ "RShift": ">>",
+ "BitOr": "|",
+ "BitXor": "^",
+ "BitAnd": "&",
+ "FloorDiv": "//",
+ "Pow": "**",
+ }
+
+ binop_precedence = {
+ "+": _Precedence.ARITH,
+ "-": _Precedence.ARITH,
+ "*": _Precedence.TERM,
+ "@": _Precedence.TERM,
+ "/": _Precedence.TERM,
+ "%": _Precedence.TERM,
+ "<<": _Precedence.SHIFT,
+ ">>": _Precedence.SHIFT,
+ "|": _Precedence.BOR,
+ "^": _Precedence.BXOR,
+ "&": _Precedence.BAND,
+ "//": _Precedence.TERM,
+ "**": _Precedence.POWER,
+ }
+
+ binop_rassoc = frozenset(("**",))
+ def visit_BinOp(self, node):
+ operator = self.binop[node.op.__class__.__name__]
+ operator_precedence = self.binop_precedence[operator]
+ with self.require_parens(operator_precedence, node):
+ if operator in self.binop_rassoc:
+ left_precedence = operator_precedence.next()
+ right_precedence = operator_precedence
+ else:
+ left_precedence = operator_precedence
+ right_precedence = operator_precedence.next()
+
+ self.set_precedence(left_precedence, node.left)
+ self.traverse(node.left)
+ self.write(f" {operator} ")
+ self.set_precedence(right_precedence, node.right)
+ self.traverse(node.right)
+
+ cmpops = {
+ "Eq": "==",
+ "NotEq": "!=",
+ "Lt": "<",
+ "LtE": "<=",
+ "Gt": ">",
+ "GtE": ">=",
+ "Is": "is",
+ "IsNot": "is not",
+ "In": "in",
+ "NotIn": "not in",
+ }
+
+ def visit_Compare(self, node):
+ with self.require_parens(_Precedence.CMP, node):
+ self.set_precedence(_Precedence.CMP.next(), node.left, *node.comparators)
+ self.traverse(node.left)
+ for o, e in zip(node.ops, node.comparators):
+ self.write(" " + self.cmpops[o.__class__.__name__] + " ")
+ self.traverse(e)
+
+ boolops = {"And": "and", "Or": "or"}
+ boolop_precedence = {"and": _Precedence.AND, "or": _Precedence.OR}
+
+ def visit_BoolOp(self, node):
+ operator = self.boolops[node.op.__class__.__name__]
+ operator_precedence = self.boolop_precedence[operator]
+
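+ # Bump the precedence for each successive operand so that an operand
+ # which is itself a BoolOp is parenthesized, preserving the shape of
+ # the original AST (e.g. "a or (b or c)").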
+ def increasing_level_traverse(node):
+ nonlocal operator_precedence
+ operator_precedence = operator_precedence.next()
+ self.set_precedence(operator_precedence, node)
+ self.traverse(node)
+
+ with self.require_parens(operator_precedence, node):
+ s = f" {operator} "
+ self.interleave(lambda: self.write(s), increasing_level_traverse, node.values)
+
+ def visit_Attribute(self, node):
+ self.set_precedence(_Precedence.ATOM, node.value)
+ self.traverse(node.value)
+ # Special case: 3.__abs__() is a syntax error, so if node.value
+ # is an integer literal then we need to either parenthesize
+ # it or add an extra space to get 3 .__abs__().
+ if isinstance(node.value, Constant) and isinstance(node.value.value, int):
+ self.write(" ")
+ self.write(".")
+ self.write(node.attr)
+
+ def visit_Call(self, node):
+ self.set_precedence(_Precedence.ATOM, node.func)
+ self.traverse(node.func)
+ with self.delimit("(", ")"):
+ comma = False
+ for e in node.args:
+ if comma:
+ self.write(", ")
+ else:
+ comma = True
+ self.traverse(e)
+ for e in node.keywords:
+ if comma:
+ self.write(", ")
+ else:
+ comma = True
+ self.traverse(e)
+
+ def visit_Subscript(self, node):
+ def is_simple_tuple(slice_value):
+ # when unparsing a non-empty tuple, the parentheses can be safely
+ # omitted if there aren't any elements that explicitly require
+ # parentheses (such as starred expressions).
+ return (
+ isinstance(slice_value, Tuple)
+ and slice_value.elts
+ and not any(isinstance(elt, Starred) for elt in slice_value.elts)
+ )
+
+ self.set_precedence(_Precedence.ATOM, node.value)
+ self.traverse(node.value)
+ with self.delimit("[", "]"):
+ if is_simple_tuple(node.slice):
+ self.items_view(self.traverse, node.slice.elts)
+ else:
+ self.traverse(node.slice)
+
+ def visit_Starred(self, node):
+ self.write("*")
+ self.set_precedence(_Precedence.EXPR, node.value)
+ self.traverse(node.value)
+
+ def visit_Ellipsis(self, node):
+ self.write("...")
+
+ def visit_Slice(self, node):
+ if node.lower:
+ self.traverse(node.lower)
+ self.write(":")
+ if node.upper:
+ self.traverse(node.upper)
+ if node.step:
+ self.write(":")
+ self.traverse(node.step)
+
+ def visit_arg(self, node):
+ self.write(node.arg)
+ if node.annotation:
+ self.write(": ")
+ self.traverse(node.annotation)
+
+ def visit_arguments(self, node):
+ first = True
+ # normal arguments
+ all_args = node.posonlyargs + node.args
+ defaults = [None] * (len(all_args) - len(node.defaults)) + node.defaults
+ for index, elements in enumerate(zip(all_args, defaults), 1):
+ a, d = elements
+ if first:
+ first = False
+ else:
+ self.write(", ")
+ self.traverse(a)
+ if d:
+ self.write("=")
+ self.traverse(d)
+ if index == len(node.posonlyargs):
+ self.write(", /")
+
+ # varargs, or bare '*' if no varargs but keyword-only arguments present
+ if node.vararg or node.kwonlyargs:
+ if first:
+ first = False
+ else:
+ self.write(", ")
+ self.write("*")
+ if node.vararg:
+ self.write(node.vararg.arg)
+ if node.vararg.annotation:
+ self.write(": ")
+ self.traverse(node.vararg.annotation)
+
+ # keyword-only arguments
+ if node.kwonlyargs:
+ for a, d in zip(node.kwonlyargs, node.kw_defaults):
+ self.write(", ")
+ self.traverse(a)
+ if d:
+ self.write("=")
+ self.traverse(d)
+
+ # kwargs
+ if node.kwarg:
+ if first:
+ first = False
+ else:
+ self.write(", ")
+ self.write("**" + node.kwarg.arg)
+ if node.kwarg.annotation:
+ self.write(": ")
+ self.traverse(node.kwarg.annotation)
+
+ def visit_keyword(self, node):
+ if node.arg is None:
+ self.write("**")
+ else:
+ self.write(node.arg)
+ self.write("=")
+ self.traverse(node.value)
+
+ def visit_Lambda(self, node):
+ with self.require_parens(_Precedence.TEST, node):
+ self.write("lambda ")
+ self.traverse(node.args)
+ self.write(": ")
+ self.set_precedence(_Precedence.TEST, node.body)
+ self.traverse(node.body)
+
+ def visit_alias(self, node):
+ self.write(node.name)
+ if node.asname:
+ self.write(" as " + node.asname)
+
+ def visit_withitem(self, node):
+ self.traverse(node.context_expr)
+ if node.optional_vars:
+ self.write(" as ")
+ self.traverse(node.optional_vars)
+
+def unparse(ast_obj):
+ unparser = _Unparser()
+ return unparser.visit(ast_obj)
+
+
+def main():
+ import argparse
+
+ parser = argparse.ArgumentParser(prog='python -m ast')
+ parser.add_argument('infile', type=argparse.FileType(mode='rb'), nargs='?',
+ default='-',
+ help='the file to parse; defaults to stdin')
+ parser.add_argument('-m', '--mode', default='exec',
+ choices=('exec', 'single', 'eval', 'func_type'),
+ help='specify what kind of code must be parsed')
+ parser.add_argument('--no-type-comments', default=True, action='store_false',
+ help="don't add information about type comments")
+ parser.add_argument('-a', '--include-attributes', action='store_true',
+ help='include attributes such as line numbers and '
+ 'column offsets')
+ parser.add_argument('-i', '--indent', type=int, default=3,
+ help='indentation of nodes (number of spaces)')
+ args = parser.parse_args()
+
+ with args.infile as infile:
+ source = infile.read()
+ tree = parse(source, args.infile.name, args.mode, type_comments=args.no_type_comments)
+ print(dump(tree, include_attributes=args.include_attributes, indent=args.indent))
+
+if __name__ == '__main__':
+ main()
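
A minimal sketch (not part of the diff) of what the unparser and the
Constant compatibility shims above provide, assuming Python 3.9+, where
ast.unparse was added:

    import ast

    # Round-trip: parse source, then regenerate equivalent source.
    tree = ast.parse("x = {**{'y': 2}, 'z': 3}")
    print(ast.unparse(tree))  # x = {**{'y': 2}, 'z': 3}

    # The deprecated node classes construct and match ast.Constant.
    node = ast.Num(42)                    # returns Constant(value=42)
    print(isinstance(node, ast.Num))      # True
    print(isinstance(ast.Constant(True), ast.Num))  # False: bools excluded
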
diff --git a/contrib/tools/python3/src/Lib/asynchat.py b/contrib/tools/python3/src/Lib/asynchat.py
index f4ba361bd4..898d305d7d 100644
--- a/contrib/tools/python3/src/Lib/asynchat.py
+++ b/contrib/tools/python3/src/Lib/asynchat.py
@@ -117,7 +117,7 @@ class async_chat(asyncore.dispatcher):
data = self.recv(self.ac_in_buffer_size)
except BlockingIOError:
return
- except OSError:
+ except OSError:
self.handle_error()
return
diff --git a/contrib/tools/python3/src/Lib/asyncio/__init__.py b/contrib/tools/python3/src/Lib/asyncio/__init__.py
index eb84bfb189..8a90986b0b 100644
--- a/contrib/tools/python3/src/Lib/asyncio/__init__.py
+++ b/contrib/tools/python3/src/Lib/asyncio/__init__.py
@@ -8,7 +8,7 @@ import sys
from .base_events import *
from .coroutines import *
from .events import *
-from .exceptions import *
+from .exceptions import *
from .futures import *
from .locks import *
from .protocols import *
@@ -17,7 +17,7 @@ from .queues import *
from .streams import *
from .subprocess import *
from .tasks import *
-from .threads import *
+from .threads import *
from .transports import *
# Exposed for _asynciomodule.c to implement now deprecated
@@ -27,7 +27,7 @@ from .tasks import _all_tasks_compat # NoQA
__all__ = (base_events.__all__ +
coroutines.__all__ +
events.__all__ +
- exceptions.__all__ +
+ exceptions.__all__ +
futures.__all__ +
locks.__all__ +
protocols.__all__ +
@@ -36,7 +36,7 @@ __all__ = (base_events.__all__ +
streams.__all__ +
subprocess.__all__ +
tasks.__all__ +
- threads.__all__ +
+ threads.__all__ +
transports.__all__)
if sys.platform == 'win32': # pragma: no cover
diff --git a/contrib/tools/python3/src/Lib/asyncio/__main__.py b/contrib/tools/python3/src/Lib/asyncio/__main__.py
index 18bb87a5bc..abe8e722dd 100644
--- a/contrib/tools/python3/src/Lib/asyncio/__main__.py
+++ b/contrib/tools/python3/src/Lib/asyncio/__main__.py
@@ -1,125 +1,125 @@
-import ast
-import asyncio
-import code
-import concurrent.futures
-import inspect
-import sys
-import threading
-import types
-import warnings
-
-from . import futures
-
-
-class AsyncIOInteractiveConsole(code.InteractiveConsole):
-
- def __init__(self, locals, loop):
- super().__init__(locals)
- self.compile.compiler.flags |= ast.PyCF_ALLOW_TOP_LEVEL_AWAIT
-
- self.loop = loop
-
- def runcode(self, code):
- future = concurrent.futures.Future()
-
- def callback():
- global repl_future
- global repl_future_interrupted
-
- repl_future = None
- repl_future_interrupted = False
-
- func = types.FunctionType(code, self.locals)
- try:
- coro = func()
- except SystemExit:
- raise
- except KeyboardInterrupt as ex:
- repl_future_interrupted = True
- future.set_exception(ex)
- return
- except BaseException as ex:
- future.set_exception(ex)
- return
-
- if not inspect.iscoroutine(coro):
- future.set_result(coro)
- return
-
- try:
- repl_future = self.loop.create_task(coro)
- futures._chain_future(repl_future, future)
- except BaseException as exc:
- future.set_exception(exc)
-
- loop.call_soon_threadsafe(callback)
-
- try:
- return future.result()
- except SystemExit:
- raise
- except BaseException:
- if repl_future_interrupted:
- self.write("\nKeyboardInterrupt\n")
- else:
- self.showtraceback()
-
-
-class REPLThread(threading.Thread):
-
- def run(self):
- try:
- banner = (
- f'asyncio REPL {sys.version} on {sys.platform}\n'
- f'Use "await" directly instead of "asyncio.run()".\n'
- f'Type "help", "copyright", "credits" or "license" '
- f'for more information.\n'
- f'{getattr(sys, "ps1", ">>> ")}import asyncio'
- )
-
- console.interact(
- banner=banner,
- exitmsg='exiting asyncio REPL...')
- finally:
- warnings.filterwarnings(
- 'ignore',
- message=r'^coroutine .* was never awaited$',
- category=RuntimeWarning)
-
- loop.call_soon_threadsafe(loop.stop)
-
-
-if __name__ == '__main__':
- loop = asyncio.new_event_loop()
- asyncio.set_event_loop(loop)
-
- repl_locals = {'asyncio': asyncio}
- for key in {'__name__', '__package__',
- '__loader__', '__spec__',
- '__builtins__', '__file__'}:
- repl_locals[key] = locals()[key]
-
- console = AsyncIOInteractiveConsole(repl_locals, loop)
-
- repl_future = None
- repl_future_interrupted = False
-
- try:
- import readline # NoQA
- except ImportError:
- pass
-
- repl_thread = REPLThread()
- repl_thread.daemon = True
- repl_thread.start()
-
- while True:
- try:
- loop.run_forever()
- except KeyboardInterrupt:
- if repl_future and not repl_future.done():
- repl_future.cancel()
- repl_future_interrupted = True
- continue
- else:
- break
+import ast
+import asyncio
+import code
+import concurrent.futures
+import inspect
+import sys
+import threading
+import types
+import warnings
+
+from . import futures
+
+
+class AsyncIOInteractiveConsole(code.InteractiveConsole):
+
+ def __init__(self, locals, loop):
+ super().__init__(locals)
+ self.compile.compiler.flags |= ast.PyCF_ALLOW_TOP_LEVEL_AWAIT
+
+ self.loop = loop
+
+ def runcode(self, code):
+ future = concurrent.futures.Future()
+
+ def callback():
+ global repl_future
+ global repl_future_interrupted
+
+ repl_future = None
+ repl_future_interrupted = False
+
+ func = types.FunctionType(code, self.locals)
+ try:
+ coro = func()
+ except SystemExit:
+ raise
+ except KeyboardInterrupt as ex:
+ repl_future_interrupted = True
+ future.set_exception(ex)
+ return
+ except BaseException as ex:
+ future.set_exception(ex)
+ return
+
+ if not inspect.iscoroutine(coro):
+ future.set_result(coro)
+ return
+
+ try:
+ repl_future = self.loop.create_task(coro)
+ futures._chain_future(repl_future, future)
+ except BaseException as exc:
+ future.set_exception(exc)
+
+ loop.call_soon_threadsafe(callback)
+
+ try:
+ return future.result()
+ except SystemExit:
+ raise
+ except BaseException:
+ if repl_future_interrupted:
+ self.write("\nKeyboardInterrupt\n")
+ else:
+ self.showtraceback()
+
+
+class REPLThread(threading.Thread):
+
+ def run(self):
+ try:
+ banner = (
+ f'asyncio REPL {sys.version} on {sys.platform}\n'
+ f'Use "await" directly instead of "asyncio.run()".\n'
+ f'Type "help", "copyright", "credits" or "license" '
+ f'for more information.\n'
+ f'{getattr(sys, "ps1", ">>> ")}import asyncio'
+ )
+
+ console.interact(
+ banner=banner,
+ exitmsg='exiting asyncio REPL...')
+ finally:
+ warnings.filterwarnings(
+ 'ignore',
+ message=r'^coroutine .* was never awaited$',
+ category=RuntimeWarning)
+
+ loop.call_soon_threadsafe(loop.stop)
+
+
+if __name__ == '__main__':
+ loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(loop)
+
+ repl_locals = {'asyncio': asyncio}
+ for key in {'__name__', '__package__',
+ '__loader__', '__spec__',
+ '__builtins__', '__file__'}:
+ repl_locals[key] = locals()[key]
+
+ console = AsyncIOInteractiveConsole(repl_locals, loop)
+
+ repl_future = None
+ repl_future_interrupted = False
+
+ try:
+ import readline # NoQA
+ except ImportError:
+ pass
+
+ repl_thread = REPLThread()
+ repl_thread.daemon = True
+ repl_thread.start()
+
+ while True:
+ try:
+ loop.run_forever()
+ except KeyboardInterrupt:
+ if repl_future and not repl_future.done():
+ repl_future.cancel()
+ repl_future_interrupted = True
+ continue
+ else:
+ break
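The console above relies on ast.PyCF_ALLOW_TOP_LEVEL_AWAIT: with that flag, compile() accepts "await" outside an async function, and calling the resulting code object produces a coroutine, which is exactly what runcode() checks. A standalone sketch of the mechanism, assuming CPython 3.8+:

    import ast
    import asyncio
    import types

    # With the flag, "await" is legal at top level and the compiled
    # code object carries the coroutine flag.
    code = compile("await asyncio.sleep(0)\nprint('done')", "<demo>",
                   "exec", flags=ast.PyCF_ALLOW_TOP_LEVEL_AWAIT)

    func = types.FunctionType(code, {"asyncio": asyncio, "print": print})
    coro = func()       # a coroutine, as inspect.iscoroutine() confirms
    asyncio.run(coro)   # prints: done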
diff --git a/contrib/tools/python3/src/Lib/asyncio/base_events.py b/contrib/tools/python3/src/Lib/asyncio/base_events.py
index 8c1fb49694..34fdbf2146 100644
--- a/contrib/tools/python3/src/Lib/asyncio/base_events.py
+++ b/contrib/tools/python3/src/Lib/asyncio/base_events.py
@@ -16,12 +16,12 @@ to modify the meaning of the API call itself.
import collections
import collections.abc
import concurrent.futures
-import functools
+import functools
import heapq
import itertools
import os
import socket
-import stat
+import stat
import subprocess
import threading
import time
@@ -38,14 +38,14 @@ except ImportError: # pragma: no cover
from . import constants
from . import coroutines
from . import events
-from . import exceptions
+from . import exceptions
from . import futures
from . import protocols
from . import sslproto
-from . import staggered
+from . import staggered
from . import tasks
from . import transports
-from . import trsock
+from . import trsock
from .log import logger
@@ -60,17 +60,17 @@ _MIN_SCHEDULED_TIMER_HANDLES = 100
# before cleanup of cancelled handles is performed.
_MIN_CANCELLED_TIMER_HANDLES_FRACTION = 0.5
-
+
_HAS_IPv6 = hasattr(socket, 'AF_INET6')
# Maximum timeout passed to select to avoid OS limitations
MAXIMUM_SELECT_TIMEOUT = 24 * 3600
-# Used for deprecation and removal of `loop.create_datagram_endpoint()`'s
-# *reuse_address* parameter
-_unset = object()
-
+# Used for deprecation and removal of `loop.create_datagram_endpoint()`'s
+# *reuse_address* parameter
+_unset = object()
+
def _format_handle(handle):
cb = handle._callback
if isinstance(getattr(cb, '__self__', None), tasks.Task):
@@ -100,7 +100,7 @@ def _set_reuseport(sock):
'SO_REUSEPORT defined but not implemented.')
-def _ipaddr_info(host, port, family, type, proto, flowinfo=0, scopeid=0):
+def _ipaddr_info(host, port, family, type, proto, flowinfo=0, scopeid=0):
# Try to skip getaddrinfo if "host" is already an IP. Users might have
# handled name resolution in their own code and pass in resolved IPs.
if not hasattr(socket, 'inet_pton'):
@@ -149,7 +149,7 @@ def _ipaddr_info(host, port, family, type, proto, flowinfo=0, scopeid=0):
socket.inet_pton(af, host)
# The host has already been resolved.
if _HAS_IPv6 and af == socket.AF_INET6:
- return af, type, proto, '', (host, port, flowinfo, scopeid)
+ return af, type, proto, '', (host, port, flowinfo, scopeid)
else:
return af, type, proto, '', (host, port)
except OSError:
@@ -159,32 +159,32 @@ def _ipaddr_info(host, port, family, type, proto, flowinfo=0, scopeid=0):
return None
-def _interleave_addrinfos(addrinfos, first_address_family_count=1):
- """Interleave list of addrinfo tuples by family."""
- # Group addresses by family
- addrinfos_by_family = collections.OrderedDict()
- for addr in addrinfos:
- family = addr[0]
- if family not in addrinfos_by_family:
- addrinfos_by_family[family] = []
- addrinfos_by_family[family].append(addr)
- addrinfos_lists = list(addrinfos_by_family.values())
-
- reordered = []
- if first_address_family_count > 1:
- reordered.extend(addrinfos_lists[0][:first_address_family_count - 1])
- del addrinfos_lists[0][:first_address_family_count - 1]
- reordered.extend(
- a for a in itertools.chain.from_iterable(
- itertools.zip_longest(*addrinfos_lists)
- ) if a is not None)
- return reordered
-
-
+def _interleave_addrinfos(addrinfos, first_address_family_count=1):
+ """Interleave list of addrinfo tuples by family."""
+ # Group addresses by family
+ addrinfos_by_family = collections.OrderedDict()
+ for addr in addrinfos:
+ family = addr[0]
+ if family not in addrinfos_by_family:
+ addrinfos_by_family[family] = []
+ addrinfos_by_family[family].append(addr)
+ addrinfos_lists = list(addrinfos_by_family.values())
+
+ reordered = []
+ if first_address_family_count > 1:
+ reordered.extend(addrinfos_lists[0][:first_address_family_count - 1])
+ del addrinfos_lists[0][:first_address_family_count - 1]
+ reordered.extend(
+ a for a in itertools.chain.from_iterable(
+ itertools.zip_longest(*addrinfos_lists)
+ ) if a is not None)
+ return reordered
+
+
def _run_until_complete_cb(fut):
if not fut.cancelled():
exc = fut.exception()
- if isinstance(exc, (SystemExit, KeyboardInterrupt)):
+ if isinstance(exc, (SystemExit, KeyboardInterrupt)):
# Issue #22429: run_forever() already finished, no need to
# stop it.
return
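_interleave_addrinfos(), re-added above, reorders getaddrinfo() results so that address families alternate, which is what Happy Eyeballs (RFC 8305) expects when racing connection attempts. A sketch of its effect on hand-built tuples; the helper is private, and only the family slot and the ordering matter here:

    import socket
    from asyncio.base_events import _interleave_addrinfos  # private helper

    v6 = [(socket.AF_INET6, 0, 0, "", ("2001:db8::1", 80, 0, 0)),
          (socket.AF_INET6, 0, 0, "", ("2001:db8::2", 80, 0, 0))]
    v4 = [(socket.AF_INET, 0, 0, "", ("192.0.2.1", 80))]

    # Families now alternate: 2001:db8::1, 192.0.2.1, 2001:db8::2.
    for info in _interleave_addrinfos(v6 + v4):
        print(info[4][0])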
@@ -324,8 +324,8 @@ class Server(events.AbstractServer):
@property
def sockets(self):
if self._sockets is None:
- return ()
- return tuple(trsock.TransportSocket(s) for s in self._sockets)
+ return ()
+ return tuple(trsock.TransportSocket(s) for s in self._sockets)
def close(self):
sockets = self._sockets
@@ -350,7 +350,7 @@ class Server(events.AbstractServer):
self._start_serving()
# Skip one loop iteration so that all 'loop.add_reader'
# go through.
- await tasks.sleep(0)
+ await tasks.sleep(0)
async def serve_forever(self):
if self._serving_forever_fut is not None:
@@ -364,7 +364,7 @@ class Server(events.AbstractServer):
try:
await self._serving_forever_fut
- except exceptions.CancelledError:
+ except exceptions.CancelledError:
try:
self.close()
await self.wait_closed()
@@ -410,8 +410,8 @@ class BaseEventLoop(events.AbstractEventLoop):
self._asyncgens = weakref.WeakSet()
# Set to True when `loop.shutdown_asyncgens` is called.
self._asyncgens_shutdown_called = False
- # Set to True when `loop.shutdown_default_executor` is called.
- self._executor_shutdown_called = False
+ # Set to True when `loop.shutdown_default_executor` is called.
+ self._executor_shutdown_called = False
def __repr__(self):
return (
@@ -423,20 +423,20 @@ class BaseEventLoop(events.AbstractEventLoop):
"""Create a Future object attached to the loop."""
return futures.Future(loop=self)
- def create_task(self, coro, *, name=None):
+ def create_task(self, coro, *, name=None):
"""Schedule a coroutine object.
Return a task object.
"""
self._check_closed()
if self._task_factory is None:
- task = tasks.Task(coro, loop=self, name=name)
+ task = tasks.Task(coro, loop=self, name=name)
if task._source_traceback:
del task._source_traceback[-1]
else:
task = self._task_factory(self, coro)
- tasks._set_task_name(task, name)
-
+ tasks._set_task_name(task, name)
+
return task
def set_task_factory(self, factory):
@@ -509,10 +509,10 @@ class BaseEventLoop(events.AbstractEventLoop):
if self._closed:
raise RuntimeError('Event loop is closed')
- def _check_default_executor(self):
- if self._executor_shutdown_called:
- raise RuntimeError('Executor shutdown has been called')
-
+ def _check_default_executor(self):
+ if self._executor_shutdown_called:
+ raise RuntimeError('Executor shutdown has been called')
+
def _asyncgen_finalizer_hook(self, agen):
self._asyncgens.discard(agen)
if not self.is_closed():
@@ -539,7 +539,7 @@ class BaseEventLoop(events.AbstractEventLoop):
closing_agens = list(self._asyncgens)
self._asyncgens.clear()
- results = await tasks._gather(
+ results = await tasks._gather(
*[ag.aclose() for ag in closing_agens],
return_exceptions=True,
loop=self)
@@ -553,37 +553,37 @@ class BaseEventLoop(events.AbstractEventLoop):
'asyncgen': agen
})
- async def shutdown_default_executor(self):
- """Schedule the shutdown of the default executor."""
- self._executor_shutdown_called = True
- if self._default_executor is None:
- return
- future = self.create_future()
- thread = threading.Thread(target=self._do_shutdown, args=(future,))
- thread.start()
- try:
- await future
- finally:
- thread.join()
-
- def _do_shutdown(self, future):
- try:
- self._default_executor.shutdown(wait=True)
- self.call_soon_threadsafe(future.set_result, None)
- except Exception as ex:
- self.call_soon_threadsafe(future.set_exception, ex)
-
- def _check_running(self):
+ async def shutdown_default_executor(self):
+ """Schedule the shutdown of the default executor."""
+ self._executor_shutdown_called = True
+ if self._default_executor is None:
+ return
+ future = self.create_future()
+ thread = threading.Thread(target=self._do_shutdown, args=(future,))
+ thread.start()
+ try:
+ await future
+ finally:
+ thread.join()
+
+ def _do_shutdown(self, future):
+ try:
+ self._default_executor.shutdown(wait=True)
+ self.call_soon_threadsafe(future.set_result, None)
+ except Exception as ex:
+ self.call_soon_threadsafe(future.set_exception, ex)
+
+ def _check_running(self):
if self.is_running():
raise RuntimeError('This event loop is already running')
if events._get_running_loop() is not None:
raise RuntimeError(
'Cannot run the event loop while another loop is running')
-
- def run_forever(self):
- """Run until stop() is called."""
- self._check_closed()
- self._check_running()
+
+ def run_forever(self):
+ """Run until stop() is called."""
+ self._check_closed()
+ self._check_running()
self._set_coroutine_origin_tracking(self._debug)
self._thread_id = threading.get_ident()
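shutdown_default_executor(), added above, joins the default ThreadPoolExecutor from a helper thread so the loop can keep processing callbacks while the pool drains. A sketch of the manual loop lifecycle that exercises it; asyncio.run() performs the same teardown sequence in 3.9+:

    import asyncio

    async def main():
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, print, "ran in the default executor")

    loop = asyncio.new_event_loop()
    try:
        loop.run_until_complete(main())
    finally:
        # Mirror asyncio.run(): drain async generators, then the executor.
        loop.run_until_complete(loop.shutdown_asyncgens())
        loop.run_until_complete(loop.shutdown_default_executor())
        loop.close()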
@@ -615,7 +615,7 @@ class BaseEventLoop(events.AbstractEventLoop):
Return the Future's result, or raise its exception.
"""
self._check_closed()
- self._check_running()
+ self._check_running()
new_task = not futures.isfuture(future)
future = tasks.ensure_future(future, loop=self)
@@ -666,7 +666,7 @@ class BaseEventLoop(events.AbstractEventLoop):
self._closed = True
self._ready.clear()
self._scheduled.clear()
- self._executor_shutdown_called = True
+ self._executor_shutdown_called = True
executor = self._default_executor
if executor is not None:
self._default_executor = None
@@ -676,9 +676,9 @@ class BaseEventLoop(events.AbstractEventLoop):
"""Returns True if the event loop was closed."""
return self._closed
- def __del__(self, _warn=warnings.warn):
+ def __del__(self, _warn=warnings.warn):
if not self.is_closed():
- _warn(f"unclosed event loop {self!r}", ResourceWarning, source=self)
+ _warn(f"unclosed event loop {self!r}", ResourceWarning, source=self)
if not self.is_running():
self.close()
@@ -803,23 +803,23 @@ class BaseEventLoop(events.AbstractEventLoop):
self._check_callback(func, 'run_in_executor')
if executor is None:
executor = self._default_executor
- # Only check when the default executor is being used
- self._check_default_executor()
+ # Only check when the default executor is being used
+ self._check_default_executor()
if executor is None:
- executor = concurrent.futures.ThreadPoolExecutor(
- thread_name_prefix='asyncio'
- )
+ executor = concurrent.futures.ThreadPoolExecutor(
+ thread_name_prefix='asyncio'
+ )
self._default_executor = executor
return futures.wrap_future(
executor.submit(func, *args), loop=self)
def set_default_executor(self, executor):
- if not isinstance(executor, concurrent.futures.ThreadPoolExecutor):
- warnings.warn(
- 'Using the default executor that is not an instance of '
- 'ThreadPoolExecutor is deprecated and will be prohibited '
- 'in Python 3.9',
- DeprecationWarning, 2)
+ if not isinstance(executor, concurrent.futures.ThreadPoolExecutor):
+ warnings.warn(
+ 'Using the default executor that is not an instance of '
+ 'ThreadPoolExecutor is deprecated and will be prohibited '
+ 'in Python 3.9',
+ DeprecationWarning, 2)
self._default_executor = executor
def _getaddrinfo_debug(self, host, port, family, type, proto, flags):
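The warning added above means only concurrent.futures.ThreadPoolExecutor instances remain supported as the default executor. A sketch of installing one explicitly instead of relying on the lazily created pool:

    import asyncio
    import concurrent.futures
    import time

    async def main():
        loop = asyncio.get_running_loop()
        pool = concurrent.futures.ThreadPoolExecutor(
            max_workers=4, thread_name_prefix="asyncio")
        loop.set_default_executor(pool)
        # Passing None selects the default executor installed above.
        await loop.run_in_executor(None, time.sleep, 0.1)

    asyncio.run(main())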
@@ -868,7 +868,7 @@ class BaseEventLoop(events.AbstractEventLoop):
try:
return await self._sock_sendfile_native(sock, file,
offset, count)
- except exceptions.SendfileNotAvailableError as exc:
+ except exceptions.SendfileNotAvailableError as exc:
if not fallback:
raise
return await self._sock_sendfile_fallback(sock, file,
@@ -877,7 +877,7 @@ class BaseEventLoop(events.AbstractEventLoop):
async def _sock_sendfile_native(self, sock, file, offset, count):
# NB: sendfile syscall is not supported for SSL sockets and
# non-mmap files even if sendfile is supported by OS
- raise exceptions.SendfileNotAvailableError(
+ raise exceptions.SendfileNotAvailableError(
f"syscall sendfile is not available for socket {sock!r} "
"and file {file!r} combination")
@@ -900,7 +900,7 @@ class BaseEventLoop(events.AbstractEventLoop):
read = await self.run_in_executor(None, file.readinto, view)
if not read:
break # EOF
- await self.sock_sendall(sock, view[:read])
+ await self.sock_sendall(sock, view[:read])
total_sent += read
return total_sent
finally:
@@ -928,49 +928,49 @@ class BaseEventLoop(events.AbstractEventLoop):
"offset must be a non-negative integer (got {!r})".format(
offset))
- async def _connect_sock(self, exceptions, addr_info, local_addr_infos=None):
- """Create, bind and connect one socket."""
- my_exceptions = []
- exceptions.append(my_exceptions)
- family, type_, proto, _, address = addr_info
- sock = None
- try:
- sock = socket.socket(family=family, type=type_, proto=proto)
- sock.setblocking(False)
- if local_addr_infos is not None:
- for _, _, _, _, laddr in local_addr_infos:
- try:
- sock.bind(laddr)
- break
- except OSError as exc:
- msg = (
- f'error while attempting to bind on '
- f'address {laddr!r}: '
- f'{exc.strerror.lower()}'
- )
- exc = OSError(exc.errno, msg)
- my_exceptions.append(exc)
- else: # all bind attempts failed
- raise my_exceptions.pop()
- await self.sock_connect(sock, address)
- return sock
- except OSError as exc:
- my_exceptions.append(exc)
- if sock is not None:
- sock.close()
- raise
- except:
- if sock is not None:
- sock.close()
- raise
-
+ async def _connect_sock(self, exceptions, addr_info, local_addr_infos=None):
+ """Create, bind and connect one socket."""
+ my_exceptions = []
+ exceptions.append(my_exceptions)
+ family, type_, proto, _, address = addr_info
+ sock = None
+ try:
+ sock = socket.socket(family=family, type=type_, proto=proto)
+ sock.setblocking(False)
+ if local_addr_infos is not None:
+ for _, _, _, _, laddr in local_addr_infos:
+ try:
+ sock.bind(laddr)
+ break
+ except OSError as exc:
+ msg = (
+ f'error while attempting to bind on '
+ f'address {laddr!r}: '
+ f'{exc.strerror.lower()}'
+ )
+ exc = OSError(exc.errno, msg)
+ my_exceptions.append(exc)
+ else: # all bind attempts failed
+ raise my_exceptions.pop()
+ await self.sock_connect(sock, address)
+ return sock
+ except OSError as exc:
+ my_exceptions.append(exc)
+ if sock is not None:
+ sock.close()
+ raise
+ except:
+ if sock is not None:
+ sock.close()
+ raise
+
async def create_connection(
self, protocol_factory, host=None, port=None,
*, ssl=None, family=0,
proto=0, flags=0, sock=None,
local_addr=None, server_hostname=None,
- ssl_handshake_timeout=None,
- happy_eyeballs_delay=None, interleave=None):
+ ssl_handshake_timeout=None,
+ happy_eyeballs_delay=None, interleave=None):
"""Connect to a TCP server.
Create a streaming transport connection to a given Internet host and
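The happy_eyeballs_delay and interleave parameters added to the signature above surface RFC 8305 behaviour to callers; when a delay is given and interleave is unset, interleave defaults to 1, as the code below this docstring shows. A usage sketch, where host and port are placeholders:

    import asyncio

    async def main():
        # Attempts to each resolved address are staggered by 250 ms
        # instead of being tried strictly in sequence.
        reader, writer = await asyncio.open_connection(
            "example.com", 80, happy_eyeballs_delay=0.25)
        writer.close()
        await writer.wait_closed()

    asyncio.run(main())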
@@ -1005,10 +1005,10 @@ class BaseEventLoop(events.AbstractEventLoop):
raise ValueError(
'ssl_handshake_timeout is only meaningful with ssl')
- if happy_eyeballs_delay is not None and interleave is None:
- # If using happy eyeballs, default to interleave addresses by family
- interleave = 1
-
+ if happy_eyeballs_delay is not None and interleave is None:
+ # If using happy eyeballs, default to interleave addresses by family
+ interleave = 1
+
if host is not None or port is not None:
if sock is not None:
raise ValueError(
@@ -1027,31 +1027,31 @@ class BaseEventLoop(events.AbstractEventLoop):
flags=flags, loop=self)
if not laddr_infos:
raise OSError('getaddrinfo() returned empty list')
- else:
- laddr_infos = None
-
- if interleave:
- infos = _interleave_addrinfos(infos, interleave)
+ else:
+ laddr_infos = None
+ if interleave:
+ infos = _interleave_addrinfos(infos, interleave)
+
exceptions = []
- if happy_eyeballs_delay is None:
- # not using happy eyeballs
- for addrinfo in infos:
- try:
- sock = await self._connect_sock(
- exceptions, addrinfo, laddr_infos)
- break
- except OSError:
- continue
- else: # using happy eyeballs
- sock, _, _ = await staggered.staggered_race(
- (functools.partial(self._connect_sock,
- exceptions, addrinfo, laddr_infos)
- for addrinfo in infos),
- happy_eyeballs_delay, loop=self)
-
- if sock is None:
- exceptions = [exc for sub in exceptions for exc in sub]
+ if happy_eyeballs_delay is None:
+ # not using happy eyeballs
+ for addrinfo in infos:
+ try:
+ sock = await self._connect_sock(
+ exceptions, addrinfo, laddr_infos)
+ break
+ except OSError:
+ continue
+ else: # using happy eyeballs
+ sock, _, _ = await staggered.staggered_race(
+ (functools.partial(self._connect_sock,
+ exceptions, addrinfo, laddr_infos)
+ for addrinfo in infos),
+ happy_eyeballs_delay, loop=self)
+
+ if sock is None:
+ exceptions = [exc for sub in exceptions for exc in sub]
if len(exceptions) == 1:
raise exceptions[0]
else:
@@ -1150,7 +1150,7 @@ class BaseEventLoop(events.AbstractEventLoop):
try:
return await self._sendfile_native(transport, file,
offset, count)
- except exceptions.SendfileNotAvailableError as exc:
+ except exceptions.SendfileNotAvailableError as exc:
if not fallback:
raise
@@ -1163,7 +1163,7 @@ class BaseEventLoop(events.AbstractEventLoop):
offset, count)
async def _sendfile_native(self, transp, file, offset, count):
- raise exceptions.SendfileNotAvailableError(
+ raise exceptions.SendfileNotAvailableError(
"sendfile syscall is not supported")
async def _sendfile_fallback(self, transp, file, offset, count):
@@ -1180,11 +1180,11 @@ class BaseEventLoop(events.AbstractEventLoop):
if blocksize <= 0:
return total_sent
view = memoryview(buf)[:blocksize]
- read = await self.run_in_executor(None, file.readinto, view)
+ read = await self.run_in_executor(None, file.readinto, view)
if not read:
return total_sent # EOF
await proto.drain()
- transp.write(view[:read])
+ transp.write(view[:read])
total_sent += read
finally:
if total_sent > 0 and hasattr(file, 'seek'):
@@ -1229,7 +1229,7 @@ class BaseEventLoop(events.AbstractEventLoop):
try:
await waiter
- except BaseException:
+ except BaseException:
transport.close()
conmade_cb.cancel()
resume_cb.cancel()
@@ -1240,7 +1240,7 @@ class BaseEventLoop(events.AbstractEventLoop):
async def create_datagram_endpoint(self, protocol_factory,
local_addr=None, remote_addr=None, *,
family=0, proto=0, flags=0,
- reuse_address=_unset, reuse_port=None,
+ reuse_address=_unset, reuse_port=None,
allow_broadcast=None, sock=None):
"""Create datagram connection."""
if sock is not None:
@@ -1249,7 +1249,7 @@ class BaseEventLoop(events.AbstractEventLoop):
f'A UDP Socket was expected, got {sock!r}')
if (local_addr or remote_addr or
family or proto or flags or
- reuse_port or allow_broadcast):
+ reuse_port or allow_broadcast):
# show the problematic kwargs in exception msg
opts = dict(local_addr=local_addr, remote_addr=remote_addr,
family=family, proto=proto, flags=flags,
@@ -1270,28 +1270,28 @@ class BaseEventLoop(events.AbstractEventLoop):
for addr in (local_addr, remote_addr):
if addr is not None and not isinstance(addr, str):
raise TypeError('string is expected')
-
- if local_addr and local_addr[0] not in (0, '\x00'):
- try:
- if stat.S_ISSOCK(os.stat(local_addr).st_mode):
- os.remove(local_addr)
- except FileNotFoundError:
- pass
- except OSError as err:
- # Directory may have permissions only to create socket.
- logger.error('Unable to check or remove stale UNIX '
- 'socket %r: %r',
- local_addr, err)
-
+
+ if local_addr and local_addr[0] not in (0, '\x00'):
+ try:
+ if stat.S_ISSOCK(os.stat(local_addr).st_mode):
+ os.remove(local_addr)
+ except FileNotFoundError:
+ pass
+ except OSError as err:
+ # Directory may have permissions only to create socket.
+ logger.error('Unable to check or remove stale UNIX '
+ 'socket %r: %r',
+ local_addr, err)
+
addr_pairs_info = (((family, proto),
(local_addr, remote_addr)), )
else:
# join address by (family, protocol)
- addr_infos = {} # Using order preserving dict
+ addr_infos = {} # Using order preserving dict
for idx, addr in ((0, local_addr), (1, remote_addr)):
if addr is not None:
- if not (isinstance(addr, tuple) and len(addr) == 2):
- raise TypeError('2-tuple is expected')
+ if not (isinstance(addr, tuple) and len(addr) == 2):
+ raise TypeError('2-tuple is expected')
infos = await self._ensure_resolved(
addr, family=family, type=socket.SOCK_DGRAM,
@@ -1316,18 +1316,18 @@ class BaseEventLoop(events.AbstractEventLoop):
exceptions = []
- # bpo-37228
- if reuse_address is not _unset:
- if reuse_address:
- raise ValueError("Passing `reuse_address=True` is no "
- "longer supported, as the usage of "
- "SO_REUSEPORT in UDP poses a significant "
- "security concern.")
- else:
- warnings.warn("The *reuse_address* parameter has been "
- "deprecated as of 3.5.10 and is scheduled "
- "for removal in 3.11.", DeprecationWarning,
- stacklevel=2)
+ # bpo-37228
+ if reuse_address is not _unset:
+ if reuse_address:
+ raise ValueError("Passing `reuse_address=True` is no "
+ "longer supported, as the usage of "
+ "SO_REUSEPORT in UDP poses a significant "
+ "security concern.")
+ else:
+ warnings.warn("The *reuse_address* parameter has been "
+ "deprecated as of 3.5.10 and is scheduled "
+ "for removal in 3.11.", DeprecationWarning,
+ stacklevel=2)
for ((family, proto),
(local_address, remote_address)) in addr_pairs_info:
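Per the bpo-37228 guard above, reuse_address must now be left unset for datagram endpoints. A minimal endpoint sketch that avoids it; the address and port are placeholders:

    import asyncio

    class EchoProtocol(asyncio.DatagramProtocol):
        def connection_made(self, transport):
            self.transport = transport

        def datagram_received(self, data, addr):
            # Echo the payload straight back to the sender.
            self.transport.sendto(data, addr)

    async def main():
        loop = asyncio.get_running_loop()
        transport, _ = await loop.create_datagram_endpoint(
            EchoProtocol, local_addr=("127.0.0.1", 9999))
        await asyncio.sleep(5)
        transport.close()

    asyncio.run(main())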
@@ -1346,8 +1346,8 @@ class BaseEventLoop(events.AbstractEventLoop):
if local_addr:
sock.bind(local_address)
if remote_addr:
- if not allow_broadcast:
- await self.sock_connect(sock, remote_address)
+ if not allow_broadcast:
+ await self.sock_connect(sock, remote_address)
r_addr = remote_address
except OSError as exc:
if sock is not None:
@@ -1388,7 +1388,7 @@ class BaseEventLoop(events.AbstractEventLoop):
family=0, type=socket.SOCK_STREAM,
proto=0, flags=0, loop):
host, port = address[:2]
- info = _ipaddr_info(host, port, family, type, proto, *address[2:])
+ info = _ipaddr_info(host, port, family, type, proto, *address[2:])
if info is not None:
# "host" is already a resolved IP.
return [info]
@@ -1457,7 +1457,7 @@ class BaseEventLoop(events.AbstractEventLoop):
fs = [self._create_server_getaddrinfo(host, port, family=family,
flags=flags)
for host in hosts]
- infos = await tasks._gather(*fs, loop=self)
+ infos = await tasks._gather(*fs, loop=self)
infos = set(itertools.chain.from_iterable(infos))
completed = False
@@ -1515,7 +1515,7 @@ class BaseEventLoop(events.AbstractEventLoop):
server._start_serving()
# Skip one loop iteration so that all 'loop.add_reader'
# go through.
- await tasks.sleep(0)
+ await tasks.sleep(0)
if self._debug:
logger.info("%r is serving", server)
@@ -1601,7 +1601,7 @@ class BaseEventLoop(events.AbstractEventLoop):
stderr=subprocess.PIPE,
universal_newlines=False,
shell=True, bufsize=0,
- encoding=None, errors=None, text=None,
+ encoding=None, errors=None, text=None,
**kwargs):
if not isinstance(cmd, (bytes, str)):
raise ValueError("cmd must be a string")
@@ -1611,13 +1611,13 @@ class BaseEventLoop(events.AbstractEventLoop):
raise ValueError("shell must be True")
if bufsize != 0:
raise ValueError("bufsize must be 0")
- if text:
- raise ValueError("text must be False")
- if encoding is not None:
- raise ValueError("encoding must be None")
- if errors is not None:
- raise ValueError("errors must be None")
-
+ if text:
+ raise ValueError("text must be False")
+ if encoding is not None:
+ raise ValueError("encoding must be None")
+ if errors is not None:
+ raise ValueError("errors must be None")
+
protocol = protocol_factory()
debug_log = None
if self._debug:
@@ -1634,22 +1634,22 @@ class BaseEventLoop(events.AbstractEventLoop):
async def subprocess_exec(self, protocol_factory, program, *args,
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, universal_newlines=False,
- shell=False, bufsize=0,
- encoding=None, errors=None, text=None,
- **kwargs):
+ shell=False, bufsize=0,
+ encoding=None, errors=None, text=None,
+ **kwargs):
if universal_newlines:
raise ValueError("universal_newlines must be False")
if shell:
raise ValueError("shell must be False")
if bufsize != 0:
raise ValueError("bufsize must be 0")
- if text:
- raise ValueError("text must be False")
- if encoding is not None:
- raise ValueError("encoding must be None")
- if errors is not None:
- raise ValueError("errors must be None")
-
+ if text:
+ raise ValueError("text must be False")
+ if encoding is not None:
+ raise ValueError("encoding must be None")
+ if errors is not None:
+ raise ValueError("errors must be None")
+
popen_args = (program,) + args
protocol = protocol_factory()
debug_log = None
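subprocess_exec() grows the same text/encoding/errors guards as subprocess_shell(): asyncio subprocess pipes are bytes-only, so any decoding happens in user code. A sketch with the high-level wrapper:

    import asyncio
    import sys

    async def main():
        # The guards above enforce bufsize=0 and bytes mode, so the
        # pipes below yield bytes, not str.
        proc = await asyncio.create_subprocess_exec(
            sys.executable, "-c", "print('hello')",
            stdout=asyncio.subprocess.PIPE)
        out, _ = await proc.communicate()
        print(out.decode().strip())   # hello

    asyncio.run(main())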
@@ -1762,9 +1762,9 @@ class BaseEventLoop(events.AbstractEventLoop):
if self._exception_handler is None:
try:
self.default_exception_handler(context)
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException:
# Second protection layer for unexpected errors
# in the default implementation, as well as for subclassed
# event loops with overloaded "default_exception_handler".
@@ -1773,9 +1773,9 @@ class BaseEventLoop(events.AbstractEventLoop):
else:
try:
self._exception_handler(self, context)
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
# Exception in the user set custom exception handler.
try:
# Let's try default handler.
@@ -1784,9 +1784,9 @@ class BaseEventLoop(events.AbstractEventLoop):
'exception': exc,
'context': context,
})
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException:
# Guard 'default_exception_handler' in case it is
# overloaded.
logger.error('Exception in default exception handler '
@@ -1851,7 +1851,7 @@ class BaseEventLoop(events.AbstractEventLoop):
when = self._scheduled[0]._when
timeout = min(max(0, when - self.time()), MAXIMUM_SELECT_TIMEOUT)
- event_list = self._selector.select(timeout)
+ event_list = self._selector.select(timeout)
self._process_events(event_list)
# Handle 'later' callbacks that are ready.
diff --git a/contrib/tools/python3/src/Lib/asyncio/base_futures.py b/contrib/tools/python3/src/Lib/asyncio/base_futures.py
index 2c01ac98e1..0962405d8d 100644
--- a/contrib/tools/python3/src/Lib/asyncio/base_futures.py
+++ b/contrib/tools/python3/src/Lib/asyncio/base_futures.py
@@ -1,7 +1,7 @@
__all__ = ()
import reprlib
-from _thread import get_ident
+from _thread import get_ident
from . import format_helpers
@@ -42,16 +42,16 @@ def _format_callbacks(cb):
return f'cb=[{cb}]'
-# bpo-42183: _repr_running is needed for repr protection
-# when a Future or Task result contains itself directly or indirectly.
-# The logic is borrowed from @reprlib.recursive_repr decorator.
-# Unfortunately, the direct decorator usage is impossible because of
-# AttributeError: '_asyncio.Task' object has no attribute '__module__' error.
-#
-# After fixing this thing we can return to the decorator based approach.
-_repr_running = set()
-
-
+# bpo-42183: _repr_running is needed for repr protection
+# when a Future or Task result contains itself directly or indirectly.
+# The logic is borrowed from @reprlib.recursive_repr decorator.
+# Unfortunately, the direct decorator usage is impossible because of
+# AttributeError: '_asyncio.Task' object has no attribute '__module__' error.
+#
+# After fixing this thing we can return to the decorator based approach.
+_repr_running = set()
+
+
def _future_repr_info(future):
# (Future) -> str
"""helper function for Future.__repr__"""
@@ -60,17 +60,17 @@ def _future_repr_info(future):
if future._exception is not None:
info.append(f'exception={future._exception!r}')
else:
- key = id(future), get_ident()
- if key in _repr_running:
- result = '...'
- else:
- _repr_running.add(key)
- try:
- # use reprlib to limit the length of the output, especially
- # for very long strings
- result = reprlib.repr(future._result)
- finally:
- _repr_running.discard(key)
+ key = id(future), get_ident()
+ if key in _repr_running:
+ result = '...'
+ else:
+ _repr_running.add(key)
+ try:
+ # use reprlib to limit the length of the output, especially
+ # for very long strings
+ result = reprlib.repr(future._result)
+ finally:
+ _repr_running.discard(key)
info.append(f'result={result}')
if future._callbacks:
info.append(_format_callbacks(future._callbacks))
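The _repr_running set above protects repr() when a Future's result contains the Future itself (bpo-42183); it hand-rolls what the reprlib.recursive_repr decorator normally provides, since the decorator cannot be applied to the C-implemented task type. The same pattern on an ordinary class, for reference:

    import reprlib

    class Node:
        def __init__(self):
            self.payload = None

        @reprlib.recursive_repr(fillvalue="...")
        def __repr__(self):
            return f"<Node payload={self.payload!r}>"

    n = Node()
    n.payload = n      # self-reference, as in bpo-42183
    print(repr(n))     # <Node payload=...>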
diff --git a/contrib/tools/python3/src/Lib/asyncio/base_subprocess.py b/contrib/tools/python3/src/Lib/asyncio/base_subprocess.py
index 14d5051922..05daeedd37 100644
--- a/contrib/tools/python3/src/Lib/asyncio/base_subprocess.py
+++ b/contrib/tools/python3/src/Lib/asyncio/base_subprocess.py
@@ -120,9 +120,9 @@ class BaseSubprocessTransport(transports.SubprocessTransport):
# Don't clear the _proc reference yet: _post_init() may still run
- def __del__(self, _warn=warnings.warn):
+ def __del__(self, _warn=warnings.warn):
if not self._closed:
- _warn(f"unclosed transport {self!r}", ResourceWarning, source=self)
+ _warn(f"unclosed transport {self!r}", ResourceWarning, source=self)
self.close()
def get_pid(self):
@@ -182,9 +182,9 @@ class BaseSubprocessTransport(transports.SubprocessTransport):
for callback, data in self._pending_calls:
loop.call_soon(callback, *data)
self._pending_calls = None
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
if waiter is not None and not waiter.cancelled():
waiter.set_exception(exc)
else:
diff --git a/contrib/tools/python3/src/Lib/asyncio/base_tasks.py b/contrib/tools/python3/src/Lib/asyncio/base_tasks.py
index 09bb171a2c..d16f3f7be2 100644
--- a/contrib/tools/python3/src/Lib/asyncio/base_tasks.py
+++ b/contrib/tools/python3/src/Lib/asyncio/base_tasks.py
@@ -12,30 +12,30 @@ def _task_repr_info(task):
# replace status
info[0] = 'cancelling'
- info.insert(1, 'name=%r' % task.get_name())
-
+ info.insert(1, 'name=%r' % task.get_name())
+
coro = coroutines._format_coroutine(task._coro)
- info.insert(2, f'coro=<{coro}>')
+ info.insert(2, f'coro=<{coro}>')
if task._fut_waiter is not None:
- info.insert(3, f'wait_for={task._fut_waiter!r}')
+ info.insert(3, f'wait_for={task._fut_waiter!r}')
return info
def _task_get_stack(task, limit):
frames = []
- if hasattr(task._coro, 'cr_frame'):
- # case 1: 'async def' coroutines
+ if hasattr(task._coro, 'cr_frame'):
+ # case 1: 'async def' coroutines
f = task._coro.cr_frame
- elif hasattr(task._coro, 'gi_frame'):
- # case 2: legacy coroutines
+ elif hasattr(task._coro, 'gi_frame'):
+ # case 2: legacy coroutines
f = task._coro.gi_frame
- elif hasattr(task._coro, 'ag_frame'):
- # case 3: async generators
- f = task._coro.ag_frame
- else:
- # case 4: unknown objects
- f = None
+ elif hasattr(task._coro, 'ag_frame'):
+ # case 3: async generators
+ f = task._coro.ag_frame
+ else:
+ # case 4: unknown objects
+ f = None
if f is not None:
while f is not None:
if limit is not None:
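_task_get_stack() above collects the suspended coroutine frame (or the traceback frames if the task failed) and backs Task.get_stack() and Task.print_stack(). A sketch:

    import asyncio

    async def worker():
        await asyncio.sleep(10)

    async def main():
        task = asyncio.create_task(worker(), name="demo")  # name= is 3.8+
        await asyncio.sleep(0)   # let the task reach its first await
        task.print_stack()       # prints the frame collected above
        task.cancel()

    asyncio.run(main())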
diff --git a/contrib/tools/python3/src/Lib/asyncio/coroutines.py b/contrib/tools/python3/src/Lib/asyncio/coroutines.py
index 9664ea74d7..ea92c8c95b 100644
--- a/contrib/tools/python3/src/Lib/asyncio/coroutines.py
+++ b/contrib/tools/python3/src/Lib/asyncio/coroutines.py
@@ -7,7 +7,7 @@ import os
import sys
import traceback
import types
-import warnings
+import warnings
from . import base_futures
from . import constants
@@ -108,9 +108,9 @@ def coroutine(func):
If the coroutine is not yielded from before it is destroyed,
an error message is logged.
"""
- warnings.warn('"@coroutine" decorator is deprecated since Python 3.8, use "async def" instead',
- DeprecationWarning,
- stacklevel=2)
+ warnings.warn('"@coroutine" decorator is deprecated since Python 3.8, use "async def" instead',
+ DeprecationWarning,
+ stacklevel=2)
if inspect.iscoroutinefunction(func):
# In Python 3.5 that's all we need to do for coroutines
# defined with "async def".
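The DeprecationWarning added above fires for legacy generator-based coroutines; plain async/await is the replacement. Both spellings side by side, noting that @asyncio.coroutine itself was later removed in Python 3.11:

    import asyncio

    @asyncio.coroutine              # deprecated: triggers the warning above
    def legacy():
        yield from asyncio.sleep(0)
        return 42

    async def modern():             # the suggested replacement
        await asyncio.sleep(0)
        return 42

    print(asyncio.run(modern()))    # 42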
diff --git a/contrib/tools/python3/src/Lib/asyncio/events.py b/contrib/tools/python3/src/Lib/asyncio/events.py
index 413ff2aaa6..2e806c1517 100644
--- a/contrib/tools/python3/src/Lib/asyncio/events.py
+++ b/contrib/tools/python3/src/Lib/asyncio/events.py
@@ -3,7 +3,7 @@
__all__ = (
'AbstractEventLoopPolicy',
'AbstractEventLoop', 'AbstractServer',
- 'Handle', 'TimerHandle',
+ 'Handle', 'TimerHandle',
'get_event_loop_policy', 'set_event_loop_policy',
'get_event_loop', 'set_event_loop', 'new_event_loop',
'get_child_watcher', 'set_child_watcher',
@@ -78,9 +78,9 @@ class Handle:
def _run(self):
try:
self._context.run(self._callback, *self._args)
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
cb = format_helpers._format_callback_source(
self._callback, self._args)
msg = f'Exception in callback {cb}'
@@ -118,24 +118,24 @@ class TimerHandle(Handle):
return hash(self._when)
def __lt__(self, other):
- if isinstance(other, TimerHandle):
- return self._when < other._when
- return NotImplemented
+ if isinstance(other, TimerHandle):
+ return self._when < other._when
+ return NotImplemented
def __le__(self, other):
- if isinstance(other, TimerHandle):
- return self._when < other._when or self.__eq__(other)
- return NotImplemented
+ if isinstance(other, TimerHandle):
+ return self._when < other._when or self.__eq__(other)
+ return NotImplemented
def __gt__(self, other):
- if isinstance(other, TimerHandle):
- return self._when > other._when
- return NotImplemented
+ if isinstance(other, TimerHandle):
+ return self._when > other._when
+ return NotImplemented
def __ge__(self, other):
- if isinstance(other, TimerHandle):
- return self._when > other._when or self.__eq__(other)
- return NotImplemented
+ if isinstance(other, TimerHandle):
+ return self._when > other._when or self.__eq__(other)
+ return NotImplemented
def __eq__(self, other):
if isinstance(other, TimerHandle):
@@ -248,23 +248,23 @@ class AbstractEventLoop:
"""Shutdown all active asynchronous generators."""
raise NotImplementedError
- async def shutdown_default_executor(self):
- """Schedule the shutdown of the default executor."""
- raise NotImplementedError
-
+ async def shutdown_default_executor(self):
+ """Schedule the shutdown of the default executor."""
+ raise NotImplementedError
+
# Methods scheduling callbacks. All these return Handles.
def _timer_handle_cancelled(self, handle):
"""Notification that a TimerHandle has been cancelled."""
raise NotImplementedError
- def call_soon(self, callback, *args, context=None):
- return self.call_later(0, callback, *args, context=context)
+ def call_soon(self, callback, *args, context=None):
+ return self.call_later(0, callback, *args, context=context)
- def call_later(self, delay, callback, *args, context=None):
+ def call_later(self, delay, callback, *args, context=None):
raise NotImplementedError
- def call_at(self, when, callback, *args, context=None):
+ def call_at(self, when, callback, *args, context=None):
raise NotImplementedError
def time(self):
@@ -275,15 +275,15 @@ class AbstractEventLoop:
# Method scheduling a coroutine object: create a task.
- def create_task(self, coro, *, name=None):
+ def create_task(self, coro, *, name=None):
raise NotImplementedError
# Methods for interacting with threads.
- def call_soon_threadsafe(self, callback, *args, context=None):
+ def call_soon_threadsafe(self, callback, *args, context=None):
raise NotImplementedError
- def run_in_executor(self, executor, func, *args):
+ def run_in_executor(self, executor, func, *args):
raise NotImplementedError
def set_default_executor(self, executor):
@@ -303,8 +303,8 @@ class AbstractEventLoop:
*, ssl=None, family=0, proto=0,
flags=0, sock=None, local_addr=None,
server_hostname=None,
- ssl_handshake_timeout=None,
- happy_eyeballs_delay=None, interleave=None):
+ ssl_handshake_timeout=None,
+ happy_eyeballs_delay=None, interleave=None):
raise NotImplementedError
async def create_server(
@@ -396,7 +396,7 @@ class AbstractEventLoop:
The return value is a Server object, which can be used to stop
the service.
- path is a str, representing a file system path to bind the
+ path is a str, representing a file system path to bind the
server socket to.
sock can optionally be specified in order to use a preexisting
@@ -465,7 +465,7 @@ class AbstractEventLoop:
# The reason to accept file-like object instead of just file descriptor
# is: we need to own pipe and close it at transport finishing
        # Can get complicated errors if we pass f.fileno(),
- # close fd in pipe transport then close f and vice versa.
+ # close fd in pipe transport then close f and vice versa.
raise NotImplementedError
async def connect_write_pipe(self, protocol_factory, pipe):
@@ -478,7 +478,7 @@ class AbstractEventLoop:
# The reason to accept file-like object instead of just file descriptor
# is: we need to own pipe and close it at transport finishing
        # Can get complicated errors if we pass f.fileno(),
- # close fd in pipe transport then close f and vice versa.
+ # close fd in pipe transport then close f and vice versa.
raise NotImplementedError
async def subprocess_shell(self, protocol_factory, cmd, *,
@@ -629,13 +629,13 @@ class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy):
self._local = self._Local()
def get_event_loop(self):
- """Get the event loop for the current context.
+ """Get the event loop for the current context.
- Returns an instance of EventLoop or raises an exception.
+ Returns an instance of EventLoop or raises an exception.
"""
if (self._local._loop is None and
not self._local._set_called and
- threading.current_thread() is threading.main_thread()):
+ threading.current_thread() is threading.main_thread()):
self.set_event_loop(self.new_event_loop())
if self._local._loop is None:
diff --git a/contrib/tools/python3/src/Lib/asyncio/exceptions.py b/contrib/tools/python3/src/Lib/asyncio/exceptions.py
index f07e448657..0957fe6138 100644
--- a/contrib/tools/python3/src/Lib/asyncio/exceptions.py
+++ b/contrib/tools/python3/src/Lib/asyncio/exceptions.py
@@ -1,58 +1,58 @@
-"""asyncio exceptions."""
-
-
-__all__ = ('CancelledError', 'InvalidStateError', 'TimeoutError',
- 'IncompleteReadError', 'LimitOverrunError',
- 'SendfileNotAvailableError')
-
-
-class CancelledError(BaseException):
- """The Future or Task was cancelled."""
-
-
-class TimeoutError(Exception):
- """The operation exceeded the given deadline."""
-
-
-class InvalidStateError(Exception):
- """The operation is not allowed in this state."""
-
-
-class SendfileNotAvailableError(RuntimeError):
- """Sendfile syscall is not available.
-
- Raised if OS does not support sendfile syscall for given socket or
- file type.
- """
-
-
-class IncompleteReadError(EOFError):
- """
- Incomplete read error. Attributes:
-
-    - partial: the bytes read before the end of stream was reached
- - expected: total number of expected bytes (or None if unknown)
- """
- def __init__(self, partial, expected):
- r_expected = 'undefined' if expected is None else repr(expected)
- super().__init__(f'{len(partial)} bytes read on a total of '
- f'{r_expected} expected bytes')
- self.partial = partial
- self.expected = expected
-
- def __reduce__(self):
- return type(self), (self.partial, self.expected)
-
-
-class LimitOverrunError(Exception):
- """Reached the buffer limit while looking for a separator.
-
- Attributes:
-    - consumed: total number of bytes to be consumed.
- """
- def __init__(self, message, consumed):
- super().__init__(message)
- self.consumed = consumed
-
- def __reduce__(self):
- return type(self), (self.args[0], self.consumed)
+"""asyncio exceptions."""
+
+
+__all__ = ('CancelledError', 'InvalidStateError', 'TimeoutError',
+ 'IncompleteReadError', 'LimitOverrunError',
+ 'SendfileNotAvailableError')
+
+
+class CancelledError(BaseException):
+ """The Future or Task was cancelled."""
+
+
+class TimeoutError(Exception):
+ """The operation exceeded the given deadline."""
+
+
+class InvalidStateError(Exception):
+ """The operation is not allowed in this state."""
+
+
+class SendfileNotAvailableError(RuntimeError):
+ """Sendfile syscall is not available.
+
+ Raised if OS does not support sendfile syscall for given socket or
+ file type.
+ """
+
+
+class IncompleteReadError(EOFError):
+ """
+ Incomplete read error. Attributes:
+
+    - partial: the bytes read before the end of stream was reached
+ - expected: total number of expected bytes (or None if unknown)
+ """
+ def __init__(self, partial, expected):
+ r_expected = 'undefined' if expected is None else repr(expected)
+ super().__init__(f'{len(partial)} bytes read on a total of '
+ f'{r_expected} expected bytes')
+ self.partial = partial
+ self.expected = expected
+
+ def __reduce__(self):
+ return type(self), (self.partial, self.expected)
+
+
+class LimitOverrunError(Exception):
+ """Reached the buffer limit while looking for a separator.
+
+ Attributes:
+    - consumed: total number of bytes to be consumed.
+ """
+ def __init__(self, message, consumed):
+ super().__init__(message)
+ self.consumed = consumed
+
+ def __reduce__(self):
+ return type(self), (self.args[0], self.consumed)
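IncompleteReadError.__init__ takes (partial, expected) rather than a single message, so the __reduce__ methods above are what keep these exceptions picklable; without them, unpickling would call the class with the formatted message as its only argument. A sketch:

    import pickle
    from asyncio import IncompleteReadError

    exc = IncompleteReadError(partial=b"abc", expected=10)
    clone = pickle.loads(pickle.dumps(exc))   # round-trips via __reduce__

    print(clone.partial, clone.expected)      # b'abc' 10
    print(clone)    # 3 bytes read on a total of 10 expected bytes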
diff --git a/contrib/tools/python3/src/Lib/asyncio/futures.py b/contrib/tools/python3/src/Lib/asyncio/futures.py
index bed4da52fd..d66c12152e 100644
--- a/contrib/tools/python3/src/Lib/asyncio/futures.py
+++ b/contrib/tools/python3/src/Lib/asyncio/futures.py
@@ -11,7 +11,7 @@ import sys
from . import base_futures
from . import events
-from . import exceptions
+from . import exceptions
from . import format_helpers
@@ -51,9 +51,9 @@ class Future:
_exception = None
_loop = None
_source_traceback = None
- _cancel_message = None
- # A saved CancelledError for later chaining as an exception context.
- _cancelled_exc = None
+ _cancel_message = None
+ # A saved CancelledError for later chaining as an exception context.
+ _cancelled_exc = None
# This field is used for a dual purpose:
# - Its presence is a marker to declare that a class implements
@@ -106,9 +106,9 @@ class Future:
context['source_traceback'] = self._source_traceback
self._loop.call_exception_handler(context)
- def __class_getitem__(cls, type):
- return cls
-
+ def __class_getitem__(cls, type):
+ return cls
+
@property
def _log_traceback(self):
return self.__log_traceback
@@ -121,27 +121,27 @@ class Future:
def get_loop(self):
"""Return the event loop the Future is bound to."""
- loop = self._loop
- if loop is None:
- raise RuntimeError("Future object is not initialized.")
- return loop
-
- def _make_cancelled_error(self):
- """Create the CancelledError to raise if the Future is cancelled.
-
- This should only be called once when handling a cancellation since
- it erases the saved context exception value.
- """
- if self._cancel_message is None:
- exc = exceptions.CancelledError()
- else:
- exc = exceptions.CancelledError(self._cancel_message)
- exc.__context__ = self._cancelled_exc
- # Remove the reference since we don't need this anymore.
- self._cancelled_exc = None
- return exc
-
- def cancel(self, msg=None):
+ loop = self._loop
+ if loop is None:
+ raise RuntimeError("Future object is not initialized.")
+ return loop
+
+ def _make_cancelled_error(self):
+ """Create the CancelledError to raise if the Future is cancelled.
+
+ This should only be called once when handling a cancellation since
+ it erases the saved context exception value.
+ """
+ if self._cancel_message is None:
+ exc = exceptions.CancelledError()
+ else:
+ exc = exceptions.CancelledError(self._cancel_message)
+ exc.__context__ = self._cancelled_exc
+ # Remove the reference since we don't need this anymore.
+ self._cancelled_exc = None
+ return exc
+
+ def cancel(self, msg=None):
"""Cancel the future and schedule callbacks.
If the future is already done or cancelled, return False. Otherwise,
@@ -152,7 +152,7 @@ class Future:
if self._state != _PENDING:
return False
self._state = _CANCELLED
- self._cancel_message = msg
+ self._cancel_message = msg
self.__schedule_callbacks()
return True
@@ -192,10 +192,10 @@ class Future:
the future is done and has an exception set, this exception is raised.
"""
if self._state == _CANCELLED:
- exc = self._make_cancelled_error()
- raise exc
+ exc = self._make_cancelled_error()
+ raise exc
if self._state != _FINISHED:
- raise exceptions.InvalidStateError('Result is not ready.')
+ raise exceptions.InvalidStateError('Result is not ready.')
self.__log_traceback = False
if self._exception is not None:
raise self._exception
@@ -210,10 +210,10 @@ class Future:
InvalidStateError.
"""
if self._state == _CANCELLED:
- exc = self._make_cancelled_error()
- raise exc
+ exc = self._make_cancelled_error()
+ raise exc
if self._state != _FINISHED:
- raise exceptions.InvalidStateError('Exception is not set.')
+ raise exceptions.InvalidStateError('Exception is not set.')
self.__log_traceback = False
return self._exception
@@ -255,7 +255,7 @@ class Future:
InvalidStateError.
"""
if self._state != _PENDING:
- raise exceptions.InvalidStateError(f'{self._state}: {self!r}')
+ raise exceptions.InvalidStateError(f'{self._state}: {self!r}')
self._result = result
self._state = _FINISHED
self.__schedule_callbacks()
@@ -267,7 +267,7 @@ class Future:
InvalidStateError.
"""
if self._state != _PENDING:
- raise exceptions.InvalidStateError(f'{self._state}: {self!r}')
+ raise exceptions.InvalidStateError(f'{self._state}: {self!r}')
if isinstance(exception, type):
exception = exception()
if type(exception) is StopIteration:
@@ -312,18 +312,18 @@ def _set_result_unless_cancelled(fut, result):
fut.set_result(result)
-def _convert_future_exc(exc):
- exc_class = type(exc)
- if exc_class is concurrent.futures.CancelledError:
- return exceptions.CancelledError(*exc.args)
- elif exc_class is concurrent.futures.TimeoutError:
- return exceptions.TimeoutError(*exc.args)
- elif exc_class is concurrent.futures.InvalidStateError:
- return exceptions.InvalidStateError(*exc.args)
- else:
- return exc
-
-
+def _convert_future_exc(exc):
+ exc_class = type(exc)
+ if exc_class is concurrent.futures.CancelledError:
+ return exceptions.CancelledError(*exc.args)
+ elif exc_class is concurrent.futures.TimeoutError:
+ return exceptions.TimeoutError(*exc.args)
+ elif exc_class is concurrent.futures.InvalidStateError:
+ return exceptions.InvalidStateError(*exc.args)
+ else:
+ return exc
+
+
def _set_concurrent_future_state(concurrent, source):
"""Copy state from a future to a concurrent.futures.Future."""
assert source.done()
@@ -333,7 +333,7 @@ def _set_concurrent_future_state(concurrent, source):
return
exception = source.exception()
if exception is not None:
- concurrent.set_exception(_convert_future_exc(exception))
+ concurrent.set_exception(_convert_future_exc(exception))
else:
result = source.result()
concurrent.set_result(result)
@@ -353,7 +353,7 @@ def _copy_future_state(source, dest):
else:
exception = source.exception()
if exception is not None:
- dest.set_exception(_convert_future_exc(exception))
+ dest.set_exception(_convert_future_exc(exception))
else:
result = source.result()
dest.set_result(result)
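_convert_future_exc() above translates concurrent.futures exceptions into their asyncio counterparts whenever state is copied between the two future types; asyncio.wrap_future() is the public entry point for that chaining. A sketch:

    import asyncio
    import concurrent.futures

    def blocking():
        return "computed in a worker thread"

    async def main():
        with concurrent.futures.ThreadPoolExecutor() as pool:
            cf = pool.submit(blocking)
            # wrap_future() chains the states; cancellations crossing
            # the boundary are converted by _convert_future_exc().
            print(await asyncio.wrap_future(cf))

    asyncio.run(main())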
diff --git a/contrib/tools/python3/src/Lib/asyncio/locks.py b/contrib/tools/python3/src/Lib/asyncio/locks.py
index f1ce732478..218226dca2 100644
--- a/contrib/tools/python3/src/Lib/asyncio/locks.py
+++ b/contrib/tools/python3/src/Lib/asyncio/locks.py
@@ -6,7 +6,7 @@ import collections
import warnings
from . import events
-from . import exceptions
+from . import exceptions
class _ContextManagerMixin:
@@ -75,15 +75,15 @@ class Lock(_ContextManagerMixin):
"""
def __init__(self, *, loop=None):
- self._waiters = None
+ self._waiters = None
self._locked = False
- if loop is None:
- self._loop = events.get_event_loop()
- else:
+ if loop is None:
+ self._loop = events.get_event_loop()
+ else:
self._loop = loop
- warnings.warn("The loop argument is deprecated since Python 3.8, "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning, stacklevel=2)
+ warnings.warn("The loop argument is deprecated since Python 3.8, "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning, stacklevel=2)
def __repr__(self):
res = super().__repr__()
@@ -102,13 +102,13 @@ class Lock(_ContextManagerMixin):
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
- if (not self._locked and (self._waiters is None or
- all(w.cancelled() for w in self._waiters))):
+ if (not self._locked and (self._waiters is None or
+ all(w.cancelled() for w in self._waiters))):
self._locked = True
return True
- if self._waiters is None:
- self._waiters = collections.deque()
+ if self._waiters is None:
+ self._waiters = collections.deque()
fut = self._loop.create_future()
self._waiters.append(fut)
@@ -120,7 +120,7 @@ class Lock(_ContextManagerMixin):
await fut
finally:
self._waiters.remove(fut)
- except exceptions.CancelledError:
+ except exceptions.CancelledError:
if not self._locked:
self._wake_up_first()
raise
@@ -147,8 +147,8 @@ class Lock(_ContextManagerMixin):
def _wake_up_first(self):
"""Wake up the first waiter if it isn't done."""
- if not self._waiters:
- return
+ if not self._waiters:
+ return
try:
fut = next(iter(self._waiters))
except StopIteration:
@@ -173,13 +173,13 @@ class Event:
def __init__(self, *, loop=None):
self._waiters = collections.deque()
self._value = False
- if loop is None:
- self._loop = events.get_event_loop()
- else:
+ if loop is None:
+ self._loop = events.get_event_loop()
+ else:
self._loop = loop
- warnings.warn("The loop argument is deprecated since Python 3.8, "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning, stacklevel=2)
+ warnings.warn("The loop argument is deprecated since Python 3.8, "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning, stacklevel=2)
def __repr__(self):
res = super().__repr__()
@@ -240,16 +240,16 @@ class Condition(_ContextManagerMixin):
"""
def __init__(self, lock=None, *, loop=None):
- if loop is None:
- self._loop = events.get_event_loop()
- else:
+ if loop is None:
+ self._loop = events.get_event_loop()
+ else:
self._loop = loop
- warnings.warn("The loop argument is deprecated since Python 3.8, "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning, stacklevel=2)
+ warnings.warn("The loop argument is deprecated since Python 3.8, "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning, stacklevel=2)
if lock is None:
- lock = Lock(loop=loop)
+ lock = Lock(loop=loop)
elif lock._loop is not self._loop:
raise ValueError("loop argument must agree with lock")
@@ -299,11 +299,11 @@ class Condition(_ContextManagerMixin):
try:
await self.acquire()
break
- except exceptions.CancelledError:
+ except exceptions.CancelledError:
cancelled = True
if cancelled:
- raise exceptions.CancelledError
+ raise exceptions.CancelledError
async def wait_for(self, predicate):
"""Wait until a predicate becomes true.
@@ -371,13 +371,13 @@ class Semaphore(_ContextManagerMixin):
raise ValueError("Semaphore initial value must be >= 0")
self._value = value
self._waiters = collections.deque()
- if loop is None:
- self._loop = events.get_event_loop()
- else:
+ if loop is None:
+ self._loop = events.get_event_loop()
+ else:
self._loop = loop
- warnings.warn("The loop argument is deprecated since Python 3.8, "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning, stacklevel=2)
+ warnings.warn("The loop argument is deprecated since Python 3.8, "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning, stacklevel=2)
def __repr__(self):
res = super().__repr__()
@@ -437,11 +437,11 @@ class BoundedSemaphore(Semaphore):
"""
def __init__(self, value=1, *, loop=None):
- if loop:
- warnings.warn("The loop argument is deprecated since Python 3.8, "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning, stacklevel=2)
-
+ if loop:
+ warnings.warn("The loop argument is deprecated since Python 3.8, "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning, stacklevel=2)
+
self._bound_value = value
super().__init__(value, loop=loop)
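
Note: the locks.py hunks above all record the same change: explicitly passing loop= to Lock, Event, Condition, Semaphore, or BoundedSemaphore now emits a DeprecationWarning (removal scheduled for Python 3.10). A minimal sketch of the replacement pattern, not part of the diff, with the deprecated form shown only as a comment:

    import asyncio

    async def main():
        lock = asyncio.Lock()        # preferred: binds to the running loop
        # asyncio.Lock(loop=loop)    # pre-3.10 form these hunks deprecate
        async with lock:
            assert lock.locked()

    asyncio.run(main())
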
diff --git a/contrib/tools/python3/src/Lib/asyncio/proactor_events.py b/contrib/tools/python3/src/Lib/asyncio/proactor_events.py
index b4cd414b82..33da82ae58 100644
--- a/contrib/tools/python3/src/Lib/asyncio/proactor_events.py
+++ b/contrib/tools/python3/src/Lib/asyncio/proactor_events.py
@@ -10,39 +10,39 @@ import io
import os
import socket
import warnings
-import signal
-import threading
-import collections
+import signal
+import threading
+import collections
from . import base_events
from . import constants
from . import futures
-from . import exceptions
+from . import exceptions
from . import protocols
from . import sslproto
from . import transports
-from . import trsock
+from . import trsock
from .log import logger
-def _set_socket_extra(transport, sock):
- transport._extra['socket'] = trsock.TransportSocket(sock)
-
- try:
- transport._extra['sockname'] = sock.getsockname()
- except socket.error:
- if transport._loop.get_debug():
- logger.warning(
- "getsockname() failed on %r", sock, exc_info=True)
-
- if 'peername' not in transport._extra:
- try:
- transport._extra['peername'] = sock.getpeername()
- except socket.error:
- # UDP sockets may not have a peer name
- transport._extra['peername'] = None
-
-
+def _set_socket_extra(transport, sock):
+ transport._extra['socket'] = trsock.TransportSocket(sock)
+
+ try:
+ transport._extra['sockname'] = sock.getsockname()
+ except socket.error:
+ if transport._loop.get_debug():
+ logger.warning(
+ "getsockname() failed on %r", sock, exc_info=True)
+
+ if 'peername' not in transport._extra:
+ try:
+ transport._extra['peername'] = sock.getpeername()
+ except socket.error:
+ # UDP sockets may not have a peer name
+ transport._extra['peername'] = None
+
+
class _ProactorBasePipeTransport(transports._FlowControlMixin,
transports.BaseTransport):
"""Base class for pipe and socket transports."""
@@ -110,14 +110,14 @@ class _ProactorBasePipeTransport(transports._FlowControlMixin,
self._read_fut.cancel()
self._read_fut = None
- def __del__(self, _warn=warnings.warn):
+ def __del__(self, _warn=warnings.warn):
if self._sock is not None:
- _warn(f"unclosed transport {self!r}", ResourceWarning, source=self)
+ _warn(f"unclosed transport {self!r}", ResourceWarning, source=self)
self.close()
def _fatal_error(self, exc, message='Fatal error on pipe transport'):
try:
- if isinstance(exc, OSError):
+ if isinstance(exc, OSError):
if self._loop.get_debug():
logger.debug("%r: %s", self, message, exc_info=True)
else:
@@ -233,9 +233,9 @@ class _ProactorReadPipeTransport(_ProactorBasePipeTransport,
try:
keep_open = self._protocol.eof_received()
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self._fatal_error(
exc, 'Fatal error: protocol.eof_received() call failed.')
return
@@ -258,9 +258,9 @@ class _ProactorReadPipeTransport(_ProactorBasePipeTransport,
if isinstance(self._protocol, protocols.BufferedProtocol):
try:
protocols._feed_data_to_buffered_proto(self._protocol, data)
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self._fatal_error(exc,
'Fatal error: protocol.buffer_updated() '
'call failed.')
@@ -307,7 +307,7 @@ class _ProactorReadPipeTransport(_ProactorBasePipeTransport,
self._force_close(exc)
except OSError as exc:
self._fatal_error(exc, 'Fatal read error on pipe transport')
- except exceptions.CancelledError:
+ except exceptions.CancelledError:
if not self._closing:
raise
else:
@@ -450,134 +450,134 @@ class _ProactorWritePipeTransport(_ProactorBaseWritePipeTransport):
self.close()
-class _ProactorDatagramTransport(_ProactorBasePipeTransport):
- max_size = 256 * 1024
- def __init__(self, loop, sock, protocol, address=None,
- waiter=None, extra=None):
- self._address = address
- self._empty_waiter = None
- # We don't need to call _protocol.connection_made() since our base
- # constructor does it for us.
- super().__init__(loop, sock, protocol, waiter=waiter, extra=extra)
-
- # The base constructor sets _buffer = None, so we set it here
- self._buffer = collections.deque()
- self._loop.call_soon(self._loop_reading)
-
- def _set_extra(self, sock):
- _set_socket_extra(self, sock)
-
- def get_write_buffer_size(self):
- return sum(len(data) for data, _ in self._buffer)
-
- def abort(self):
- self._force_close(None)
-
- def sendto(self, data, addr=None):
- if not isinstance(data, (bytes, bytearray, memoryview)):
- raise TypeError('data argument must be bytes-like object (%r)',
- type(data))
-
- if not data:
- return
-
- if self._address is not None and addr not in (None, self._address):
- raise ValueError(
- f'Invalid address: must be None or {self._address}')
-
- if self._conn_lost and self._address:
- if self._conn_lost >= constants.LOG_THRESHOLD_FOR_CONNLOST_WRITES:
- logger.warning('socket.sendto() raised exception.')
- self._conn_lost += 1
- return
-
- # Ensure that what we buffer is immutable.
- self._buffer.append((bytes(data), addr))
-
- if self._write_fut is None:
- # No current write operations are active, kick one off
- self._loop_writing()
- # else: A write operation is already kicked off
-
- self._maybe_pause_protocol()
-
- def _loop_writing(self, fut=None):
- try:
- if self._conn_lost:
- return
-
- assert fut is self._write_fut
- self._write_fut = None
- if fut:
- # We are in a _loop_writing() done callback, get the result
- fut.result()
-
- if not self._buffer or (self._conn_lost and self._address):
- # The connection has been closed
- if self._closing:
- self._loop.call_soon(self._call_connection_lost, None)
- return
-
- data, addr = self._buffer.popleft()
- if self._address is not None:
- self._write_fut = self._loop._proactor.send(self._sock,
- data)
- else:
- self._write_fut = self._loop._proactor.sendto(self._sock,
- data,
- addr=addr)
- except OSError as exc:
- self._protocol.error_received(exc)
- except Exception as exc:
- self._fatal_error(exc, 'Fatal write error on datagram transport')
- else:
- self._write_fut.add_done_callback(self._loop_writing)
- self._maybe_resume_protocol()
-
- def _loop_reading(self, fut=None):
- data = None
- try:
- if self._conn_lost:
- return
-
- assert self._read_fut is fut or (self._read_fut is None and
- self._closing)
-
- self._read_fut = None
- if fut is not None:
- res = fut.result()
-
- if self._closing:
- # since close() has been called we ignore any read data
- data = None
- return
-
- if self._address is not None:
- data, addr = res, self._address
- else:
- data, addr = res
-
- if self._conn_lost:
- return
- if self._address is not None:
- self._read_fut = self._loop._proactor.recv(self._sock,
- self.max_size)
- else:
- self._read_fut = self._loop._proactor.recvfrom(self._sock,
- self.max_size)
- except OSError as exc:
- self._protocol.error_received(exc)
- except exceptions.CancelledError:
- if not self._closing:
- raise
- else:
- if self._read_fut is not None:
- self._read_fut.add_done_callback(self._loop_reading)
- finally:
- if data:
- self._protocol.datagram_received(data, addr)
-
-
+class _ProactorDatagramTransport(_ProactorBasePipeTransport):
+ max_size = 256 * 1024
+ def __init__(self, loop, sock, protocol, address=None,
+ waiter=None, extra=None):
+ self._address = address
+ self._empty_waiter = None
+ # We don't need to call _protocol.connection_made() since our base
+ # constructor does it for us.
+ super().__init__(loop, sock, protocol, waiter=waiter, extra=extra)
+
+ # The base constructor sets _buffer = None, so we set it here
+ self._buffer = collections.deque()
+ self._loop.call_soon(self._loop_reading)
+
+ def _set_extra(self, sock):
+ _set_socket_extra(self, sock)
+
+ def get_write_buffer_size(self):
+ return sum(len(data) for data, _ in self._buffer)
+
+ def abort(self):
+ self._force_close(None)
+
+ def sendto(self, data, addr=None):
+ if not isinstance(data, (bytes, bytearray, memoryview)):
+ raise TypeError('data argument must be bytes-like object (%r)',
+ type(data))
+
+ if not data:
+ return
+
+ if self._address is not None and addr not in (None, self._address):
+ raise ValueError(
+ f'Invalid address: must be None or {self._address}')
+
+ if self._conn_lost and self._address:
+ if self._conn_lost >= constants.LOG_THRESHOLD_FOR_CONNLOST_WRITES:
+ logger.warning('socket.sendto() raised exception.')
+ self._conn_lost += 1
+ return
+
+ # Ensure that what we buffer is immutable.
+ self._buffer.append((bytes(data), addr))
+
+ if self._write_fut is None:
+ # No current write operations are active, kick one off
+ self._loop_writing()
+ # else: A write operation is already kicked off
+
+ self._maybe_pause_protocol()
+
+ def _loop_writing(self, fut=None):
+ try:
+ if self._conn_lost:
+ return
+
+ assert fut is self._write_fut
+ self._write_fut = None
+ if fut:
+ # We are in a _loop_writing() done callback, get the result
+ fut.result()
+
+ if not self._buffer or (self._conn_lost and self._address):
+ # The connection has been closed
+ if self._closing:
+ self._loop.call_soon(self._call_connection_lost, None)
+ return
+
+ data, addr = self._buffer.popleft()
+ if self._address is not None:
+ self._write_fut = self._loop._proactor.send(self._sock,
+ data)
+ else:
+ self._write_fut = self._loop._proactor.sendto(self._sock,
+ data,
+ addr=addr)
+ except OSError as exc:
+ self._protocol.error_received(exc)
+ except Exception as exc:
+ self._fatal_error(exc, 'Fatal write error on datagram transport')
+ else:
+ self._write_fut.add_done_callback(self._loop_writing)
+ self._maybe_resume_protocol()
+
+ def _loop_reading(self, fut=None):
+ data = None
+ try:
+ if self._conn_lost:
+ return
+
+ assert self._read_fut is fut or (self._read_fut is None and
+ self._closing)
+
+ self._read_fut = None
+ if fut is not None:
+ res = fut.result()
+
+ if self._closing:
+ # since close() has been called we ignore any read data
+ data = None
+ return
+
+ if self._address is not None:
+ data, addr = res, self._address
+ else:
+ data, addr = res
+
+ if self._conn_lost:
+ return
+ if self._address is not None:
+ self._read_fut = self._loop._proactor.recv(self._sock,
+ self.max_size)
+ else:
+ self._read_fut = self._loop._proactor.recvfrom(self._sock,
+ self.max_size)
+ except OSError as exc:
+ self._protocol.error_received(exc)
+ except exceptions.CancelledError:
+ if not self._closing:
+ raise
+ else:
+ if self._read_fut is not None:
+ self._read_fut.add_done_callback(self._loop_reading)
+ finally:
+ if data:
+ self._protocol.datagram_received(data, addr)
+
+
class _ProactorDuplexPipeTransport(_ProactorReadPipeTransport,
_ProactorBaseWritePipeTransport,
transports.Transport):
@@ -603,7 +603,7 @@ class _ProactorSocketTransport(_ProactorReadPipeTransport,
base_events._set_nodelay(sock)
def _set_extra(self, sock):
- _set_socket_extra(self, sock)
+ _set_socket_extra(self, sock)
def can_write_eof(self):
return True
@@ -627,9 +627,9 @@ class BaseProactorEventLoop(base_events.BaseEventLoop):
self._accept_futures = {} # socket file descriptor => Future
proactor.set_loop(self)
self._make_self_pipe()
- if threading.current_thread() is threading.main_thread():
- # wakeup fd can only be installed to a file descriptor from the main thread
- signal.set_wakeup_fd(self._csock.fileno())
+ if threading.current_thread() is threading.main_thread():
+ # wakeup fd can only be installed to a file descriptor from the main thread
+ signal.set_wakeup_fd(self._csock.fileno())
def _make_socket_transport(self, sock, protocol, waiter=None,
extra=None, server=None):
@@ -649,11 +649,11 @@ class BaseProactorEventLoop(base_events.BaseEventLoop):
extra=extra, server=server)
return ssl_protocol._app_transport
- def _make_datagram_transport(self, sock, protocol,
- address=None, waiter=None, extra=None):
- return _ProactorDatagramTransport(self, sock, protocol, address,
- waiter, extra)
-
+ def _make_datagram_transport(self, sock, protocol,
+ address=None, waiter=None, extra=None):
+ return _ProactorDatagramTransport(self, sock, protocol, address,
+ waiter, extra)
+
def _make_duplex_pipe_transport(self, sock, protocol, waiter=None,
extra=None):
return _ProactorDuplexPipeTransport(self,
@@ -675,8 +675,8 @@ class BaseProactorEventLoop(base_events.BaseEventLoop):
if self.is_closed():
return
- if threading.current_thread() is threading.main_thread():
- signal.set_wakeup_fd(-1)
+ if threading.current_thread() is threading.main_thread():
+ signal.set_wakeup_fd(-1)
# Call these methods before closing the event loop (before calling
# BaseEventLoop.close), because they can schedule callbacks with
# call_soon(), which is forbidden when the event loop is closed.
@@ -708,11 +708,11 @@ class BaseProactorEventLoop(base_events.BaseEventLoop):
try:
fileno = file.fileno()
except (AttributeError, io.UnsupportedOperation) as err:
- raise exceptions.SendfileNotAvailableError("not a regular file")
+ raise exceptions.SendfileNotAvailableError("not a regular file")
try:
fsize = os.fstat(fileno).st_size
- except OSError:
- raise exceptions.SendfileNotAvailableError("not a regular file")
+ except OSError:
+ raise exceptions.SendfileNotAvailableError("not a regular file")
blocksize = count if count else fsize
if not blocksize:
return 0 # empty file
@@ -766,21 +766,21 @@ class BaseProactorEventLoop(base_events.BaseEventLoop):
try:
if f is not None:
f.result() # may raise
- if self._self_reading_future is not f:
- # When we scheduled this Future, we assigned it to
- # _self_reading_future. If it's not there now, something has
- # tried to cancel the loop while this callback was still in the
- # queue (see windows_events.ProactorEventLoop.run_forever). In
- # that case stop here instead of continuing to schedule a new
- # iteration.
- return
+ if self._self_reading_future is not f:
+ # When we scheduled this Future, we assigned it to
+ # _self_reading_future. If it's not there now, something has
+ # tried to cancel the loop while this callback was still in the
+ # queue (see windows_events.ProactorEventLoop.run_forever). In
+ # that case stop here instead of continuing to schedule a new
+ # iteration.
+ return
f = self._proactor.recv(self._ssock, 4096)
- except exceptions.CancelledError:
+ except exceptions.CancelledError:
# _close_self_pipe() has been called, stop waiting for data
return
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self.call_exception_handler({
'message': 'Error on reading from the event loop self pipe',
'exception': exc,
@@ -791,17 +791,17 @@ class BaseProactorEventLoop(base_events.BaseEventLoop):
f.add_done_callback(self._loop_self_reading)
def _write_to_self(self):
- # This may be called from a different thread, possibly after
- # _close_self_pipe() has been called or even while it is
- # running. Guard for self._csock being None or closed. When
- # a socket is closed, send() raises OSError (with errno set to
- # EBADF, but let's not rely on the exact error code).
- csock = self._csock
- if csock is None:
- return
-
+ # This may be called from a different thread, possibly after
+ # _close_self_pipe() has been called or even while it is
+ # running. Guard for self._csock being None or closed. When
+ # a socket is closed, send() raises OSError (with errno set to
+ # EBADF, but let's not rely on the exact error code).
+ csock = self._csock
+ if csock is None:
+ return
+
try:
- csock.send(b'\0')
+ csock.send(b'\0')
except OSError:
if self._debug:
logger.debug("Fail to write a null byte into the "
@@ -837,13 +837,13 @@ class BaseProactorEventLoop(base_events.BaseEventLoop):
self.call_exception_handler({
'message': 'Accept failed on a socket',
'exception': exc,
- 'socket': trsock.TransportSocket(sock),
+ 'socket': trsock.TransportSocket(sock),
})
sock.close()
elif self._debug:
logger.debug("Accept failed on socket %r",
sock, exc_info=True)
- except exceptions.CancelledError:
+ except exceptions.CancelledError:
sock.close()
else:
self._accept_futures[sock.fileno()] = f
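
Note: the proactor_events.py block above vendors in _ProactorDatagramTransport and wires it up through _make_datagram_transport, which is what lets loop.create_datagram_endpoint() work on the proactor (IOCP) event loop. A portable usage sketch, not part of the diff; EchoProtocol is an illustrative name:

    import asyncio

    class EchoProtocol(asyncio.DatagramProtocol):
        def connection_made(self, transport):
            self.transport = transport

        def datagram_received(self, data, addr):
            self.transport.sendto(data, addr)   # echo the datagram back

    async def main():
        loop = asyncio.get_running_loop()
        transport, protocol = await loop.create_datagram_endpoint(
            EchoProtocol, local_addr=("127.0.0.1", 9999))
        await asyncio.sleep(0.1)                # serve briefly, then shut down
        transport.close()

    asyncio.run(main())
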
diff --git a/contrib/tools/python3/src/Lib/asyncio/protocols.py b/contrib/tools/python3/src/Lib/asyncio/protocols.py
index 69fa43e8b6..d09f51f4f2 100644
--- a/contrib/tools/python3/src/Lib/asyncio/protocols.py
+++ b/contrib/tools/python3/src/Lib/asyncio/protocols.py
@@ -16,8 +16,8 @@ class BaseProtocol:
write-only transport like write pipe
"""
- __slots__ = ()
-
+ __slots__ = ()
+
def connection_made(self, transport):
"""Called when a connection is made.
@@ -89,8 +89,8 @@ class Protocol(BaseProtocol):
* CL: connection_lost()
"""
- __slots__ = ()
-
+ __slots__ = ()
+
def data_received(self, data):
"""Called when some data is received.
@@ -134,8 +134,8 @@ class BufferedProtocol(BaseProtocol):
* CL: connection_lost()
"""
- __slots__ = ()
-
+ __slots__ = ()
+
def get_buffer(self, sizehint):
"""Called to allocate a new receive buffer.
@@ -166,8 +166,8 @@ class BufferedProtocol(BaseProtocol):
class DatagramProtocol(BaseProtocol):
"""Interface for datagram protocol."""
- __slots__ = ()
-
+ __slots__ = ()
+
def datagram_received(self, data, addr):
"""Called when some datagram is received."""
@@ -181,8 +181,8 @@ class DatagramProtocol(BaseProtocol):
class SubprocessProtocol(BaseProtocol):
"""Interface for protocol for subprocess calls."""
- __slots__ = ()
-
+ __slots__ = ()
+
def pipe_data_received(self, fd, data):
"""Called when the subprocess writes data into stdout/stderr pipe.
diff --git a/contrib/tools/python3/src/Lib/asyncio/queues.py b/contrib/tools/python3/src/Lib/asyncio/queues.py
index cd3f7c6a56..03ca592290 100644
--- a/contrib/tools/python3/src/Lib/asyncio/queues.py
+++ b/contrib/tools/python3/src/Lib/asyncio/queues.py
@@ -2,7 +2,7 @@ __all__ = ('Queue', 'PriorityQueue', 'LifoQueue', 'QueueFull', 'QueueEmpty')
import collections
import heapq
-import warnings
+import warnings
from . import events
from . import locks
@@ -35,9 +35,9 @@ class Queue:
self._loop = events.get_event_loop()
else:
self._loop = loop
- warnings.warn("The loop argument is deprecated since Python 3.8, "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning, stacklevel=2)
+ warnings.warn("The loop argument is deprecated since Python 3.8, "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning, stacklevel=2)
self._maxsize = maxsize
# Futures.
@@ -45,7 +45,7 @@ class Queue:
# Futures.
self._putters = collections.deque()
self._unfinished_tasks = 0
- self._finished = locks.Event(loop=loop)
+ self._finished = locks.Event(loop=loop)
self._finished.set()
self._init(maxsize)
@@ -76,9 +76,9 @@ class Queue:
def __str__(self):
return f'<{type(self).__name__} {self._format()}>'
- def __class_getitem__(cls, type):
- return cls
-
+ def __class_getitem__(cls, type):
+ return cls
+
def _format(self):
result = f'maxsize={self._maxsize!r}'
if getattr(self, '_queue', None):
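
Note: the queues.py hunks deprecate the loop argument in the same way as locks.py and add __class_getitem__, so Queue and its subclasses can be subscripted at runtime in type annotations. A sketch; in this exact implementation subscription simply returns the class itself:

    import asyncio

    q: asyncio.Queue[int] = asyncio.Queue()    # runtime subscription now allowed
    print(asyncio.Queue[int])                  # the Queue class (no alias object here)
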
diff --git a/contrib/tools/python3/src/Lib/asyncio/runners.py b/contrib/tools/python3/src/Lib/asyncio/runners.py
index 6920acba38..d6cdeef9a6 100644
--- a/contrib/tools/python3/src/Lib/asyncio/runners.py
+++ b/contrib/tools/python3/src/Lib/asyncio/runners.py
@@ -5,8 +5,8 @@ from . import events
from . import tasks
-def run(main, *, debug=None):
- """Execute the coroutine and return the result.
+def run(main, *, debug=None):
+ """Execute the coroutine and return the result.
This function runs the passed coroutine, taking care of
managing the asyncio event loop and finalizing asynchronous
@@ -39,14 +39,14 @@ def run(main, *, debug=None):
loop = events.new_event_loop()
try:
events.set_event_loop(loop)
- if debug is not None:
- loop.set_debug(debug)
+ if debug is not None:
+ loop.set_debug(debug)
return loop.run_until_complete(main)
finally:
try:
_cancel_all_tasks(loop)
loop.run_until_complete(loop.shutdown_asyncgens())
- loop.run_until_complete(loop.shutdown_default_executor())
+ loop.run_until_complete(loop.shutdown_default_executor())
finally:
events.set_event_loop(None)
loop.close()
@@ -61,7 +61,7 @@ def _cancel_all_tasks(loop):
task.cancel()
loop.run_until_complete(
- tasks._gather(*to_cancel, loop=loop, return_exceptions=True))
+ tasks._gather(*to_cancel, loop=loop, return_exceptions=True))
for task in to_cancel:
if task.cancelled():
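
Note: the runners.py hunks change run() to accept debug=None, meaning "inherit the global debug setting" rather than forcing it off, and to shut down the default executor before closing the loop. A usage sketch, not part of the diff:

    import asyncio

    async def main():
        await asyncio.sleep(0)
        return "done"

    print(asyncio.run(main(), debug=True))     # force debug mode for this run
    print(asyncio.run(main()))                 # debug=None: keep the global setting
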
diff --git a/contrib/tools/python3/src/Lib/asyncio/selector_events.py b/contrib/tools/python3/src/Lib/asyncio/selector_events.py
index 59cb6b1bab..2ecf392b8b 100644
--- a/contrib/tools/python3/src/Lib/asyncio/selector_events.py
+++ b/contrib/tools/python3/src/Lib/asyncio/selector_events.py
@@ -25,7 +25,7 @@ from . import futures
from . import protocols
from . import sslproto
from . import transports
-from . import trsock
+from . import trsock
from .log import logger
@@ -40,11 +40,11 @@ def _test_selector_event(selector, fd, event):
return bool(key.events & event)
-def _check_ssl_socket(sock):
- if ssl is not None and isinstance(sock, ssl.SSLSocket):
- raise TypeError("Socket cannot be of type SSLSocket")
-
-
+def _check_ssl_socket(sock):
+ if ssl is not None and isinstance(sock, ssl.SSLSocket):
+ raise TypeError("Socket cannot be of type SSLSocket")
+
+
class BaseSelectorEventLoop(base_events.BaseEventLoop):
"""Selector event loop.
@@ -133,17 +133,17 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop):
# a socket is closed, send() raises OSError (with errno set to
# EBADF, but let's not rely on the exact error code).
csock = self._csock
- if csock is None:
- return
-
- try:
- csock.send(b'\0')
- except OSError:
- if self._debug:
- logger.debug("Fail to write a null byte into the "
- "self-pipe socket",
- exc_info=True)
-
+ if csock is None:
+ return
+
+ try:
+ csock.send(b'\0')
+ except OSError:
+ if self._debug:
+ logger.debug("Fail to write a null byte into the "
+ "self-pipe socket",
+ exc_info=True)
+
def _start_serving(self, protocol_factory, sock,
sslcontext=None, server=None, backlog=100,
ssl_handshake_timeout=constants.SSL_HANDSHAKE_TIMEOUT):
@@ -179,7 +179,7 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop):
self.call_exception_handler({
'message': 'socket.accept() out of system resource',
'exception': exc,
- 'socket': trsock.TransportSocket(sock),
+ 'socket': trsock.TransportSocket(sock),
})
self._remove_reader(sock.fileno())
self.call_later(constants.ACCEPT_RETRY_DELAY,
@@ -216,14 +216,14 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop):
try:
await waiter
- except BaseException:
+ except BaseException:
transport.close()
raise
- # It's now up to the protocol to handle the connection.
+ # It's now up to the protocol to handle the connection.
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
if self._debug:
context = {
'message':
@@ -268,7 +268,7 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop):
(handle, writer))
if reader is not None:
reader.cancel()
- return handle
+ return handle
def _remove_reader(self, fd):
if self.is_closed():
@@ -305,7 +305,7 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop):
(reader, handle))
if writer is not None:
writer.cancel()
- return handle
+ return handle
def _remove_writer(self, fd):
"""Remove a writer callback."""
@@ -333,7 +333,7 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop):
def add_reader(self, fd, callback, *args):
"""Add a reader callback."""
self._ensure_fd_no_transport(fd)
- self._add_reader(fd, callback, *args)
+ self._add_reader(fd, callback, *args)
def remove_reader(self, fd):
"""Remove a reader callback."""
@@ -343,7 +343,7 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop):
def add_writer(self, fd, callback, *args):
"""Add a writer callback.."""
self._ensure_fd_no_transport(fd)
- self._add_writer(fd, callback, *args)
+ self._add_writer(fd, callback, *args)
def remove_writer(self, fd):
"""Remove a writer callback."""
@@ -357,37 +357,37 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop):
The maximum amount of data to be received at once is specified by
nbytes.
"""
- _check_ssl_socket(sock)
+ _check_ssl_socket(sock)
if self._debug and sock.gettimeout() != 0:
raise ValueError("the socket must be non-blocking")
- try:
- return sock.recv(n)
- except (BlockingIOError, InterruptedError):
- pass
+ try:
+ return sock.recv(n)
+ except (BlockingIOError, InterruptedError):
+ pass
fut = self.create_future()
- fd = sock.fileno()
- self._ensure_fd_no_transport(fd)
- handle = self._add_reader(fd, self._sock_recv, fut, sock, n)
- fut.add_done_callback(
- functools.partial(self._sock_read_done, fd, handle=handle))
+ fd = sock.fileno()
+ self._ensure_fd_no_transport(fd)
+ handle = self._add_reader(fd, self._sock_recv, fut, sock, n)
+ fut.add_done_callback(
+ functools.partial(self._sock_read_done, fd, handle=handle))
return await fut
- def _sock_read_done(self, fd, fut, handle=None):
- if handle is None or not handle.cancelled():
- self.remove_reader(fd)
-
- def _sock_recv(self, fut, sock, n):
+ def _sock_read_done(self, fd, fut, handle=None):
+ if handle is None or not handle.cancelled():
+ self.remove_reader(fd)
+
+ def _sock_recv(self, fut, sock, n):
# _sock_recv() can add itself as an I/O callback if the operation can't
# be done immediately. Don't use it directly, call sock_recv().
- if fut.done():
+ if fut.done():
return
try:
data = sock.recv(n)
except (BlockingIOError, InterruptedError):
- return # try again next time
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ return # try again next time
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
fut.set_exception(exc)
else:
fut.set_result(data)
@@ -398,34 +398,34 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop):
The received data is written into *buf* (a writable buffer).
The return value is the number of bytes written.
"""
- _check_ssl_socket(sock)
+ _check_ssl_socket(sock)
if self._debug and sock.gettimeout() != 0:
raise ValueError("the socket must be non-blocking")
- try:
- return sock.recv_into(buf)
- except (BlockingIOError, InterruptedError):
- pass
+ try:
+ return sock.recv_into(buf)
+ except (BlockingIOError, InterruptedError):
+ pass
fut = self.create_future()
- fd = sock.fileno()
- self._ensure_fd_no_transport(fd)
- handle = self._add_reader(fd, self._sock_recv_into, fut, sock, buf)
- fut.add_done_callback(
- functools.partial(self._sock_read_done, fd, handle=handle))
+ fd = sock.fileno()
+ self._ensure_fd_no_transport(fd)
+ handle = self._add_reader(fd, self._sock_recv_into, fut, sock, buf)
+ fut.add_done_callback(
+ functools.partial(self._sock_read_done, fd, handle=handle))
return await fut
- def _sock_recv_into(self, fut, sock, buf):
+ def _sock_recv_into(self, fut, sock, buf):
# _sock_recv_into() can add itself as an I/O callback if the operation
# can't be done immediately. Don't use it directly, call
# sock_recv_into().
- if fut.done():
+ if fut.done():
return
try:
nbytes = sock.recv_into(buf)
except (BlockingIOError, InterruptedError):
- return # try again next time
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ return # try again next time
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
fut.set_exception(exc)
else:
fut.set_result(nbytes)
@@ -439,56 +439,56 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop):
raised, and there is no way to determine how much data, if any, was
successfully processed by the receiving end of the connection.
"""
- _check_ssl_socket(sock)
+ _check_ssl_socket(sock)
if self._debug and sock.gettimeout() != 0:
raise ValueError("the socket must be non-blocking")
- try:
- n = sock.send(data)
- except (BlockingIOError, InterruptedError):
- n = 0
-
- if n == len(data):
- # all data sent
- return
-
+ try:
+ n = sock.send(data)
+ except (BlockingIOError, InterruptedError):
+ n = 0
+
+ if n == len(data):
+ # all data sent
+ return
+
fut = self.create_future()
- fd = sock.fileno()
- self._ensure_fd_no_transport(fd)
- # use a trick with a list in closure to store a mutable state
- handle = self._add_writer(fd, self._sock_sendall, fut, sock,
- memoryview(data), [n])
- fut.add_done_callback(
- functools.partial(self._sock_write_done, fd, handle=handle))
+ fd = sock.fileno()
+ self._ensure_fd_no_transport(fd)
+ # use a trick with a list in closure to store a mutable state
+ handle = self._add_writer(fd, self._sock_sendall, fut, sock,
+ memoryview(data), [n])
+ fut.add_done_callback(
+ functools.partial(self._sock_write_done, fd, handle=handle))
return await fut
- def _sock_sendall(self, fut, sock, view, pos):
- if fut.done():
- # Future cancellation can be scheduled on previous loop iteration
+ def _sock_sendall(self, fut, sock, view, pos):
+ if fut.done():
+ # Future cancellation can be scheduled on previous loop iteration
return
- start = pos[0]
+ start = pos[0]
try:
- n = sock.send(view[start:])
+ n = sock.send(view[start:])
except (BlockingIOError, InterruptedError):
- return
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ return
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
fut.set_exception(exc)
return
- start += n
-
- if start == len(view):
+ start += n
+
+ if start == len(view):
fut.set_result(None)
else:
- pos[0] = start
+ pos[0] = start
async def sock_connect(self, sock, address):
"""Connect to a remote socket at address.
This method is a coroutine.
"""
- _check_ssl_socket(sock)
+ _check_ssl_socket(sock)
if self._debug and sock.gettimeout() != 0:
raise ValueError("the socket must be non-blocking")
@@ -510,24 +510,24 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop):
# connection runs in background. We have to wait until the socket
# becomes writable to be notified when the connection succeed or
# fails.
- self._ensure_fd_no_transport(fd)
- handle = self._add_writer(
- fd, self._sock_connect_cb, fut, sock, address)
+ self._ensure_fd_no_transport(fd)
+ handle = self._add_writer(
+ fd, self._sock_connect_cb, fut, sock, address)
fut.add_done_callback(
- functools.partial(self._sock_write_done, fd, handle=handle))
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ functools.partial(self._sock_write_done, fd, handle=handle))
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
fut.set_exception(exc)
else:
fut.set_result(None)
- def _sock_write_done(self, fd, fut, handle=None):
- if handle is None or not handle.cancelled():
- self.remove_writer(fd)
+ def _sock_write_done(self, fd, fut, handle=None):
+ if handle is None or not handle.cancelled():
+ self.remove_writer(fd)
def _sock_connect_cb(self, fut, sock, address):
- if fut.done():
+ if fut.done():
return
try:
@@ -538,9 +538,9 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop):
except (BlockingIOError, InterruptedError):
# socket is still registered, the callback will be retried later
pass
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
fut.set_exception(exc)
else:
fut.set_result(None)
@@ -553,26 +553,26 @@ class BaseSelectorEventLoop(base_events.BaseEventLoop):
object usable to send and receive data on the connection, and address
is the address bound to the socket on the other end of the connection.
"""
- _check_ssl_socket(sock)
+ _check_ssl_socket(sock)
if self._debug and sock.gettimeout() != 0:
raise ValueError("the socket must be non-blocking")
fut = self.create_future()
- self._sock_accept(fut, sock)
+ self._sock_accept(fut, sock)
return await fut
- def _sock_accept(self, fut, sock):
+ def _sock_accept(self, fut, sock):
fd = sock.fileno()
try:
conn, address = sock.accept()
conn.setblocking(False)
except (BlockingIOError, InterruptedError):
- self._ensure_fd_no_transport(fd)
- handle = self._add_reader(fd, self._sock_accept, fut, sock)
- fut.add_done_callback(
- functools.partial(self._sock_read_done, fd, handle=handle))
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ self._ensure_fd_no_transport(fd)
+ handle = self._add_reader(fd, self._sock_accept, fut, sock)
+ fut.add_done_callback(
+ functools.partial(self._sock_read_done, fd, handle=handle))
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
fut.set_exception(exc)
else:
fut.set_result((conn, address))
@@ -624,11 +624,11 @@ class _SelectorTransport(transports._FlowControlMixin,
def __init__(self, loop, sock, protocol, extra=None, server=None):
super().__init__(extra, loop)
- self._extra['socket'] = trsock.TransportSocket(sock)
- try:
- self._extra['sockname'] = sock.getsockname()
- except OSError:
- self._extra['sockname'] = None
+ self._extra['socket'] = trsock.TransportSocket(sock)
+ try:
+ self._extra['sockname'] = sock.getsockname()
+ except OSError:
+ self._extra['sockname'] = None
if 'peername' not in self._extra:
try:
self._extra['peername'] = sock.getpeername()
@@ -699,14 +699,14 @@ class _SelectorTransport(transports._FlowControlMixin,
self._loop._remove_writer(self._sock_fd)
self._loop.call_soon(self._call_connection_lost, None)
- def __del__(self, _warn=warnings.warn):
+ def __del__(self, _warn=warnings.warn):
if self._sock is not None:
- _warn(f"unclosed transport {self!r}", ResourceWarning, source=self)
+ _warn(f"unclosed transport {self!r}", ResourceWarning, source=self)
self._sock.close()
def _fatal_error(self, exc, message='Fatal error on transport'):
# Should be called from exception handler only.
- if isinstance(exc, OSError):
+ if isinstance(exc, OSError):
if self._loop.get_debug():
logger.debug("%r: %s", self, message, exc_info=True)
else:
@@ -820,9 +820,9 @@ class _SelectorSocketTransport(_SelectorTransport):
buf = self._protocol.get_buffer(-1)
if not len(buf):
raise RuntimeError('get_buffer() returned an empty buffer')
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self._fatal_error(
exc, 'Fatal error: protocol.get_buffer() call failed.')
return
@@ -831,9 +831,9 @@ class _SelectorSocketTransport(_SelectorTransport):
nbytes = self._sock.recv_into(buf)
except (BlockingIOError, InterruptedError):
return
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self._fatal_error(exc, 'Fatal read error on socket transport')
return
@@ -843,9 +843,9 @@ class _SelectorSocketTransport(_SelectorTransport):
try:
self._protocol.buffer_updated(nbytes)
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self._fatal_error(
exc, 'Fatal error: protocol.buffer_updated() call failed.')
@@ -856,9 +856,9 @@ class _SelectorSocketTransport(_SelectorTransport):
data = self._sock.recv(self.max_size)
except (BlockingIOError, InterruptedError):
return
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self._fatal_error(exc, 'Fatal read error on socket transport')
return
@@ -868,9 +868,9 @@ class _SelectorSocketTransport(_SelectorTransport):
try:
self._protocol.data_received(data)
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self._fatal_error(
exc, 'Fatal error: protocol.data_received() call failed.')
@@ -880,9 +880,9 @@ class _SelectorSocketTransport(_SelectorTransport):
try:
keep_open = self._protocol.eof_received()
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self._fatal_error(
exc, 'Fatal error: protocol.eof_received() call failed.')
return
@@ -918,9 +918,9 @@ class _SelectorSocketTransport(_SelectorTransport):
n = self._sock.send(data)
except (BlockingIOError, InterruptedError):
pass
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self._fatal_error(exc, 'Fatal write error on socket transport')
return
else:
@@ -943,9 +943,9 @@ class _SelectorSocketTransport(_SelectorTransport):
n = self._sock.send(self._buffer)
except (BlockingIOError, InterruptedError):
pass
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self._loop._remove_writer(self._sock_fd)
self._buffer.clear()
self._fatal_error(exc, 'Fatal write error on socket transport')
@@ -1021,9 +1021,9 @@ class _SelectorDatagramTransport(_SelectorTransport):
pass
except OSError as exc:
self._protocol.error_received(exc)
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self._fatal_error(exc, 'Fatal read error on datagram transport')
else:
self._protocol.datagram_received(data, addr)
@@ -1035,11 +1035,11 @@ class _SelectorDatagramTransport(_SelectorTransport):
if not data:
return
- if self._address:
- if addr not in (None, self._address):
- raise ValueError(
- f'Invalid address: must be None or {self._address}')
- addr = self._address
+ if self._address:
+ if addr not in (None, self._address):
+ raise ValueError(
+ f'Invalid address: must be None or {self._address}')
+ addr = self._address
if self._conn_lost and self._address:
if self._conn_lost >= constants.LOG_THRESHOLD_FOR_CONNLOST_WRITES:
@@ -1050,7 +1050,7 @@ class _SelectorDatagramTransport(_SelectorTransport):
if not self._buffer:
# Attempt to send it right away first.
try:
- if self._extra['peername']:
+ if self._extra['peername']:
self._sock.send(data)
else:
self._sock.sendto(data, addr)
@@ -1060,9 +1060,9 @@ class _SelectorDatagramTransport(_SelectorTransport):
except OSError as exc:
self._protocol.error_received(exc)
return
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self._fatal_error(
exc, 'Fatal write error on datagram transport')
return
@@ -1075,7 +1075,7 @@ class _SelectorDatagramTransport(_SelectorTransport):
while self._buffer:
data, addr = self._buffer.popleft()
try:
- if self._extra['peername']:
+ if self._extra['peername']:
self._sock.send(data)
else:
self._sock.sendto(data, addr)
@@ -1085,9 +1085,9 @@ class _SelectorDatagramTransport(_SelectorTransport):
except OSError as exc:
self._protocol.error_received(exc)
return
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self._fatal_error(
exc, 'Fatal write error on datagram transport')
return
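
Note: the selector_events.py hunks give the sock_recv/sock_recv_into/sock_sendall/sock_accept coroutines a synchronous fast path: try the syscall immediately and fall back to a future plus selector callback only on BlockingIOError/InterruptedError, with SSLSocket rejected up front by _check_ssl_socket(). A usage sketch of those public APIs (requires network access; the host is illustrative):

    import asyncio
    import socket

    async def head_request(host="example.com"):
        loop = asyncio.get_running_loop()
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.setblocking(False)                 # the sock_* APIs require this
        await loop.sock_connect(sock, (host, 80))
        await loop.sock_sendall(
            sock, b"HEAD / HTTP/1.0\r\nHost: " + host.encode() + b"\r\n\r\n")
        data = await loop.sock_recv(sock, 1024) # may complete on the fast path
        sock.close()
        return data

    print(asyncio.run(head_request())[:64])
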
diff --git a/contrib/tools/python3/src/Lib/asyncio/sslproto.py b/contrib/tools/python3/src/Lib/asyncio/sslproto.py
index cad25b2653..6731363a7a 100644
--- a/contrib/tools/python3/src/Lib/asyncio/sslproto.py
+++ b/contrib/tools/python3/src/Lib/asyncio/sslproto.py
@@ -315,9 +315,9 @@ class _SSLProtocolTransport(transports._FlowControlMixin,
self._closed = True
self._ssl_protocol._start_shutdown()
- def __del__(self, _warn=warnings.warn):
+ def __del__(self, _warn=warnings.warn):
if not self._closed:
- _warn(f"unclosed transport {self!r}", ResourceWarning, source=self)
+ _warn(f"unclosed transport {self!r}", ResourceWarning, source=self)
self.close()
def is_reading(self):
@@ -497,11 +497,11 @@ class SSLProtocol(protocols.Protocol):
self._app_transport._closed = True
self._transport = None
self._app_transport = None
- if getattr(self, '_handshake_timeout_handle', None):
- self._handshake_timeout_handle.cancel()
+ if getattr(self, '_handshake_timeout_handle', None):
+ self._handshake_timeout_handle.cancel()
self._wakeup_waiter(exc)
- self._app_protocol = None
- self._sslpipe = None
+ self._app_protocol = None
+ self._sslpipe = None
def pause_writing(self):
"""Called when the low-level transport's buffer goes over
@@ -526,9 +526,9 @@ class SSLProtocol(protocols.Protocol):
try:
ssldata, appdata = self._sslpipe.feed_ssldata(data)
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as e:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as e:
self._fatal_error(e, 'SSL error in data received')
return
@@ -543,9 +543,9 @@ class SSLProtocol(protocols.Protocol):
self._app_protocol, chunk)
else:
self._app_protocol.data_received(chunk)
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as ex:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as ex:
self._fatal_error(
ex, 'application protocol failed to receive SSL data')
return
@@ -631,9 +631,9 @@ class SSLProtocol(protocols.Protocol):
raise handshake_exc
peercert = sslobj.getpeercert()
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
if isinstance(exc, ssl.CertificateError):
msg = 'SSL handshake failed on verifying the certificate'
else:
@@ -696,9 +696,9 @@ class SSLProtocol(protocols.Protocol):
# delete it and reduce the outstanding buffer size.
del self._write_backlog[0]
self._write_buffer_size -= len(data)
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
if self._in_handshake:
# Exceptions will be re-raised in _on_handshake_complete.
self._on_handshake_complete(exc)
@@ -706,7 +706,7 @@ class SSLProtocol(protocols.Protocol):
self._fatal_error(exc, 'Fatal error on SSL transport')
def _fatal_error(self, exc, message='Fatal error on transport'):
- if isinstance(exc, OSError):
+ if isinstance(exc, OSError):
if self._loop.get_debug():
logger.debug("%r: %s", self, message, exc_info=True)
else:
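
Note: the sslproto.py hunks repeat the exception pattern used throughout this commit: handlers catch BaseException so fatal errors can be reported, but SystemExit and KeyboardInterrupt are re-raised first so they still unwind the interpreter. Distilled, not part of the diff:

    def run_guarded(callback, report):
        try:
            return callback()
        except (SystemExit, KeyboardInterrupt):
            raise                               # never swallow interpreter exits
        except BaseException as exc:
            report(exc)                         # e.g. transport._fatal_error(exc, ...)
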
diff --git a/contrib/tools/python3/src/Lib/asyncio/staggered.py b/contrib/tools/python3/src/Lib/asyncio/staggered.py
index 451a53a16f..9627efdf11 100644
--- a/contrib/tools/python3/src/Lib/asyncio/staggered.py
+++ b/contrib/tools/python3/src/Lib/asyncio/staggered.py
@@ -1,149 +1,149 @@
-"""Support for running coroutines in parallel with staggered start times."""
-
-__all__ = 'staggered_race',
-
-import contextlib
-import typing
-
-from . import events
-from . import exceptions as exceptions_mod
-from . import locks
-from . import tasks
-
-
-async def staggered_race(
- coro_fns: typing.Iterable[typing.Callable[[], typing.Awaitable]],
- delay: typing.Optional[float],
- *,
- loop: events.AbstractEventLoop = None,
-) -> typing.Tuple[
- typing.Any,
- typing.Optional[int],
- typing.List[typing.Optional[Exception]]
-]:
- """Run coroutines with staggered start times and take the first to finish.
-
- This method takes an iterable of coroutine functions. The first one is
- started immediately. From then on, whenever the immediately preceding one
- fails (raises an exception), or when *delay* seconds has passed, the next
- coroutine is started. This continues until one of the coroutines complete
- successfully, in which case all others are cancelled, or until all
- coroutines fail.
-
- The coroutines provided should be well-behaved in the following way:
-
- * They should only ``return`` if completed successfully.
-
- * They should always raise an exception if they did not complete
- successfully. In particular, if they handle cancellation, they should
- probably reraise, like this::
-
- try:
- # do work
- except asyncio.CancelledError:
- # undo partially completed work
- raise
-
- Args:
- coro_fns: an iterable of coroutine functions, i.e. callables that
- return a coroutine object when called. Use ``functools.partial`` or
- lambdas to pass arguments.
-
- delay: amount of time, in seconds, between starting coroutines. If
- ``None``, the coroutines will run sequentially.
-
- loop: the event loop to use.
-
- Returns:
- tuple *(winner_result, winner_index, exceptions)* where
-
- - *winner_result*: the result of the winning coroutine, or ``None``
- if no coroutines won.
-
- - *winner_index*: the index of the winning coroutine in
- ``coro_fns``, or ``None`` if no coroutines won. If the winning
- coroutine may return None on success, *winner_index* can be used
- to definitively determine whether any coroutine won.
-
- - *exceptions*: list of exceptions returned by the coroutines.
- ``len(exceptions)`` is equal to the number of coroutines actually
- started, and the order is the same as in ``coro_fns``. The winning
- coroutine's entry is ``None``.
-
- """
- # TODO: when we have aiter() and anext(), allow async iterables in coro_fns.
- loop = loop or events.get_running_loop()
- enum_coro_fns = enumerate(coro_fns)
- winner_result = None
- winner_index = None
- exceptions = []
- running_tasks = []
-
- async def run_one_coro(
- previous_failed: typing.Optional[locks.Event]) -> None:
- # Wait for the previous task to finish, or for delay seconds
- if previous_failed is not None:
- with contextlib.suppress(exceptions_mod.TimeoutError):
- # Use asyncio.wait_for() instead of asyncio.wait() here, so
- # that if we get cancelled at this point, Event.wait() is also
- # cancelled, otherwise there will be a "Task destroyed but it is
- # pending" later.
- await tasks.wait_for(previous_failed.wait(), delay)
- # Get the next coroutine to run
- try:
- this_index, coro_fn = next(enum_coro_fns)
- except StopIteration:
- return
- # Start task that will run the next coroutine
- this_failed = locks.Event()
- next_task = loop.create_task(run_one_coro(this_failed))
- running_tasks.append(next_task)
- assert len(running_tasks) == this_index + 2
- # Prepare place to put this coroutine's exceptions if not won
- exceptions.append(None)
- assert len(exceptions) == this_index + 1
-
- try:
- result = await coro_fn()
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as e:
- exceptions[this_index] = e
- this_failed.set() # Kickstart the next coroutine
- else:
- # Store winner's results
- nonlocal winner_index, winner_result
- assert winner_index is None
- winner_index = this_index
- winner_result = result
- # Cancel all other tasks. We take care to not cancel the current
- # task as well. If we do so, then since there is no `await` after
- # here and CancelledError are usually thrown at one, we will
- # encounter a curious corner case where the current task will end
- # up as done() == True, cancelled() == False, exception() ==
- # asyncio.CancelledError. This behavior is specified in
- # https://bugs.python.org/issue30048
- for i, t in enumerate(running_tasks):
- if i != this_index:
- t.cancel()
-
- first_task = loop.create_task(run_one_coro(None))
- running_tasks.append(first_task)
- try:
- # Wait for a growing list of tasks to all finish: poor man's version of
- # curio's TaskGroup or trio's nursery
- done_count = 0
- while done_count != len(running_tasks):
- done, _ = await tasks.wait(running_tasks)
- done_count = len(done)
- # If run_one_coro raises an unhandled exception, it's probably a
- # programming error, and I want to see it.
- if __debug__:
- for d in done:
- if d.done() and not d.cancelled() and d.exception():
- raise d.exception()
- return winner_result, winner_index, exceptions
- finally:
- # Make sure no tasks are left running if we leave this function
- for t in running_tasks:
- t.cancel()
+"""Support for running coroutines in parallel with staggered start times."""
+
+__all__ = 'staggered_race',
+
+import contextlib
+import typing
+
+from . import events
+from . import exceptions as exceptions_mod
+from . import locks
+from . import tasks
+
+
+async def staggered_race(
+ coro_fns: typing.Iterable[typing.Callable[[], typing.Awaitable]],
+ delay: typing.Optional[float],
+ *,
+ loop: events.AbstractEventLoop = None,
+) -> typing.Tuple[
+ typing.Any,
+ typing.Optional[int],
+ typing.List[typing.Optional[Exception]]
+]:
+ """Run coroutines with staggered start times and take the first to finish.
+
+ This method takes an iterable of coroutine functions. The first one is
+ started immediately. From then on, whenever the immediately preceding one
+ fails (raises an exception), or when *delay* seconds has passed, the next
+ coroutine is started. This continues until one of the coroutines complete
+ successfully, in which case all others are cancelled, or until all
+ coroutines fail.
+
+ The coroutines provided should be well-behaved in the following way:
+
+ * They should only ``return`` if completed successfully.
+
+ * They should always raise an exception if they did not complete
+ successfully. In particular, if they handle cancellation, they should
+ probably reraise, like this::
+
+ try:
+ # do work
+ except asyncio.CancelledError:
+ # undo partially completed work
+ raise
+
+ Args:
+ coro_fns: an iterable of coroutine functions, i.e. callables that
+ return a coroutine object when called. Use ``functools.partial`` or
+ lambdas to pass arguments.
+
+ delay: amount of time, in seconds, between starting coroutines. If
+ ``None``, the coroutines will run sequentially.
+
+ loop: the event loop to use.
+
+ Returns:
+ tuple *(winner_result, winner_index, exceptions)* where
+
+ - *winner_result*: the result of the winning coroutine, or ``None``
+ if no coroutines won.
+
+ - *winner_index*: the index of the winning coroutine in
+ ``coro_fns``, or ``None`` if no coroutines won. If the winning
+ coroutine may return None on success, *winner_index* can be used
+ to definitively determine whether any coroutine won.
+
+ - *exceptions*: list of exceptions returned by the coroutines.
+ ``len(exceptions)`` is equal to the number of coroutines actually
+ started, and the order is the same as in ``coro_fns``. The winning
+ coroutine's entry is ``None``.
+
+ """
+ # TODO: when we have aiter() and anext(), allow async iterables in coro_fns.
+ loop = loop or events.get_running_loop()
+ enum_coro_fns = enumerate(coro_fns)
+ winner_result = None
+ winner_index = None
+ exceptions = []
+ running_tasks = []
+
+ async def run_one_coro(
+ previous_failed: typing.Optional[locks.Event]) -> None:
+ # Wait for the previous task to finish, or for delay seconds
+ if previous_failed is not None:
+ with contextlib.suppress(exceptions_mod.TimeoutError):
+ # Use asyncio.wait_for() instead of asyncio.wait() here, so
+ # that if we get cancelled at this point, Event.wait() is also
+ # cancelled, otherwise there will be a "Task destroyed but it is
+ # pending" later.
+ await tasks.wait_for(previous_failed.wait(), delay)
+ # Get the next coroutine to run
+ try:
+ this_index, coro_fn = next(enum_coro_fns)
+ except StopIteration:
+ return
+ # Start task that will run the next coroutine
+ this_failed = locks.Event()
+ next_task = loop.create_task(run_one_coro(this_failed))
+ running_tasks.append(next_task)
+ assert len(running_tasks) == this_index + 2
+ # Prepare place to put this coroutine's exceptions if not won
+ exceptions.append(None)
+ assert len(exceptions) == this_index + 1
+
+ try:
+ result = await coro_fn()
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as e:
+ exceptions[this_index] = e
+ this_failed.set() # Kickstart the next coroutine
+ else:
+ # Store winner's results
+ nonlocal winner_index, winner_result
+ assert winner_index is None
+ winner_index = this_index
+ winner_result = result
+ # Cancel all other tasks. We take care to not cancel the current
+ # task as well. If we do so, then since there is no `await` after
+ # here and CancelledError are usually thrown at one, we will
+ # encounter a curious corner case where the current task will end
+ # up as done() == True, cancelled() == False, exception() ==
+ # asyncio.CancelledError. This behavior is specified in
+ # https://bugs.python.org/issue30048
+ for i, t in enumerate(running_tasks):
+ if i != this_index:
+ t.cancel()
+
+ first_task = loop.create_task(run_one_coro(None))
+ running_tasks.append(first_task)
+ try:
+ # Wait for a growing list of tasks to all finish: poor man's version of
+ # curio's TaskGroup or trio's nursery
+ done_count = 0
+ while done_count != len(running_tasks):
+ done, _ = await tasks.wait(running_tasks)
+ done_count = len(done)
+ # If run_one_coro raises an unhandled exception, it's probably a
+ # programming error, and I want to see it.
+ if __debug__:
+ for d in done:
+ if d.done() and not d.cancelled() and d.exception():
+ raise d.exception()
+ return winner_result, winner_index, exceptions
+ finally:
+ # Make sure no tasks are left running if we leave this function
+ for t in running_tasks:
+ t.cancel()
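
Note: staggered.py is added wholesale; its staggered_race() is the Happy Eyeballs helper that loop.create_connection() uses when happy_eyeballs_delay is set. A usage sketch against the documented signature (the module is private, so importing it directly is for illustration only):

    import asyncio
    import functools
    from asyncio.staggered import staggered_race

    async def attempt(delay_s, label, fail):
        await asyncio.sleep(delay_s)
        if fail:
            raise OSError(f"{label} failed")
        return label

    async def main():
        winner, index, errors = await staggered_race(
            [functools.partial(attempt, 0.3, "primary", True),
             functools.partial(attempt, 0.1, "fallback", False)],
            delay=0.05)                         # start the next attempt 50 ms later
        print(winner, index, errors)            # fallback 1 [...]

    asyncio.run(main())
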
diff --git a/contrib/tools/python3/src/Lib/asyncio/streams.py b/contrib/tools/python3/src/Lib/asyncio/streams.py
index 3c80bb8892..9c2cd4ad6b 100644
--- a/contrib/tools/python3/src/Lib/asyncio/streams.py
+++ b/contrib/tools/python3/src/Lib/asyncio/streams.py
@@ -1,19 +1,19 @@
__all__ = (
'StreamReader', 'StreamWriter', 'StreamReaderProtocol',
- 'open_connection', 'start_server')
+ 'open_connection', 'start_server')
import socket
-import sys
-import warnings
-import weakref
+import sys
+import warnings
+import weakref
if hasattr(socket, 'AF_UNIX'):
__all__ += ('open_unix_connection', 'start_unix_server')
from . import coroutines
from . import events
-from . import exceptions
-from . import format_helpers
+from . import exceptions
+from . import format_helpers
from . import protocols
from .log import logger
from .tasks import sleep
@@ -43,10 +43,10 @@ async def open_connection(host=None, port=None, *,
"""
if loop is None:
loop = events.get_event_loop()
- else:
- warnings.warn("The loop argument is deprecated since Python 3.8, "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning, stacklevel=2)
+ else:
+ warnings.warn("The loop argument is deprecated since Python 3.8, "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning, stacklevel=2)
reader = StreamReader(limit=limit, loop=loop)
protocol = StreamReaderProtocol(reader, loop=loop)
transport, _ = await loop.create_connection(
@@ -80,10 +80,10 @@ async def start_server(client_connected_cb, host=None, port=None, *,
"""
if loop is None:
loop = events.get_event_loop()
- else:
- warnings.warn("The loop argument is deprecated since Python 3.8, "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning, stacklevel=2)
+ else:
+ warnings.warn("The loop argument is deprecated since Python 3.8, "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning, stacklevel=2)
def factory():
reader = StreamReader(limit=limit, loop=loop)
@@ -102,10 +102,10 @@ if hasattr(socket, 'AF_UNIX'):
"""Similar to `open_connection` but works with UNIX Domain Sockets."""
if loop is None:
loop = events.get_event_loop()
- else:
- warnings.warn("The loop argument is deprecated since Python 3.8, "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning, stacklevel=2)
+ else:
+ warnings.warn("The loop argument is deprecated since Python 3.8, "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning, stacklevel=2)
reader = StreamReader(limit=limit, loop=loop)
protocol = StreamReaderProtocol(reader, loop=loop)
transport, _ = await loop.create_unix_connection(
@@ -118,10 +118,10 @@ if hasattr(socket, 'AF_UNIX'):
"""Similar to `start_server` but works with UNIX Domain Sockets."""
if loop is None:
loop = events.get_event_loop()
- else:
- warnings.warn("The loop argument is deprecated since Python 3.8, "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning, stacklevel=2)
+ else:
+ warnings.warn("The loop argument is deprecated since Python 3.8, "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning, stacklevel=2)
def factory():
reader = StreamReader(limit=limit, loop=loop)
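
Note: the streams.py hunks above deprecate loop= in open_connection/start_server (and their UNIX variants) in the same way as the other modules. A client/server round trip using the preferred forms, not part of the diff; handle is an illustrative name:

    import asyncio

    async def handle(reader, writer):
        writer.write(await reader.readline())   # echo one line back
        await writer.drain()
        writer.close()

    async def main():
        server = await asyncio.start_server(handle, "127.0.0.1", 8888)
        reader, writer = await asyncio.open_connection("127.0.0.1", 8888)
        writer.write(b"ping\n")
        print(await reader.readline())          # b'ping\n'
        writer.close()
        server.close()
        await server.wait_closed()

    asyncio.run(main())
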
@@ -196,10 +196,10 @@ class FlowControlMixin(protocols.Protocol):
self._drain_waiter = waiter
await waiter
- def _get_close_waiter(self, stream):
- raise NotImplementedError
-
+ def _get_close_waiter(self, stream):
+ raise NotImplementedError
+
class StreamReaderProtocol(FlowControlMixin, protocols.Protocol):
"""Helper class to adapt between Protocol and StreamReader.
@@ -209,86 +209,86 @@ class StreamReaderProtocol(FlowControlMixin, protocols.Protocol):
call inappropriate methods of the protocol.)
"""
- _source_traceback = None
-
+ _source_traceback = None
+
def __init__(self, stream_reader, client_connected_cb=None, loop=None):
super().__init__(loop=loop)
- if stream_reader is not None:
- self._stream_reader_wr = weakref.ref(stream_reader)
- self._source_traceback = stream_reader._source_traceback
- else:
- self._stream_reader_wr = None
- if client_connected_cb is not None:
- # This is a stream created by the `create_server()` function.
- # Keep a strong reference to the reader until a connection
- # is established.
- self._strong_reader = stream_reader
- self._reject_connection = False
+ if stream_reader is not None:
+ self._stream_reader_wr = weakref.ref(stream_reader)
+ self._source_traceback = stream_reader._source_traceback
+ else:
+ self._stream_reader_wr = None
+ if client_connected_cb is not None:
+ # This is a stream created by the `create_server()` function.
+ # Keep a strong reference to the reader until a connection
+ # is established.
+ self._strong_reader = stream_reader
+ self._reject_connection = False
self._stream_writer = None
- self._transport = None
+ self._transport = None
self._client_connected_cb = client_connected_cb
self._over_ssl = False
self._closed = self._loop.create_future()
- @property
- def _stream_reader(self):
- if self._stream_reader_wr is None:
- return None
- return self._stream_reader_wr()
-
+ @property
+ def _stream_reader(self):
+ if self._stream_reader_wr is None:
+ return None
+ return self._stream_reader_wr()
+
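For readers following the hunk above: the protocol now reaches its StreamReader only through a weak reference, so an abandoned reader can be garbage collected instead of leaking. A minimal sketch of that pattern, assuming CPython's reference counting; the names here are illustrative, not from the patch:

    import weakref

    class Reader:
        pass

    r = Reader()
    wr = weakref.ref(r)     # weak reference: does not keep the object alive
    strong = r              # extra strong reference, like _strong_reader
    del r
    assert wr() is strong   # still alive through `strong`
    del strong
    assert wr() is None     # CPython frees it once the last strong ref is gone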
def connection_made(self, transport):
- if self._reject_connection:
- context = {
- 'message': ('An open stream was garbage collected prior to '
- 'establishing network connection; '
- 'call "stream.close()" explicitly.')
- }
- if self._source_traceback:
- context['source_traceback'] = self._source_traceback
- self._loop.call_exception_handler(context)
- transport.abort()
- return
- self._transport = transport
- reader = self._stream_reader
- if reader is not None:
- reader.set_transport(transport)
+ if self._reject_connection:
+ context = {
+ 'message': ('An open stream was garbage collected prior to '
+ 'establishing network connection; '
+ 'call "stream.close()" explicitly.')
+ }
+ if self._source_traceback:
+ context['source_traceback'] = self._source_traceback
+ self._loop.call_exception_handler(context)
+ transport.abort()
+ return
+ self._transport = transport
+ reader = self._stream_reader
+ if reader is not None:
+ reader.set_transport(transport)
self._over_ssl = transport.get_extra_info('sslcontext') is not None
if self._client_connected_cb is not None:
self._stream_writer = StreamWriter(transport, self,
- reader,
+ reader,
self._loop)
- res = self._client_connected_cb(reader,
+ res = self._client_connected_cb(reader,
self._stream_writer)
if coroutines.iscoroutine(res):
self._loop.create_task(res)
- self._strong_reader = None
+ self._strong_reader = None
def connection_lost(self, exc):
- reader = self._stream_reader
- if reader is not None:
+ reader = self._stream_reader
+ if reader is not None:
if exc is None:
- reader.feed_eof()
+ reader.feed_eof()
else:
- reader.set_exception(exc)
+ reader.set_exception(exc)
if not self._closed.done():
if exc is None:
self._closed.set_result(None)
else:
self._closed.set_exception(exc)
super().connection_lost(exc)
- self._stream_reader_wr = None
+ self._stream_reader_wr = None
self._stream_writer = None
- self._transport = None
+ self._transport = None
def data_received(self, data):
- reader = self._stream_reader
- if reader is not None:
- reader.feed_data(data)
+ reader = self._stream_reader
+ if reader is not None:
+ reader.feed_data(data)
def eof_received(self):
- reader = self._stream_reader
- if reader is not None:
- reader.feed_eof()
+ reader = self._stream_reader
+ if reader is not None:
+ reader.feed_eof()
if self._over_ssl:
# Prevent a warning in SSLProtocol.eof_received:
# "returning true from eof_received()
@@ -296,9 +296,9 @@ class StreamReaderProtocol(FlowControlMixin, protocols.Protocol):
return False
return True
- def _get_close_waiter(self, stream):
- return self._closed
-
+ def _get_close_waiter(self, stream):
+ return self._closed
+
def __del__(self):
# Prevent reports about unhandled exceptions.
# Better than self._closed._log_traceback = False hack
@@ -324,8 +324,8 @@ class StreamWriter:
assert reader is None or isinstance(reader, StreamReader)
self._reader = reader
self._loop = loop
- self._complete_fut = self._loop.create_future()
- self._complete_fut.set_result(None)
+ self._complete_fut = self._loop.create_future()
+ self._complete_fut.set_result(None)
def __repr__(self):
info = [self.__class__.__name__, f'transport={self._transport!r}']
@@ -356,7 +356,7 @@ class StreamWriter:
return self._transport.is_closing()
async def wait_closed(self):
- await self._protocol._get_close_waiter(self)
+ await self._protocol._get_close_waiter(self)
def get_extra_info(self, name, default=None):
return self._transport.get_extra_info(name, default)
@@ -374,23 +374,23 @@ class StreamWriter:
if exc is not None:
raise exc
if self._transport.is_closing():
- # Wait for protocol.connection_lost() call
- # Raise connection closing error if any,
- # ConnectionResetError otherwise
+ # Wait for protocol.connection_lost() call
+ # Raise connection closing error if any,
+ # ConnectionResetError otherwise
# Yield to the event loop so connection_lost() may be
# called. Without this, _drain_helper() would return
# immediately, and code that calls
# write(...); await drain()
# in a loop would never call connection_lost(), so it
# would not see an error when the socket is closed.
- await sleep(0)
+ await sleep(0)
await self._protocol._drain_helper()
class StreamReader:
- _source_traceback = None
-
+ _source_traceback = None
+
def __init__(self, limit=_DEFAULT_LIMIT, loop=None):
# The line length limit is a security feature;
# it also doubles as half the buffer limit.
@@ -409,9 +409,9 @@ class StreamReader:
self._exception = None
self._transport = None
self._paused = False
- if self._loop.get_debug():
- self._source_traceback = format_helpers.extract_stack(
- sys._getframe(1))
+ if self._loop.get_debug():
+ self._source_traceback = format_helpers.extract_stack(
+ sys._getframe(1))
def __repr__(self):
info = ['StreamReader']
@@ -538,9 +538,9 @@ class StreamReader:
seplen = len(sep)
try:
line = await self.readuntil(sep)
- except exceptions.IncompleteReadError as e:
+ except exceptions.IncompleteReadError as e:
return e.partial
- except exceptions.LimitOverrunError as e:
+ except exceptions.LimitOverrunError as e:
if self._buffer.startswith(sep, e.consumed):
del self._buffer[:e.consumed + seplen]
else:
@@ -615,7 +615,7 @@ class StreamReader:
# see upper comment for explanation.
offset = buflen + 1 - seplen
if offset > self._limit:
- raise exceptions.LimitOverrunError(
+ raise exceptions.LimitOverrunError(
'Separator is not found, and chunk exceeds the limit',
offset)
@@ -626,13 +626,13 @@ class StreamReader:
if self._eof:
chunk = bytes(self._buffer)
self._buffer.clear()
- raise exceptions.IncompleteReadError(chunk, None)
+ raise exceptions.IncompleteReadError(chunk, None)
# _wait_for_data() will resume reading if stream was paused.
await self._wait_for_data('readuntil')
if isep > self._limit:
- raise exceptions.LimitOverrunError(
+ raise exceptions.LimitOverrunError(
'Separator is found, but chunk is longer than the limit', isep)
chunk = self._buffer[:isep + seplen]
@@ -718,7 +718,7 @@ class StreamReader:
if self._eof:
incomplete = bytes(self._buffer)
self._buffer.clear()
- raise exceptions.IncompleteReadError(incomplete, n)
+ raise exceptions.IncompleteReadError(incomplete, n)
await self._wait_for_data('readexactly')
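Taken together, the streams.py hunks re-annotate the loop-deprecation path: callers are expected to rely on the running loop rather than pass one in. A minimal usage sketch of the modern call pattern; the host and port are hypothetical:

    import asyncio

    async def fetch_line(host, port):
        # No loop argument: open_connection() uses the running loop,
        # avoiding the DeprecationWarning shown in the hunks above.
        reader, writer = await asyncio.open_connection(host, port)
        writer.write(b"ping\n")
        await writer.drain()
        line = await reader.readline()
        writer.close()
        await writer.wait_closed()
        return line

    # Hypothetical endpoint, for illustration only:
    # asyncio.run(fetch_line("127.0.0.1", 8888))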
diff --git a/contrib/tools/python3/src/Lib/asyncio/subprocess.py b/contrib/tools/python3/src/Lib/asyncio/subprocess.py
index 820304ecca..73b0713687 100644
--- a/contrib/tools/python3/src/Lib/asyncio/subprocess.py
+++ b/contrib/tools/python3/src/Lib/asyncio/subprocess.py
@@ -1,7 +1,7 @@
__all__ = 'create_subprocess_exec', 'create_subprocess_shell'
import subprocess
-import warnings
+import warnings
from . import events
from . import protocols
@@ -26,7 +26,7 @@ class SubprocessStreamProtocol(streams.FlowControlMixin,
self._transport = None
self._process_exited = False
self._pipe_fds = []
- self._stdin_closed = self._loop.create_future()
+ self._stdin_closed = self._loop.create_future()
def __repr__(self):
info = [self.__class__.__name__]
@@ -78,10 +78,10 @@ class SubprocessStreamProtocol(streams.FlowControlMixin,
if pipe is not None:
pipe.close()
self.connection_lost(exc)
- if exc is None:
- self._stdin_closed.set_result(None)
- else:
- self._stdin_closed.set_exception(exc)
+ if exc is None:
+ self._stdin_closed.set_result(None)
+ else:
+ self._stdin_closed.set_exception(exc)
return
if fd == 1:
reader = self.stdout
@@ -108,11 +108,11 @@ class SubprocessStreamProtocol(streams.FlowControlMixin,
self._transport.close()
self._transport = None
- def _get_close_waiter(self, stream):
- if stream is self.stdin:
- return self._stdin_closed
-
+ def _get_close_waiter(self, stream):
+ if stream is self.stdin:
+ return self._stdin_closed
+
class Process:
def __init__(self, transport, protocol, loop):
self._transport = transport
@@ -193,8 +193,8 @@ class Process:
stderr = self._read_stream(2)
else:
stderr = self._noop()
- stdin, stdout, stderr = await tasks._gather(stdin, stdout, stderr,
- loop=self._loop)
+ stdin, stdout, stderr = await tasks._gather(stdin, stdout, stderr,
+ loop=self._loop)
await self.wait()
return (stdout, stderr)
@@ -204,13 +204,13 @@ async def create_subprocess_shell(cmd, stdin=None, stdout=None, stderr=None,
**kwds):
if loop is None:
loop = events.get_event_loop()
- else:
- warnings.warn("The loop argument is deprecated since Python 3.8 "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning,
- stacklevel=2
- )
-
+ else:
+ warnings.warn("The loop argument is deprecated since Python 3.8 "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning,
+ stacklevel=2
+ )
+
protocol_factory = lambda: SubprocessStreamProtocol(limit=limit,
loop=loop)
transport, protocol = await loop.subprocess_shell(
@@ -225,12 +225,12 @@ async def create_subprocess_exec(program, *args, stdin=None, stdout=None,
limit=streams._DEFAULT_LIMIT, **kwds):
if loop is None:
loop = events.get_event_loop()
- else:
- warnings.warn("The loop argument is deprecated since Python 3.8 "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning,
- stacklevel=2
- )
+ else:
+ warnings.warn("The loop argument is deprecated since Python 3.8 "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning,
+ stacklevel=2
+ )
protocol_factory = lambda: SubprocessStreamProtocol(limit=limit,
loop=loop)
transport, protocol = await loop.subprocess_exec(
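The subprocess.py hunks carry the same loop deprecation plus the _stdin_closed waiter returned by _get_close_waiter(). A short sketch of the intended call pattern, assuming a POSIX `echo` binary is available:

    import asyncio

    async def run_echo():
        # No loop argument, matching the deprecation path above; stdout
        # is wired to a StreamReader via SubprocessStreamProtocol.
        proc = await asyncio.create_subprocess_exec(
            "echo", "hello",
            stdout=asyncio.subprocess.PIPE)
        stdout, _ = await proc.communicate()
        return stdout

    # asyncio.run(run_echo()) returns b'hello\n' on a POSIX system.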
diff --git a/contrib/tools/python3/src/Lib/asyncio/tasks.py b/contrib/tools/python3/src/Lib/asyncio/tasks.py
index 27a3c8c5a8..d378a369ba 100644
--- a/contrib/tools/python3/src/Lib/asyncio/tasks.py
+++ b/contrib/tools/python3/src/Lib/asyncio/tasks.py
@@ -13,7 +13,7 @@ import concurrent.futures
import contextvars
import functools
import inspect
-import itertools
+import itertools
import types
import warnings
import weakref
@@ -21,16 +21,16 @@ import weakref
from . import base_tasks
from . import coroutines
from . import events
-from . import exceptions
+from . import exceptions
from . import futures
-from .coroutines import _is_coroutine
-
-# Helper to generate new task names
-# This uses itertools.count() instead of a "+= 1" operation because the latter
-# is not thread safe. See bpo-11866 for a longer explanation.
-_task_name_counter = itertools.count(1).__next__
+from .coroutines import _is_coroutine
+# Helper to generate new task names
+# This uses itertools.count() instead of a "+= 1" operation because the latter
+# is not thread safe. See bpo-11866 for a longer explanation.
+_task_name_counter = itertools.count(1).__next__
+
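The relocated comment above is worth unpacking: "n += 1" is a read-modify-write that can interleave across threads, while itertools.count().__next__ hands out each value exactly once under CPython's GIL. A small sketch that exercises the difference; the thread and iteration counts are arbitrary:

    import itertools
    import threading

    next_id = itertools.count(1).__next__

    seen = []
    def worker():
        for _ in range(10_000):
            seen.append(next_id())

    threads = [threading.Thread(target=worker) for _ in range(4)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    assert len(set(seen)) == 40_000   # no duplicate ids under contention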
def current_task(loop=None):
"""Return a currently executed task."""
if loop is None:
@@ -42,22 +42,22 @@ def all_tasks(loop=None):
"""Return a set of all tasks for the loop."""
if loop is None:
loop = events.get_running_loop()
- # Looping over a WeakSet (_all_tasks) isn't safe as it can be updated from another
- # thread while we do so. Therefore we cast it to list prior to filtering. The list
- # cast itself requires iteration, so we repeat it several times ignoring
- # RuntimeErrors (which are not very likely to occur). See issues 34970 and 36607 for
- # details.
- i = 0
- while True:
- try:
- tasks = list(_all_tasks)
- except RuntimeError:
- i += 1
- if i >= 1000:
- raise
- else:
- break
- return {t for t in tasks
+ # Looping over a WeakSet (_all_tasks) isn't safe as it can be updated from another
+ # thread while we do so. Therefore we cast it to list prior to filtering. The list
+ # cast itself requires iteration, so we repeat it several times ignoring
+ # RuntimeErrors (which are not very likely to occur). See issues 34970 and 36607 for
+ # details.
+ i = 0
+ while True:
+ try:
+ tasks = list(_all_tasks)
+ except RuntimeError:
+ i += 1
+ if i >= 1000:
+ raise
+ else:
+ break
+ return {t for t in tasks
if futures._get_loop(t) is loop and not t.done()}
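The retry loop annotated above guards the WeakSet copy against concurrent mutation. A standalone sketch of the same bounded-retry pattern; the `snapshot` helper is illustrative, not part of the patch:

    import weakref

    def snapshot(ws, attempts=1000):
        # Copying a WeakSet can raise RuntimeError if another thread
        # mutates it mid-iteration; retry a bounded number of times,
        # then let the error propagate, mirroring all_tasks() above.
        i = 0
        while True:
            try:
                return list(ws)
            except RuntimeError:
                i += 1
                if i >= attempts:
                    raise

    tasks = weakref.WeakSet()
    assert snapshot(tasks) == []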
@@ -67,34 +67,34 @@ def _all_tasks_compat(loop=None):
# method.
if loop is None:
loop = events.get_event_loop()
- # Looping over a WeakSet (_all_tasks) isn't safe as it can be updated from another
- # thread while we do so. Therefore we cast it to list prior to filtering. The list
- # cast itself requires iteration, so we repeat it several times ignoring
- # RuntimeErrors (which are not very likely to occur). See issues 34970 and 36607 for
- # details.
- i = 0
- while True:
- try:
- tasks = list(_all_tasks)
- except RuntimeError:
- i += 1
- if i >= 1000:
- raise
- else:
- break
- return {t for t in tasks if futures._get_loop(t) is loop}
-
-
-def _set_task_name(task, name):
- if name is not None:
- try:
- set_name = task.set_name
- except AttributeError:
- pass
- else:
- set_name(name)
-
-
+ # Looping over a WeakSet (_all_tasks) isn't safe as it can be updated from another
+ # thread while we do so. Therefore we cast it to list prior to filtering. The list
+ # cast itself requires iteration, so we repeat it several times ignoring
+ # RuntimeErrors (which are not very likely to occur). See issues 34970 and 36607 for
+ # details.
+ i = 0
+ while True:
+ try:
+ tasks = list(_all_tasks)
+ except RuntimeError:
+ i += 1
+ if i >= 1000:
+ raise
+ else:
+ break
+ return {t for t in tasks if futures._get_loop(t) is loop}
+
+
+def _set_task_name(task, name):
+ if name is not None:
+ try:
+ set_name = task.set_name
+ except AttributeError:
+ pass
+ else:
+ set_name(name)
+
+
class Task(futures._PyFuture): # Inherit Python Task implementation
# from a Python Future implementation.
@@ -113,7 +113,7 @@ class Task(futures._PyFuture): # Inherit Python Task implementation
# status is still pending
_log_destroy_pending = True
- def __init__(self, coro, *, loop=None, name=None):
+ def __init__(self, coro, *, loop=None, name=None):
super().__init__(loop=loop)
if self._source_traceback:
del self._source_traceback[-1]
@@ -123,11 +123,11 @@ class Task(futures._PyFuture): # Inherit Python Task implementation
self._log_destroy_pending = False
raise TypeError(f"a coroutine was expected, got {coro!r}")
- if name is None:
- self._name = f'Task-{_task_name_counter()}'
- else:
- self._name = str(name)
-
+ if name is None:
+ self._name = f'Task-{_task_name_counter()}'
+ else:
+ self._name = str(name)
+
self._must_cancel = False
self._fut_waiter = None
self._coro = coro
@@ -147,21 +147,21 @@ class Task(futures._PyFuture): # Inherit Python Task implementation
self._loop.call_exception_handler(context)
super().__del__()
- def __class_getitem__(cls, type):
- return cls
-
+ def __class_getitem__(cls, type):
+ return cls
+
def _repr_info(self):
return base_tasks._task_repr_info(self)
- def get_coro(self):
- return self._coro
-
- def get_name(self):
- return self._name
-
- def set_name(self, value):
- self._name = str(value)
-
+ def get_coro(self):
+ return self._coro
+
+ def get_name(self):
+ return self._name
+
+ def set_name(self, value):
+ self._name = str(value)
+
def set_result(self, result):
raise RuntimeError('Task does not support set_result operation')
@@ -202,7 +202,7 @@ class Task(futures._PyFuture): # Inherit Python Task implementation
"""
return base_tasks._task_print_stack(self, limit, file)
- def cancel(self, msg=None):
+ def cancel(self, msg=None):
"""Request that this task cancel itself.
This arranges for a CancelledError to be thrown into the
@@ -226,23 +226,23 @@ class Task(futures._PyFuture): # Inherit Python Task implementation
if self.done():
return False
if self._fut_waiter is not None:
- if self._fut_waiter.cancel(msg=msg):
+ if self._fut_waiter.cancel(msg=msg):
# Leave self._fut_waiter; it may be a Task that
# catches and ignores the cancellation so we may have
# to cancel it again later.
return True
# It must be the case that self.__step is already scheduled.
self._must_cancel = True
- self._cancel_message = msg
+ self._cancel_message = msg
return True
def __step(self, exc=None):
if self.done():
- raise exceptions.InvalidStateError(
+ raise exceptions.InvalidStateError(
f'_step(): already done: {self!r}, {exc!r}')
if self._must_cancel:
- if not isinstance(exc, exceptions.CancelledError):
- exc = self._make_cancelled_error()
+ if not isinstance(exc, exceptions.CancelledError):
+ exc = self._make_cancelled_error()
self._must_cancel = False
coro = self._coro
self._fut_waiter = None
@@ -260,16 +260,16 @@ class Task(futures._PyFuture): # Inherit Python Task implementation
if self._must_cancel:
# Task is cancelled right before coro stops.
self._must_cancel = False
- super().cancel(msg=self._cancel_message)
+ super().cancel(msg=self._cancel_message)
else:
super().set_result(exc.value)
- except exceptions.CancelledError as exc:
- # Save the original exception so we can chain it later.
- self._cancelled_exc = exc
+ except exceptions.CancelledError as exc:
+ # Save the original exception so we can chain it later.
+ self._cancelled_exc = exc
super().cancel() # I.e., Future.cancel(self).
- except (KeyboardInterrupt, SystemExit) as exc:
+ except (KeyboardInterrupt, SystemExit) as exc:
super().set_exception(exc)
- raise
+ raise
except BaseException as exc:
super().set_exception(exc)
else:
@@ -294,8 +294,8 @@ class Task(futures._PyFuture): # Inherit Python Task implementation
self.__wakeup, context=self._context)
self._fut_waiter = result
if self._must_cancel:
- if self._fut_waiter.cancel(
- msg=self._cancel_message):
+ if self._fut_waiter.cancel(
+ msg=self._cancel_message):
self._must_cancel = False
else:
new_exc = RuntimeError(
@@ -326,7 +326,7 @@ class Task(futures._PyFuture): # Inherit Python Task implementation
def __wakeup(self, future):
try:
future.result()
- except BaseException as exc:
+ except BaseException as exc:
# This may also be a cancellation.
self.__step(exc)
else:
@@ -352,15 +352,15 @@ else:
Task = _CTask = _asyncio.Task
-def create_task(coro, *, name=None):
+def create_task(coro, *, name=None):
"""Schedule the execution of a coroutine object in a spawn task.
Return a Task object.
"""
loop = events.get_running_loop()
- task = loop.create_task(coro)
- _set_task_name(task, name)
- return task
+ task = loop.create_task(coro)
+ _set_task_name(task, name)
+ return task
# wait() and as_completed() similar to those in PEP 3148.
@@ -373,7 +373,7 @@ ALL_COMPLETED = concurrent.futures.ALL_COMPLETED
async def wait(fs, *, loop=None, timeout=None, return_when=ALL_COMPLETED):
"""Wait for the Futures and coroutines given by fs to complete.
- The fs iterable must not be empty.
+ The fs iterable must not be empty.
Coroutines will be wrapped in Tasks.
@@ -394,22 +394,22 @@ async def wait(fs, *, loop=None, timeout=None, return_when=ALL_COMPLETED):
raise ValueError(f'Invalid return_when value: {return_when}')
if loop is None:
- loop = events.get_running_loop()
- else:
- warnings.warn("The loop argument is deprecated since Python 3.8, "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning, stacklevel=2)
-
- fs = set(fs)
-
- if any(coroutines.iscoroutine(f) for f in fs):
- warnings.warn("The explicit passing of coroutine objects to "
- "asyncio.wait() is deprecated since Python 3.8, and "
- "scheduled for removal in Python 3.11.",
- DeprecationWarning, stacklevel=2)
-
- fs = {ensure_future(f, loop=loop) for f in fs}
-
+ loop = events.get_running_loop()
+ else:
+ warnings.warn("The loop argument is deprecated since Python 3.8, "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning, stacklevel=2)
+
+ fs = set(fs)
+
+ if any(coroutines.iscoroutine(f) for f in fs):
+ warnings.warn("The explicit passing of coroutine objects to "
+ "asyncio.wait() is deprecated since Python 3.8, and "
+ "scheduled for removal in Python 3.11.",
+ DeprecationWarning, stacklevel=2)
+
+ fs = {ensure_future(f, loop=loop) for f in fs}
+
return await _wait(fs, timeout, return_when, loop)
@@ -432,11 +432,11 @@ async def wait_for(fut, timeout, *, loop=None):
This function is a coroutine.
"""
if loop is None:
- loop = events.get_running_loop()
- else:
- warnings.warn("The loop argument is deprecated since Python 3.8, "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning, stacklevel=2)
+ loop = events.get_running_loop()
+ else:
+ warnings.warn("The loop argument is deprecated since Python 3.8, "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning, stacklevel=2)
if timeout is None:
return await fut
@@ -447,11 +447,11 @@ async def wait_for(fut, timeout, *, loop=None):
if fut.done():
return fut.result()
- await _cancel_and_wait(fut, loop=loop)
- try:
- return fut.result()
- except exceptions.CancelledError as exc:
- raise exceptions.TimeoutError() from exc
+ await _cancel_and_wait(fut, loop=loop)
+ try:
+ return fut.result()
+ except exceptions.CancelledError as exc:
+ raise exceptions.TimeoutError() from exc
waiter = loop.create_future()
timeout_handle = loop.call_later(timeout, _release_waiter, waiter)
@@ -464,16 +464,16 @@ async def wait_for(fut, timeout, *, loop=None):
# wait until the future completes or the timeout
try:
await waiter
- except exceptions.CancelledError:
- if fut.done():
- return fut.result()
- else:
- fut.remove_done_callback(cb)
- # We must ensure that the task is not running
- # after wait_for() returns.
- # See https://bugs.python.org/issue32751
- await _cancel_and_wait(fut, loop=loop)
- raise
+ except exceptions.CancelledError:
+ if fut.done():
+ return fut.result()
+ else:
+ fut.remove_done_callback(cb)
+ # We must ensure that the task is not running
+ # after wait_for() returns.
+ # See https://bugs.python.org/issue32751
+ await _cancel_and_wait(fut, loop=loop)
+ raise
if fut.done():
return fut.result()
@@ -483,13 +483,13 @@ async def wait_for(fut, timeout, *, loop=None):
# after wait_for() returns.
# See https://bugs.python.org/issue32751
await _cancel_and_wait(fut, loop=loop)
- # In case task cancellation failed with some
- # exception, we should re-raise it
- # See https://bugs.python.org/issue40607
- try:
- return fut.result()
- except exceptions.CancelledError as exc:
- raise exceptions.TimeoutError() from exc
+ # In case task cancellation failed with some
+ # exception, we should re-raise it
+ # See https://bugs.python.org/issue40607
+ try:
+ return fut.result()
+ except exceptions.CancelledError as exc:
+ raise exceptions.TimeoutError() from exc
finally:
timeout_handle.cancel()
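The wait_for() hunks above cancel the inner task first and chain TimeoutError from its CancelledError (see the bpo-32751 and bpo-40607 references in the comments). A runnable sketch of the observable behavior:

    import asyncio

    async def slow():
        await asyncio.sleep(10)

    async def main():
        try:
            await asyncio.wait_for(slow(), timeout=0.1)
        except asyncio.TimeoutError as e:
            # The inner CancelledError is attached as the cause.
            print("cause:", type(e.__cause__).__name__)

    asyncio.run(main())   # prints: cause: CancelledError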
@@ -526,8 +526,8 @@ async def _wait(fs, timeout, return_when, loop):
finally:
if timeout_handle is not None:
timeout_handle.cancel()
- for f in fs:
- f.remove_done_callback(_on_completion)
+ for f in fs:
+ f.remove_done_callback(_on_completion)
done, pending = set(), set()
for f in fs:
@@ -574,19 +574,19 @@ def as_completed(fs, *, loop=None, timeout=None):
Note: The futures 'f' are not necessarily members of fs.
"""
if futures.isfuture(fs) or coroutines.iscoroutine(fs):
- raise TypeError(f"expect an iterable of futures, not {type(fs).__name__}")
-
- if loop is not None:
- warnings.warn("The loop argument is deprecated since Python 3.8, "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning, stacklevel=2)
-
+ raise TypeError(f"expect an iterable of futures, not {type(fs).__name__}")
+
+ if loop is not None:
+ warnings.warn("The loop argument is deprecated since Python 3.8, "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning, stacklevel=2)
+
from .queues import Queue # Import here to avoid circular import problem.
done = Queue(loop=loop)
-
- if loop is None:
- loop = events.get_event_loop()
- todo = {ensure_future(f, loop=loop) for f in set(fs)}
+
+ if loop is None:
+ loop = events.get_event_loop()
+ todo = {ensure_future(f, loop=loop) for f in set(fs)}
timeout_handle = None
def _on_timeout():
@@ -607,7 +607,7 @@ def as_completed(fs, *, loop=None, timeout=None):
f = await done.get()
if f is None:
# Dummy value from _on_timeout().
- raise exceptions.TimeoutError
+ raise exceptions.TimeoutError
return f.result() # May raise f.exception().
for f in todo:
@@ -632,18 +632,18 @@ def __sleep0():
async def sleep(delay, result=None, *, loop=None):
"""Coroutine that completes after a given time (in seconds)."""
- if loop is not None:
- warnings.warn("The loop argument is deprecated since Python 3.8, "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning, stacklevel=2)
-
+ if loop is not None:
+ warnings.warn("The loop argument is deprecated since Python 3.8, "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning, stacklevel=2)
+
if delay <= 0:
await __sleep0()
return result
if loop is None:
- loop = events.get_running_loop()
-
+ loop = events.get_running_loop()
+
future = loop.create_future()
h = loop.call_later(delay,
futures._set_result_unless_cancelled,
@@ -668,8 +668,8 @@ def ensure_future(coro_or_future, *, loop=None):
return task
elif futures.isfuture(coro_or_future):
if loop is not None and loop is not futures._get_loop(coro_or_future):
- raise ValueError('The future belongs to a different loop than '
- 'the one specified as the loop argument')
+ raise ValueError('The future belongs to a different loop than '
+ 'the one specified as the loop argument')
return coro_or_future
elif inspect.isawaitable(coro_or_future):
return ensure_future(_wrap_awaitable(coro_or_future), loop=loop)
@@ -678,7 +678,7 @@ def ensure_future(coro_or_future, *, loop=None):
'required')
-@types.coroutine
+@types.coroutine
def _wrap_awaitable(awaitable):
"""Helper for asyncio.ensure_future().
@@ -687,9 +687,9 @@ def _wrap_awaitable(awaitable):
"""
return (yield from awaitable.__await__())
-_wrap_awaitable._is_coroutine = _is_coroutine
-
+_wrap_awaitable._is_coroutine = _is_coroutine
+
class _GatheringFuture(futures.Future):
"""Helper for gather().
@@ -703,12 +703,12 @@ class _GatheringFuture(futures.Future):
self._children = children
self._cancel_requested = False
- def cancel(self, msg=None):
+ def cancel(self, msg=None):
if self.done():
return False
ret = False
for child in self._children:
- if child.cancel(msg=msg):
+ if child.cancel(msg=msg):
ret = True
if ret:
# If any child tasks were actually cancelled, we should
@@ -740,23 +740,23 @@ def gather(*coros_or_futures, loop=None, return_exceptions=False):
the outer Future is *not* cancelled in this case. (This is to
prevent the cancellation of one child to cause other children to
be cancelled.)
-
- If *return_exceptions* is False, cancelling gather() after it
- has been marked done won't cancel any submitted awaitables.
- For instance, gather can be marked done after propagating an
- exception to the caller, therefore, calling ``gather.cancel()``
- after catching an exception (raised by one of the awaitables) from
- gather won't cancel any other awaitables.
+
+ If *return_exceptions* is False, cancelling gather() after it
+ has been marked done won't cancel any submitted awaitables.
+ For instance, gather can be marked done after propagating an
+ exception to the caller, therefore, calling ``gather.cancel()``
+ after catching an exception (raised by one of the awaitables) from
+ gather won't cancel any other awaitables.
"""
- if loop is not None:
- warnings.warn("The loop argument is deprecated since Python 3.8, "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning, stacklevel=2)
-
- return _gather(*coros_or_futures, loop=loop, return_exceptions=return_exceptions)
-
-
-def _gather(*coros_or_futures, loop=None, return_exceptions=False):
+ if loop is not None:
+ warnings.warn("The loop argument is deprecated since Python 3.8, "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning, stacklevel=2)
+
+ return _gather(*coros_or_futures, loop=loop, return_exceptions=return_exceptions)
+
+
+def _gather(*coros_or_futures, loop=None, return_exceptions=False):
if not coros_or_futures:
if loop is None:
loop = events.get_event_loop()
@@ -779,7 +779,7 @@ def _gather(*coros_or_futures, loop=None, return_exceptions=False):
# Check if 'fut' is cancelled first, as
# 'fut.exception()' will *raise* a CancelledError
# instead of returning it.
- exc = fut._make_cancelled_error()
+ exc = fut._make_cancelled_error()
outer.set_exception(exc)
return
else:
@@ -795,15 +795,15 @@ def _gather(*coros_or_futures, loop=None, return_exceptions=False):
for fut in children:
if fut.cancelled():
- # Check if 'fut' is cancelled first, as 'fut.exception()'
- # will *raise* a CancelledError instead of returning it.
- # Also, since we're adding the exception return value
- # to 'results' instead of raising it, don't bother
- # setting __context__. This also lets us preserve
- # calling '_make_cancelled_error()' at most once.
- res = exceptions.CancelledError(
- '' if fut._cancel_message is None else
- fut._cancel_message)
+ # Check if 'fut' is cancelled first, as 'fut.exception()'
+ # will *raise* a CancelledError instead of returning it.
+ # Also, since we're adding the exception return value
+ # to 'results' instead of raising it, don't bother
+ # setting __context__. This also lets us preserve
+ # calling '_make_cancelled_error()' at most once.
+ res = exceptions.CancelledError(
+ '' if fut._cancel_message is None else
+ fut._cancel_message)
else:
res = fut.exception()
if res is None:
@@ -814,8 +814,8 @@ def _gather(*coros_or_futures, loop=None, return_exceptions=False):
# If gather is being cancelled we must propagate the
# cancellation regardless of *return_exceptions* argument.
# See issue 32684.
- exc = fut._make_cancelled_error()
- outer.set_exception(exc)
+ exc = fut._make_cancelled_error()
+ outer.set_exception(exc)
else:
outer.set_result(results)
@@ -875,10 +875,10 @@ def shield(arg, *, loop=None):
except CancelledError:
res = None
"""
- if loop is not None:
- warnings.warn("The loop argument is deprecated since Python 3.8, "
- "and scheduled for removal in Python 3.10.",
- DeprecationWarning, stacklevel=2)
+ if loop is not None:
+ warnings.warn("The loop argument is deprecated since Python 3.8, "
+ "and scheduled for removal in Python 3.10.",
+ DeprecationWarning, stacklevel=2)
inner = ensure_future(arg, loop=loop)
if inner.done():
# Shortcut.
@@ -886,7 +886,7 @@ def shield(arg, *, loop=None):
loop = futures._get_loop(inner)
outer = loop.create_future()
- def _inner_done_callback(inner):
+ def _inner_done_callback(inner):
if outer.cancelled():
if not inner.cancelled():
# Mark inner's result as retrieved.
@@ -902,13 +902,13 @@ def shield(arg, *, loop=None):
else:
outer.set_result(inner.result())
-
- def _outer_done_callback(outer):
- if not inner.done():
- inner.remove_done_callback(_inner_done_callback)
-
- inner.add_done_callback(_inner_done_callback)
- outer.add_done_callback(_outer_done_callback)
+
+ def _outer_done_callback(outer):
+ if not inner.done():
+ inner.remove_done_callback(_inner_done_callback)
+
+ inner.add_done_callback(_inner_done_callback)
+ outer.add_done_callback(_outer_done_callback)
return outer
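The paired done-callbacks above keep shield()'s inner task alive when only the outer future is cancelled. A small sketch, assuming the usual timeout-driven cancellation:

    import asyncio

    async def critical():
        await asyncio.sleep(0.2)
        return "saved"

    async def main():
        task = asyncio.ensure_future(critical())
        try:
            # Cancelling the outer future does not cancel `task`;
            # the callback pairing above keeps the two in sync.
            await asyncio.wait_for(asyncio.shield(task), timeout=0.05)
        except asyncio.TimeoutError:
            pass
        return await task        # the inner task still completes

    assert asyncio.run(main()) == "saved"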
@@ -924,9 +924,9 @@ def run_coroutine_threadsafe(coro, loop):
def callback():
try:
futures._chain_future(ensure_future(coro, loop=loop), future)
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
if future.set_running_or_notify_cancel():
future.set_exception(exc)
raise
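The final tasks.py hunk adjusts run_coroutine_threadsafe()'s callback to re-raise SystemExit and KeyboardInterrupt rather than trapping them in the future. A minimal cross-thread usage sketch:

    import asyncio
    import threading

    loop = asyncio.new_event_loop()
    t = threading.Thread(target=loop.run_forever, daemon=True)
    t.start()

    async def add(a, b):
        return a + b

    # Returns a concurrent.futures.Future usable from the calling thread.
    fut = asyncio.run_coroutine_threadsafe(add(2, 3), loop)
    assert fut.result(timeout=5) == 5
    loop.call_soon_threadsafe(loop.stop)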
diff --git a/contrib/tools/python3/src/Lib/asyncio/threads.py b/contrib/tools/python3/src/Lib/asyncio/threads.py
index db048a8231..cce2f05e10 100644
--- a/contrib/tools/python3/src/Lib/asyncio/threads.py
+++ b/contrib/tools/python3/src/Lib/asyncio/threads.py
@@ -1,25 +1,25 @@
-"""High-level support for working with threads in asyncio"""
-
-import functools
-import contextvars
-
-from . import events
-
-
-__all__ = "to_thread",
-
-
-async def to_thread(func, /, *args, **kwargs):
- """Asynchronously run function *func* in a separate thread.
-
- Any *args and **kwargs supplied for this function are directly passed
- to *func*. Also, the current :class:`contextvars.Context` is propagated,
- allowing context variables from the main thread to be accessed in the
- separate thread.
-
- Return a coroutine that can be awaited to get the eventual result of *func*.
- """
- loop = events.get_running_loop()
- ctx = contextvars.copy_context()
- func_call = functools.partial(ctx.run, func, *args, **kwargs)
- return await loop.run_in_executor(None, func_call)
+"""High-level support for working with threads in asyncio"""
+
+import functools
+import contextvars
+
+from . import events
+
+
+__all__ = "to_thread",
+
+
+async def to_thread(func, /, *args, **kwargs):
+ """Asynchronously run function *func* in a separate thread.
+
+ Any *args and **kwargs supplied for this function are directly passed
+ to *func*. Also, the current :class:`contextvars.Context` is propagated,
+ allowing context variables from the main thread to be accessed in the
+ separate thread.
+
+ Return a coroutine that can be awaited to get the eventual result of *func*.
+ """
+ loop = events.get_running_loop()
+ ctx = contextvars.copy_context()
+ func_call = functools.partial(ctx.run, func, *args, **kwargs)
+ return await loop.run_in_executor(None, func_call)
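threads.py is re-annotated wholesale; to_thread() (new in Python 3.9) copies the caller's contextvars.Context and runs the call in the default executor. A usage sketch with an illustrative blocking function:

    import asyncio
    import time

    def blocking_io():
        time.sleep(0.1)   # stand-in for a blocking call
        return "done"

    async def main():
        # Runs in the default executor while the loop stays responsive;
        # the copied Context makes this task's contextvars visible there.
        return await asyncio.to_thread(blocking_io)

    assert asyncio.run(main()) == "done"   # requires Python 3.9+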
diff --git a/contrib/tools/python3/src/Lib/asyncio/transports.py b/contrib/tools/python3/src/Lib/asyncio/transports.py
index 45e155c94c..06143ed829 100644
--- a/contrib/tools/python3/src/Lib/asyncio/transports.py
+++ b/contrib/tools/python3/src/Lib/asyncio/transports.py
@@ -9,8 +9,8 @@ __all__ = (
class BaseTransport:
"""Base class for transports."""
- __slots__ = ('_extra',)
-
+ __slots__ = ('_extra',)
+
def __init__(self, extra=None):
if extra is None:
extra = {}
@@ -29,8 +29,8 @@ class BaseTransport:
Buffered data will be flushed asynchronously. No more data
will be received. After all buffered data is flushed, the
- protocol's connection_lost() method will (eventually) be
- called with None as its argument.
+ protocol's connection_lost() method will (eventually) be
+ called with None as its argument.
"""
raise NotImplementedError
@@ -46,8 +46,8 @@ class BaseTransport:
class ReadTransport(BaseTransport):
"""Interface for read-only transports."""
- __slots__ = ()
-
+ __slots__ = ()
+
def is_reading(self):
"""Return True if the transport is receiving."""
raise NotImplementedError
@@ -72,8 +72,8 @@ class ReadTransport(BaseTransport):
class WriteTransport(BaseTransport):
"""Interface for write-only transports."""
- __slots__ = ()
-
+ __slots__ = ()
+
def set_write_buffer_limits(self, high=None, low=None):
"""Set the high- and low-water limits for write flow control.
@@ -160,14 +160,14 @@ class Transport(ReadTransport, WriteTransport):
except writelines(), which calls write() in a loop.
"""
- __slots__ = ()
-
+ __slots__ = ()
+
class DatagramTransport(BaseTransport):
"""Interface for datagram (UDP) transports."""
- __slots__ = ()
-
+ __slots__ = ()
+
def sendto(self, data, addr=None):
"""Send data to the transport.
@@ -190,8 +190,8 @@ class DatagramTransport(BaseTransport):
class SubprocessTransport(BaseTransport):
- __slots__ = ()
-
+ __slots__ = ()
+
def get_pid(self):
"""Get subprocess id."""
raise NotImplementedError
@@ -259,8 +259,8 @@ class _FlowControlMixin(Transport):
resume_writing() may be called.
"""
- __slots__ = ('_loop', '_protocol_paused', '_high_water', '_low_water')
-
+ __slots__ = ('_loop', '_protocol_paused', '_high_water', '_low_water')
+
def __init__(self, extra=None, loop=None):
super().__init__(extra)
assert loop is not None
@@ -276,9 +276,9 @@ class _FlowControlMixin(Transport):
self._protocol_paused = True
try:
self._protocol.pause_writing()
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self._loop.call_exception_handler({
'message': 'protocol.pause_writing() failed',
'exception': exc,
@@ -292,9 +292,9 @@ class _FlowControlMixin(Transport):
self._protocol_paused = False
try:
self._protocol.resume_writing()
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self._loop.call_exception_handler({
'message': 'protocol.resume_writing() failed',
'exception': exc,
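Both the pause_writing and resume_writing hooks above follow the same policy: re-raise SystemExit and KeyboardInterrupt, and report anything else via the loop's exception handler instead of unwinding the transport. A distilled sketch of that policy; the function names are illustrative:

    def run_protocol_hook(hook, report):
        # Never swallow interpreter-exit exceptions; route ordinary
        # failures to the exception handler, as in the hunks above.
        try:
            hook()
        except (SystemExit, KeyboardInterrupt):
            raise
        except BaseException as exc:
            report({'message': 'protocol hook failed', 'exception': exc})

    errors = []
    run_protocol_hook(lambda: 1 / 0, errors.append)
    assert isinstance(errors[0]['exception'], ZeroDivisionError)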
diff --git a/contrib/tools/python3/src/Lib/asyncio/trsock.py b/contrib/tools/python3/src/Lib/asyncio/trsock.py
index e9ebcc3261..80ac049e85 100644
--- a/contrib/tools/python3/src/Lib/asyncio/trsock.py
+++ b/contrib/tools/python3/src/Lib/asyncio/trsock.py
@@ -1,206 +1,206 @@
-import socket
-import warnings
-
-
-class TransportSocket:
-
- """A socket-like wrapper for exposing real transport sockets.
-
- These objects can be safely returned by APIs like
- `transport.get_extra_info('socket')`. All potentially disruptive
- operations (like "socket.close()") are banned.
- """
-
- __slots__ = ('_sock',)
-
- def __init__(self, sock: socket.socket):
- self._sock = sock
-
- def _na(self, what):
- warnings.warn(
- f"Using {what} on sockets returned from get_extra_info('socket') "
- f"will be prohibited in asyncio 3.9. Please report your use case "
- f"to bugs.python.org.",
- DeprecationWarning, source=self)
-
- @property
- def family(self):
- return self._sock.family
-
- @property
- def type(self):
- return self._sock.type
-
- @property
- def proto(self):
- return self._sock.proto
-
- def __repr__(self):
- s = (
- f"<asyncio.TransportSocket fd={self.fileno()}, "
- f"family={self.family!s}, type={self.type!s}, "
- f"proto={self.proto}"
- )
-
- if self.fileno() != -1:
- try:
- laddr = self.getsockname()
- if laddr:
- s = f"{s}, laddr={laddr}"
- except socket.error:
- pass
- try:
- raddr = self.getpeername()
- if raddr:
- s = f"{s}, raddr={raddr}"
- except socket.error:
- pass
-
- return f"{s}>"
-
- def __getstate__(self):
- raise TypeError("Cannot serialize asyncio.TransportSocket object")
-
- def fileno(self):
- return self._sock.fileno()
-
- def dup(self):
- return self._sock.dup()
-
- def get_inheritable(self):
- return self._sock.get_inheritable()
-
- def shutdown(self, how):
- # asyncio doesn't currently provide a high-level transport API
- # to shut down the connection.
- self._sock.shutdown(how)
-
- def getsockopt(self, *args, **kwargs):
- return self._sock.getsockopt(*args, **kwargs)
-
- def setsockopt(self, *args, **kwargs):
- self._sock.setsockopt(*args, **kwargs)
-
- def getpeername(self):
- return self._sock.getpeername()
-
- def getsockname(self):
- return self._sock.getsockname()
-
- def getsockbyname(self):
- return self._sock.getsockbyname()
-
- def accept(self):
- self._na('accept() method')
- return self._sock.accept()
-
- def connect(self, *args, **kwargs):
- self._na('connect() method')
- return self._sock.connect(*args, **kwargs)
-
- def connect_ex(self, *args, **kwargs):
- self._na('connect_ex() method')
- return self._sock.connect_ex(*args, **kwargs)
-
- def bind(self, *args, **kwargs):
- self._na('bind() method')
- return self._sock.bind(*args, **kwargs)
-
- def ioctl(self, *args, **kwargs):
- self._na('ioctl() method')
- return self._sock.ioctl(*args, **kwargs)
-
- def listen(self, *args, **kwargs):
- self._na('listen() method')
- return self._sock.listen(*args, **kwargs)
-
- def makefile(self):
- self._na('makefile() method')
- return self._sock.makefile()
-
- def sendfile(self, *args, **kwargs):
- self._na('sendfile() method')
- return self._sock.sendfile(*args, **kwargs)
-
- def close(self):
- self._na('close() method')
- return self._sock.close()
-
- def detach(self):
- self._na('detach() method')
- return self._sock.detach()
-
- def sendmsg_afalg(self, *args, **kwargs):
- self._na('sendmsg_afalg() method')
- return self._sock.sendmsg_afalg(*args, **kwargs)
-
- def sendmsg(self, *args, **kwargs):
- self._na('sendmsg() method')
- return self._sock.sendmsg(*args, **kwargs)
-
- def sendto(self, *args, **kwargs):
- self._na('sendto() method')
- return self._sock.sendto(*args, **kwargs)
-
- def send(self, *args, **kwargs):
- self._na('send() method')
- return self._sock.send(*args, **kwargs)
-
- def sendall(self, *args, **kwargs):
- self._na('sendall() method')
- return self._sock.sendall(*args, **kwargs)
-
- def set_inheritable(self, *args, **kwargs):
- self._na('set_inheritable() method')
- return self._sock.set_inheritable(*args, **kwargs)
-
- def share(self, process_id):
- self._na('share() method')
- return self._sock.share(process_id)
-
- def recv_into(self, *args, **kwargs):
- self._na('recv_into() method')
- return self._sock.recv_into(*args, **kwargs)
-
- def recvfrom_into(self, *args, **kwargs):
- self._na('recvfrom_into() method')
- return self._sock.recvfrom_into(*args, **kwargs)
-
- def recvmsg_into(self, *args, **kwargs):
- self._na('recvmsg_into() method')
- return self._sock.recvmsg_into(*args, **kwargs)
-
- def recvmsg(self, *args, **kwargs):
- self._na('recvmsg() method')
- return self._sock.recvmsg(*args, **kwargs)
-
- def recvfrom(self, *args, **kwargs):
- self._na('recvfrom() method')
- return self._sock.recvfrom(*args, **kwargs)
-
- def recv(self, *args, **kwargs):
- self._na('recv() method')
- return self._sock.recv(*args, **kwargs)
-
- def settimeout(self, value):
- if value == 0:
- return
- raise ValueError(
- 'settimeout(): only 0 timeout is allowed on transport sockets')
-
- def gettimeout(self):
- return 0
-
- def setblocking(self, flag):
- if not flag:
- return
- raise ValueError(
- 'setblocking(): transport sockets cannot be blocking')
-
- def __enter__(self):
- self._na('context manager protocol')
- return self._sock.__enter__()
-
- def __exit__(self, *err):
- self._na('context manager protocol')
- return self._sock.__exit__(*err)
+import socket
+import warnings
+
+
+class TransportSocket:
+
+ """A socket-like wrapper for exposing real transport sockets.
+
+ These objects can be safely returned by APIs like
+ `transport.get_extra_info('socket')`. All potentially disruptive
+ operations (like "socket.close()") are banned.
+ """
+
+ __slots__ = ('_sock',)
+
+ def __init__(self, sock: socket.socket):
+ self._sock = sock
+
+ def _na(self, what):
+ warnings.warn(
+ f"Using {what} on sockets returned from get_extra_info('socket') "
+ f"will be prohibited in asyncio 3.9. Please report your use case "
+ f"to bugs.python.org.",
+ DeprecationWarning, source=self)
+
+ @property
+ def family(self):
+ return self._sock.family
+
+ @property
+ def type(self):
+ return self._sock.type
+
+ @property
+ def proto(self):
+ return self._sock.proto
+
+ def __repr__(self):
+ s = (
+ f"<asyncio.TransportSocket fd={self.fileno()}, "
+ f"family={self.family!s}, type={self.type!s}, "
+ f"proto={self.proto}"
+ )
+
+ if self.fileno() != -1:
+ try:
+ laddr = self.getsockname()
+ if laddr:
+ s = f"{s}, laddr={laddr}"
+ except socket.error:
+ pass
+ try:
+ raddr = self.getpeername()
+ if raddr:
+ s = f"{s}, raddr={raddr}"
+ except socket.error:
+ pass
+
+ return f"{s}>"
+
+ def __getstate__(self):
+ raise TypeError("Cannot serialize asyncio.TransportSocket object")
+
+ def fileno(self):
+ return self._sock.fileno()
+
+ def dup(self):
+ return self._sock.dup()
+
+ def get_inheritable(self):
+ return self._sock.get_inheritable()
+
+ def shutdown(self, how):
+ # asyncio doesn't currently provide a high-level transport API
+ # to shut down the connection.
+ self._sock.shutdown(how)
+
+ def getsockopt(self, *args, **kwargs):
+ return self._sock.getsockopt(*args, **kwargs)
+
+ def setsockopt(self, *args, **kwargs):
+ self._sock.setsockopt(*args, **kwargs)
+
+ def getpeername(self):
+ return self._sock.getpeername()
+
+ def getsockname(self):
+ return self._sock.getsockname()
+
+ def getsockbyname(self):
+ return self._sock.getsockbyname()
+
+ def accept(self):
+ self._na('accept() method')
+ return self._sock.accept()
+
+ def connect(self, *args, **kwargs):
+ self._na('connect() method')
+ return self._sock.connect(*args, **kwargs)
+
+ def connect_ex(self, *args, **kwargs):
+ self._na('connect_ex() method')
+ return self._sock.connect_ex(*args, **kwargs)
+
+ def bind(self, *args, **kwargs):
+ self._na('bind() method')
+ return self._sock.bind(*args, **kwargs)
+
+ def ioctl(self, *args, **kwargs):
+ self._na('ioctl() method')
+ return self._sock.ioctl(*args, **kwargs)
+
+ def listen(self, *args, **kwargs):
+ self._na('listen() method')
+ return self._sock.listen(*args, **kwargs)
+
+ def makefile(self):
+ self._na('makefile() method')
+ return self._sock.makefile()
+
+ def sendfile(self, *args, **kwargs):
+ self._na('sendfile() method')
+ return self._sock.sendfile(*args, **kwargs)
+
+ def close(self):
+ self._na('close() method')
+ return self._sock.close()
+
+ def detach(self):
+ self._na('detach() method')
+ return self._sock.detach()
+
+ def sendmsg_afalg(self, *args, **kwargs):
+ self._na('sendmsg_afalg() method')
+ return self._sock.sendmsg_afalg(*args, **kwargs)
+
+ def sendmsg(self, *args, **kwargs):
+ self._na('sendmsg() method')
+ return self._sock.sendmsg(*args, **kwargs)
+
+ def sendto(self, *args, **kwargs):
+ self._na('sendto() method')
+ return self._sock.sendto(*args, **kwargs)
+
+ def send(self, *args, **kwargs):
+ self._na('send() method')
+ return self._sock.send(*args, **kwargs)
+
+ def sendall(self, *args, **kwargs):
+ self._na('sendall() method')
+ return self._sock.sendall(*args, **kwargs)
+
+ def set_inheritable(self, *args, **kwargs):
+ self._na('set_inheritable() method')
+ return self._sock.set_inheritable(*args, **kwargs)
+
+ def share(self, process_id):
+ self._na('share() method')
+ return self._sock.share(process_id)
+
+ def recv_into(self, *args, **kwargs):
+ self._na('recv_into() method')
+ return self._sock.recv_into(*args, **kwargs)
+
+ def recvfrom_into(self, *args, **kwargs):
+ self._na('recvfrom_into() method')
+ return self._sock.recvfrom_into(*args, **kwargs)
+
+ def recvmsg_into(self, *args, **kwargs):
+ self._na('recvmsg_into() method')
+ return self._sock.recvmsg_into(*args, **kwargs)
+
+ def recvmsg(self, *args, **kwargs):
+ self._na('recvmsg() method')
+ return self._sock.recvmsg(*args, **kwargs)
+
+ def recvfrom(self, *args, **kwargs):
+ self._na('recvfrom() method')
+ return self._sock.recvfrom(*args, **kwargs)
+
+ def recv(self, *args, **kwargs):
+ self._na('recv() method')
+ return self._sock.recv(*args, **kwargs)
+
+ def settimeout(self, value):
+ if value == 0:
+ return
+ raise ValueError(
+ 'settimeout(): only 0 timeout is allowed on transport sockets')
+
+ def gettimeout(self):
+ return 0
+
+ def setblocking(self, flag):
+ if not flag:
+ return
+ raise ValueError(
+ 'setblocking(): transport sockets cannot be blocking')
+
+ def __enter__(self):
+ self._na('context manager protocol')
+ return self._sock.__enter__()
+
+ def __exit__(self, *err):
+ self._na('context manager protocol')
+ return self._sock.__exit__(*err)
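trsock.py is re-annotated wholesale. TransportSocket forwards read-only accessors, deprecates disruptive calls via _na(), and pins the timeout and blocking state. A short sketch poking at those constraints; note that asyncio.trsock is a private module, imported here purely for illustration:

    import socket
    from asyncio.trsock import TransportSocket   # private module

    raw = socket.socket()
    tsock = TransportSocket(raw)
    print(tsock.family, tsock.gettimeout())   # passthrough; timeout fixed at 0
    try:
        tsock.settimeout(1.0)                 # only a 0 timeout is accepted
    except ValueError as exc:
        print(exc)
    tsock.setblocking(False)                  # no-op: already non-blocking
    raw.close()                               # close the real socket directly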
diff --git a/contrib/tools/python3/src/Lib/asyncio/unix_events.py b/contrib/tools/python3/src/Lib/asyncio/unix_events.py
index eecbc101ee..b553e2010b 100644
--- a/contrib/tools/python3/src/Lib/asyncio/unix_events.py
+++ b/contrib/tools/python3/src/Lib/asyncio/unix_events.py
@@ -2,7 +2,7 @@
import errno
import io
-import itertools
+import itertools
import os
import selectors
import signal
@@ -18,7 +18,7 @@ from . import base_subprocess
from . import constants
from . import coroutines
from . import events
-from . import exceptions
+from . import exceptions
from . import futures
from . import selector_events
from . import tasks
@@ -29,9 +29,9 @@ from .log import logger
__all__ = (
'SelectorEventLoop',
'AbstractChildWatcher', 'SafeChildWatcher',
- 'FastChildWatcher', 'PidfdChildWatcher',
- 'MultiLoopChildWatcher', 'ThreadedChildWatcher',
- 'DefaultEventLoopPolicy',
+ 'FastChildWatcher', 'PidfdChildWatcher',
+ 'MultiLoopChildWatcher', 'ThreadedChildWatcher',
+ 'DefaultEventLoopPolicy',
)
@@ -101,7 +101,7 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop):
try:
# Register a dummy signal handler to ask Python to write the signal
- # number in the wakeup file descriptor. _process_self_data() will
+ # number in the wakeup file descriptor. _process_self_data() will
# read signal numbers from this file descriptor to handle signals.
signal.signal(sig, _sighandler_noop)
@@ -171,8 +171,8 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop):
if not isinstance(sig, int):
raise TypeError(f'sig must be an int, not {sig!r}')
- if sig not in signal.valid_signals():
- raise ValueError(f'invalid signal number {sig}')
+ if sig not in signal.valid_signals():
+ raise ValueError(f'invalid signal number {sig}')
def _make_read_pipe_transport(self, pipe, protocol, waiter=None,
extra=None):
@@ -186,13 +186,13 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop):
stdin, stdout, stderr, bufsize,
extra=None, **kwargs):
with events.get_child_watcher() as watcher:
- if not watcher.is_active():
- # Check early.
- # Raising exception before process creation
- # prevents subprocess execution if the watcher
- # is not ready to handle it.
- raise RuntimeError("asyncio.get_child_watcher() is not activated, "
- "subprocess support is not installed.")
+ if not watcher.is_active():
+ # Check early.
+ # Raising exception before process creation
+ # prevents subprocess execution if the watcher
+ # is not ready to handle it.
+ raise RuntimeError("asyncio.get_child_watcher() is not activated, "
+ "subprocess support is not installed.")
waiter = self.create_future()
transp = _UnixSubprocessTransport(self, protocol, args, shell,
stdin, stdout, stderr, bufsize,
@@ -203,9 +203,9 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop):
self._child_watcher_callback, transp)
try:
await waiter
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException:
transp.close()
await transp._wait()
raise
@@ -323,24 +323,24 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop):
server._start_serving()
# Skip one loop iteration so that all 'loop.add_reader'
# go through.
- await tasks.sleep(0)
+ await tasks.sleep(0)
return server
async def _sock_sendfile_native(self, sock, file, offset, count):
try:
os.sendfile
- except AttributeError:
- raise exceptions.SendfileNotAvailableError(
+ except AttributeError:
+ raise exceptions.SendfileNotAvailableError(
"os.sendfile() is not available")
try:
fileno = file.fileno()
except (AttributeError, io.UnsupportedOperation) as err:
- raise exceptions.SendfileNotAvailableError("not a regular file")
+ raise exceptions.SendfileNotAvailableError("not a regular file")
try:
fsize = os.fstat(fileno).st_size
- except OSError:
- raise exceptions.SendfileNotAvailableError("not a regular file")
+ except OSError:
+ raise exceptions.SendfileNotAvailableError("not a regular file")
blocksize = count if count else fsize
if not blocksize:
return 0 # empty file
@@ -394,16 +394,16 @@ class _UnixSelectorEventLoop(selector_events.BaseSelectorEventLoop):
# one being 'file' is not a regular mmap(2)-like
# file, in which case we'll fall back on using
# plain send().
- err = exceptions.SendfileNotAvailableError(
+ err = exceptions.SendfileNotAvailableError(
"os.sendfile call failed")
self._sock_sendfile_update_filepos(fileno, offset, total_sent)
fut.set_exception(err)
else:
self._sock_sendfile_update_filepos(fileno, offset, total_sent)
fut.set_exception(exc)
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self._sock_sendfile_update_filepos(fileno, offset, total_sent)
fut.set_exception(exc)
else:
@@ -445,7 +445,7 @@ class _UnixReadPipeTransport(transports.ReadTransport):
self._fileno = pipe.fileno()
self._protocol = protocol
self._closing = False
- self._paused = False
+ self._paused = False
mode = os.fstat(self._fileno).st_mode
if not (stat.S_ISFIFO(mode) or
@@ -507,20 +507,20 @@ class _UnixReadPipeTransport(transports.ReadTransport):
self._loop.call_soon(self._call_connection_lost, None)
def pause_reading(self):
- if self._closing or self._paused:
- return
- self._paused = True
+ if self._closing or self._paused:
+ return
+ self._paused = True
self._loop._remove_reader(self._fileno)
- if self._loop.get_debug():
- logger.debug("%r pauses reading", self)
+ if self._loop.get_debug():
+ logger.debug("%r pauses reading", self)
def resume_reading(self):
- if self._closing or not self._paused:
- return
- self._paused = False
+ if self._closing or not self._paused:
+ return
+ self._paused = False
self._loop._add_reader(self._fileno, self._read_ready)
- if self._loop.get_debug():
- logger.debug("%r resumes reading", self)
+ if self._loop.get_debug():
+ logger.debug("%r resumes reading", self)
def set_protocol(self, protocol):
self._protocol = protocol
@@ -535,9 +535,9 @@ class _UnixReadPipeTransport(transports.ReadTransport):
if not self._closing:
self._close(None)
- def __del__(self, _warn=warnings.warn):
+ def __del__(self, _warn=warnings.warn):
if self._pipe is not None:
- _warn(f"unclosed transport {self!r}", ResourceWarning, source=self)
+ _warn(f"unclosed transport {self!r}", ResourceWarning, source=self)
self._pipe.close()
def _fatal_error(self, exc, message='Fatal error on pipe transport'):
@@ -665,9 +665,9 @@ class _UnixWritePipeTransport(transports._FlowControlMixin,
n = os.write(self._fileno, data)
except (BlockingIOError, InterruptedError):
n = 0
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self._conn_lost += 1
self._fatal_error(exc, 'Fatal write error on pipe transport')
return
@@ -687,9 +687,9 @@ class _UnixWritePipeTransport(transports._FlowControlMixin,
n = os.write(self._fileno, self._buffer)
except (BlockingIOError, InterruptedError):
pass
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
self._buffer.clear()
self._conn_lost += 1
# Remove the writer here; _fatal_error() doesn't do it
@@ -734,9 +734,9 @@ class _UnixWritePipeTransport(transports._FlowControlMixin,
# write_eof() is all we need to close the write pipe
self.write_eof()
- def __del__(self, _warn=warnings.warn):
+ def __del__(self, _warn=warnings.warn):
if self._pipe is not None:
- _warn(f"unclosed transport {self!r}", ResourceWarning, source=self)
+ _warn(f"unclosed transport {self!r}", ResourceWarning, source=self)
self._pipe.close()
def abort(self):
@@ -744,7 +744,7 @@ class _UnixWritePipeTransport(transports._FlowControlMixin,
def _fatal_error(self, exc, message='Fatal error on pipe transport'):
# should be called by exception handler only
- if isinstance(exc, OSError):
+ if isinstance(exc, OSError):
if self._loop.get_debug():
logger.debug("%r: %s", self, message, exc_info=True)
else:
@@ -785,18 +785,18 @@ class _UnixSubprocessTransport(base_subprocess.BaseSubprocessTransport):
# other end). Notably this is needed on AIX, and works
# just fine on other platforms.
stdin, stdin_w = socket.socketpair()
- try:
- self._proc = subprocess.Popen(
- args, shell=shell, stdin=stdin, stdout=stdout, stderr=stderr,
- universal_newlines=False, bufsize=bufsize, **kwargs)
- if stdin_w is not None:
- stdin.close()
- self._proc.stdin = open(stdin_w.detach(), 'wb', buffering=bufsize)
- stdin_w = None
- finally:
- if stdin_w is not None:
- stdin.close()
- stdin_w.close()
+ try:
+ self._proc = subprocess.Popen(
+ args, shell=shell, stdin=stdin, stdout=stdout, stderr=stderr,
+ universal_newlines=False, bufsize=bufsize, **kwargs)
+ if stdin_w is not None:
+ stdin.close()
+ self._proc.stdin = open(stdin_w.detach(), 'wb', buffering=bufsize)
+ stdin_w = None
+ finally:
+ if stdin_w is not None:
+ stdin.close()
+ stdin_w.close()
class AbstractChildWatcher:
@@ -858,15 +858,15 @@ class AbstractChildWatcher:
"""
raise NotImplementedError()
- def is_active(self):
- """Return ``True`` if the watcher is active and is used by the event loop.
-
- Return True if the watcher is installed and ready to handle process exit
- notifications.
-
- """
- raise NotImplementedError()
-
+ def is_active(self):
+ """Return ``True`` if the watcher is active and is used by the event loop.
+
+ Return True if the watcher is installed and ready to handle process exit
+ notifications.
+
+ """
+ raise NotImplementedError()
+
def __enter__(self):
"""Enter the watcher's context and allow starting new processes
@@ -878,98 +878,98 @@ class AbstractChildWatcher:
raise NotImplementedError()
-class PidfdChildWatcher(AbstractChildWatcher):
- """Child watcher implementation using Linux's pid file descriptors.
-
- This child watcher polls process file descriptors (pidfds) to await child
- process termination. In some respects, PidfdChildWatcher is a "Goldilocks"
- child watcher implementation. It doesn't require signals or threads, doesn't
- interfere with any processes launched outside the event loop, and scales
- linearly with the number of subprocesses launched by the event loop. The
- main disadvantage is that pidfds are specific to Linux, and only work on
- recent (5.3+) kernels.
- """
-
- def __init__(self):
- self._loop = None
- self._callbacks = {}
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_value, exc_traceback):
- pass
-
- def is_active(self):
- return self._loop is not None and self._loop.is_running()
-
- def close(self):
- self.attach_loop(None)
-
- def attach_loop(self, loop):
- if self._loop is not None and loop is None and self._callbacks:
- warnings.warn(
- 'A loop is being detached '
- 'from a child watcher with pending handlers',
- RuntimeWarning)
- for pidfd, _, _ in self._callbacks.values():
- self._loop._remove_reader(pidfd)
- os.close(pidfd)
- self._callbacks.clear()
- self._loop = loop
-
- def add_child_handler(self, pid, callback, *args):
- existing = self._callbacks.get(pid)
- if existing is not None:
- self._callbacks[pid] = existing[0], callback, args
- else:
- pidfd = os.pidfd_open(pid)
- self._loop._add_reader(pidfd, self._do_wait, pid)
- self._callbacks[pid] = pidfd, callback, args
-
- def _do_wait(self, pid):
- pidfd, callback, args = self._callbacks.pop(pid)
- self._loop._remove_reader(pidfd)
- try:
- _, status = os.waitpid(pid, 0)
- except ChildProcessError:
- # The child process is already reaped
- # (may happen if waitpid() is called elsewhere).
- returncode = 255
- logger.warning(
- "child process pid %d exit status already read: "
- " will report returncode 255",
- pid)
- else:
- returncode = _compute_returncode(status)
-
- os.close(pidfd)
- callback(pid, returncode, *args)
-
- def remove_child_handler(self, pid):
- try:
- pidfd, _, _ = self._callbacks.pop(pid)
- except KeyError:
- return False
- self._loop._remove_reader(pidfd)
- os.close(pidfd)
- return True
-
-
-def _compute_returncode(status):
- if os.WIFSIGNALED(status):
- # The child process died because of a signal.
- return -os.WTERMSIG(status)
- elif os.WIFEXITED(status):
- # The child process exited (e.g sys.exit()).
- return os.WEXITSTATUS(status)
- else:
- # The child exited, but we don't understand its status.
- # This shouldn't happen, but if it does, let's just
- # return that status; perhaps that helps debug it.
- return status
-
-
+class PidfdChildWatcher(AbstractChildWatcher):
+ """Child watcher implementation using Linux's pid file descriptors.
+
+ This child watcher polls process file descriptors (pidfds) to await child
+ process termination. In some respects, PidfdChildWatcher is a "Goldilocks"
+ child watcher implementation. It doesn't require signals or threads, doesn't
+ interfere with any processes launched outside the event loop, and scales
+ linearly with the number of subprocesses launched by the event loop. The
+ main disadvantage is that pidfds are specific to Linux, and only work on
+ recent (5.3+) kernels.
+ """
+
+ def __init__(self):
+ self._loop = None
+ self._callbacks = {}
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_value, exc_traceback):
+ pass
+
+ def is_active(self):
+ return self._loop is not None and self._loop.is_running()
+
+ def close(self):
+ self.attach_loop(None)
+
+ def attach_loop(self, loop):
+ if self._loop is not None and loop is None and self._callbacks:
+ warnings.warn(
+ 'A loop is being detached '
+ 'from a child watcher with pending handlers',
+ RuntimeWarning)
+ for pidfd, _, _ in self._callbacks.values():
+ self._loop._remove_reader(pidfd)
+ os.close(pidfd)
+ self._callbacks.clear()
+ self._loop = loop
+
+ def add_child_handler(self, pid, callback, *args):
+ existing = self._callbacks.get(pid)
+ if existing is not None:
+ self._callbacks[pid] = existing[0], callback, args
+ else:
+ pidfd = os.pidfd_open(pid)
+ self._loop._add_reader(pidfd, self._do_wait, pid)
+ self._callbacks[pid] = pidfd, callback, args
+
+ def _do_wait(self, pid):
+ pidfd, callback, args = self._callbacks.pop(pid)
+ self._loop._remove_reader(pidfd)
+ try:
+ _, status = os.waitpid(pid, 0)
+ except ChildProcessError:
+ # The child process is already reaped
+ # (may happen if waitpid() is called elsewhere).
+ returncode = 255
+ logger.warning(
+ "child process pid %d exit status already read: "
+ " will report returncode 255",
+ pid)
+ else:
+ returncode = _compute_returncode(status)
+
+ os.close(pidfd)
+ callback(pid, returncode, *args)
+
+ def remove_child_handler(self, pid):
+ try:
+ pidfd, _, _ = self._callbacks.pop(pid)
+ except KeyError:
+ return False
+ self._loop._remove_reader(pidfd)
+ os.close(pidfd)
+ return True
+
+
+def _compute_returncode(status):
+ if os.WIFSIGNALED(status):
+ # The child process died because of a signal.
+ return -os.WTERMSIG(status)
+ elif os.WIFEXITED(status):
+ # The child process exited (e.g sys.exit()).
+ return os.WEXITSTATUS(status)
+ else:
+ # The child exited, but we don't understand its status.
+ # This shouldn't happen, but if it does, let's just
+ # return that status; perhaps that helps debug it.
+ return status
+
+
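
Two building blocks restored above deserve a note: os.pidfd_open() returns a descriptor that becomes readable when the target process exits, and _compute_returncode() maps a raw waitpid() status to a conventional return code. A standalone sketch of both, assuming Linux 5.3+ and Python 3.9+:

import os
import selectors
import subprocess

proc = subprocess.Popen(["sleep", "0.1"])

# The pidfd becomes readable once the process terminates: no signals,
# no extra threads, one descriptor per child.
pidfd = os.pidfd_open(proc.pid)
with selectors.DefaultSelector() as sel:
    sel.register(pidfd, selectors.EVENT_READ)
    sel.select()  # blocks until the child exits

_, status = os.waitpid(proc.pid, 0)
if os.WIFSIGNALED(status):
    returncode = -os.WTERMSIG(status)    # killed by a signal
elif os.WIFEXITED(status):
    returncode = os.WEXITSTATUS(status)  # normal exit
else:
    returncode = status                  # unexpected: report raw status
os.close(pidfd)
print(returncode)  # 0
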
class BaseChildWatcher(AbstractChildWatcher):
def __init__(self):
@@ -979,9 +979,9 @@ class BaseChildWatcher(AbstractChildWatcher):
def close(self):
self.attach_loop(None)
- def is_active(self):
- return self._loop is not None and self._loop.is_running()
-
+ def is_active(self):
+ return self._loop is not None and self._loop.is_running()
+
def _do_waitpid(self, expected_pid):
raise NotImplementedError()
@@ -1011,9 +1011,9 @@ class BaseChildWatcher(AbstractChildWatcher):
def _sig_chld(self):
try:
self._do_waitpid_all()
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException as exc:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException as exc:
# self._loop should always be available here
# as '_sig_chld' is added as a signal handler
# in 'attach_loop'
@@ -1080,7 +1080,7 @@ class SafeChildWatcher(BaseChildWatcher):
# The child process is still alive.
return
- returncode = _compute_returncode(status)
+ returncode = _compute_returncode(status)
if self._loop.get_debug():
logger.debug('process %s exited with returncode %s',
expected_pid, returncode)
@@ -1173,7 +1173,7 @@ class FastChildWatcher(BaseChildWatcher):
# A child process is still alive.
return
- returncode = _compute_returncode(status)
+ returncode = _compute_returncode(status)
with self._lock:
try:
@@ -1202,220 +1202,220 @@ class FastChildWatcher(BaseChildWatcher):
callback(pid, returncode, *args)
-class MultiLoopChildWatcher(AbstractChildWatcher):
-    """A watcher that doesn't require a running loop in the main thread.
-
- This implementation registers a SIGCHLD signal handler on
- instantiation (which may conflict with other code that
-    installs its own handler for this signal).
-
-    The solution is safe, but it has a significant overhead when
-    handling a large number of processes (*O(n)* each time a
- SIGCHLD is received).
- """
-
- # Implementation note:
-    # The class keeps compatibility with the AbstractChildWatcher ABC.
-    # To achieve this it has an empty attach_loop() method,
-    # doesn't accept an explicit loop argument for
-    # add_child_handler()/remove_child_handler(),
-    # and retrieves the current loop with get_running_loop().
-
- def __init__(self):
- self._callbacks = {}
- self._saved_sighandler = None
-
- def is_active(self):
- return self._saved_sighandler is not None
-
- def close(self):
- self._callbacks.clear()
- if self._saved_sighandler is None:
- return
-
- handler = signal.getsignal(signal.SIGCHLD)
- if handler != self._sig_chld:
- logger.warning("SIGCHLD handler was changed by outside code")
- else:
- signal.signal(signal.SIGCHLD, self._saved_sighandler)
- self._saved_sighandler = None
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- pass
-
- def add_child_handler(self, pid, callback, *args):
- loop = events.get_running_loop()
- self._callbacks[pid] = (loop, callback, args)
-
- # Prevent a race condition in case the child is already terminated.
- self._do_waitpid(pid)
-
- def remove_child_handler(self, pid):
- try:
- del self._callbacks[pid]
- return True
- except KeyError:
- return False
-
- def attach_loop(self, loop):
-        # Don't save the loop, but initialize the watcher if called for
-        # the first time. The reason to do it here is that attach_loop()
-        # is called from the unix policy only for the main thread, and
-        # the main thread is required to subscribe to the SIGCHLD signal.
- if self._saved_sighandler is not None:
- return
-
- self._saved_sighandler = signal.signal(signal.SIGCHLD, self._sig_chld)
- if self._saved_sighandler is None:
- logger.warning("Previous SIGCHLD handler was set by non-Python code, "
- "restore to default handler on watcher close.")
- self._saved_sighandler = signal.SIG_DFL
-
- # Set SA_RESTART to limit EINTR occurrences.
- signal.siginterrupt(signal.SIGCHLD, False)
-
- def _do_waitpid_all(self):
- for pid in list(self._callbacks):
- self._do_waitpid(pid)
-
- def _do_waitpid(self, expected_pid):
- assert expected_pid > 0
-
- try:
- pid, status = os.waitpid(expected_pid, os.WNOHANG)
- except ChildProcessError:
- # The child process is already reaped
- # (may happen if waitpid() is called elsewhere).
- pid = expected_pid
- returncode = 255
- logger.warning(
- "Unknown child process pid %d, will report returncode 255",
- pid)
- debug_log = False
- else:
- if pid == 0:
- # The child process is still alive.
- return
-
- returncode = _compute_returncode(status)
- debug_log = True
- try:
- loop, callback, args = self._callbacks.pop(pid)
- except KeyError: # pragma: no cover
- # May happen if .remove_child_handler() is called
- # after os.waitpid() returns.
- logger.warning("Child watcher got an unexpected pid: %r",
- pid, exc_info=True)
- else:
- if loop.is_closed():
- logger.warning("Loop %r that handles pid %r is closed", loop, pid)
- else:
- if debug_log and loop.get_debug():
- logger.debug('process %s exited with returncode %s',
- expected_pid, returncode)
- loop.call_soon_threadsafe(callback, pid, returncode, *args)
-
- def _sig_chld(self, signum, frame):
- try:
- self._do_waitpid_all()
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException:
- logger.warning('Unknown exception in SIGCHLD handler', exc_info=True)
-
-
-class ThreadedChildWatcher(AbstractChildWatcher):
- """Threaded child watcher implementation.
-
-    The watcher uses a thread per process
-    to wait for the process to finish.
-
-    It doesn't require subscribing to a POSIX signal,
-    but thread creation is not free.
-
-    The watcher has O(1) complexity; its performance doesn't depend
-    on the number of spawned processes.
- """
-
- def __init__(self):
- self._pid_counter = itertools.count(0)
- self._threads = {}
-
- def is_active(self):
- return True
-
- def close(self):
- self._join_threads()
-
- def _join_threads(self):
- """Internal: Join all non-daemon threads"""
- threads = [thread for thread in list(self._threads.values())
- if thread.is_alive() and not thread.daemon]
- for thread in threads:
- thread.join()
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- pass
-
- def __del__(self, _warn=warnings.warn):
- threads = [thread for thread in list(self._threads.values())
- if thread.is_alive()]
- if threads:
- _warn(f"{self.__class__} has registered but not finished child processes",
- ResourceWarning,
- source=self)
-
- def add_child_handler(self, pid, callback, *args):
- loop = events.get_running_loop()
- thread = threading.Thread(target=self._do_waitpid,
- name=f"waitpid-{next(self._pid_counter)}",
- args=(loop, pid, callback, args),
- daemon=True)
- self._threads[pid] = thread
- thread.start()
-
- def remove_child_handler(self, pid):
- # asyncio never calls remove_child_handler() !!!
-        # The method is a no-op but is implemented because
- # abstract base classes require it.
- return True
-
- def attach_loop(self, loop):
- pass
-
- def _do_waitpid(self, loop, expected_pid, callback, args):
- assert expected_pid > 0
-
- try:
- pid, status = os.waitpid(expected_pid, 0)
- except ChildProcessError:
- # The child process is already reaped
- # (may happen if waitpid() is called elsewhere).
- pid = expected_pid
- returncode = 255
- logger.warning(
- "Unknown child process pid %d, will report returncode 255",
- pid)
- else:
- returncode = _compute_returncode(status)
- if loop.get_debug():
- logger.debug('process %s exited with returncode %s',
- expected_pid, returncode)
-
- if loop.is_closed():
- logger.warning("Loop %r that handles pid %r is closed", loop, pid)
- else:
- loop.call_soon_threadsafe(callback, pid, returncode, *args)
-
- self._threads.pop(expected_pid)
-
-
+class MultiLoopChildWatcher(AbstractChildWatcher):
+    """A watcher that doesn't require a running loop in the main thread.
+
+ This implementation registers a SIGCHLD signal handler on
+ instantiation (which may conflict with other code that
+    installs its own handler for this signal).
+
+    The solution is safe, but it has a significant overhead when
+    handling a large number of processes (*O(n)* each time a
+ SIGCHLD is received).
+ """
+
+ # Implementation note:
+    # The class keeps compatibility with the AbstractChildWatcher ABC.
+    # To achieve this it has an empty attach_loop() method,
+    # doesn't accept an explicit loop argument for
+    # add_child_handler()/remove_child_handler(),
+    # and retrieves the current loop with get_running_loop().
+
+ def __init__(self):
+ self._callbacks = {}
+ self._saved_sighandler = None
+
+ def is_active(self):
+ return self._saved_sighandler is not None
+
+ def close(self):
+ self._callbacks.clear()
+ if self._saved_sighandler is None:
+ return
+
+ handler = signal.getsignal(signal.SIGCHLD)
+ if handler != self._sig_chld:
+ logger.warning("SIGCHLD handler was changed by outside code")
+ else:
+ signal.signal(signal.SIGCHLD, self._saved_sighandler)
+ self._saved_sighandler = None
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ pass
+
+ def add_child_handler(self, pid, callback, *args):
+ loop = events.get_running_loop()
+ self._callbacks[pid] = (loop, callback, args)
+
+ # Prevent a race condition in case the child is already terminated.
+ self._do_waitpid(pid)
+
+ def remove_child_handler(self, pid):
+ try:
+ del self._callbacks[pid]
+ return True
+ except KeyError:
+ return False
+
+ def attach_loop(self, loop):
+        # Don't save the loop, but initialize the watcher if called for
+        # the first time. The reason to do it here is that attach_loop()
+        # is called from the unix policy only for the main thread, and
+        # the main thread is required to subscribe to the SIGCHLD signal.
+ if self._saved_sighandler is not None:
+ return
+
+ self._saved_sighandler = signal.signal(signal.SIGCHLD, self._sig_chld)
+ if self._saved_sighandler is None:
+ logger.warning("Previous SIGCHLD handler was set by non-Python code, "
+ "restore to default handler on watcher close.")
+ self._saved_sighandler = signal.SIG_DFL
+
+ # Set SA_RESTART to limit EINTR occurrences.
+ signal.siginterrupt(signal.SIGCHLD, False)
+
+ def _do_waitpid_all(self):
+ for pid in list(self._callbacks):
+ self._do_waitpid(pid)
+
+ def _do_waitpid(self, expected_pid):
+ assert expected_pid > 0
+
+ try:
+ pid, status = os.waitpid(expected_pid, os.WNOHANG)
+ except ChildProcessError:
+ # The child process is already reaped
+ # (may happen if waitpid() is called elsewhere).
+ pid = expected_pid
+ returncode = 255
+ logger.warning(
+ "Unknown child process pid %d, will report returncode 255",
+ pid)
+ debug_log = False
+ else:
+ if pid == 0:
+ # The child process is still alive.
+ return
+
+ returncode = _compute_returncode(status)
+ debug_log = True
+ try:
+ loop, callback, args = self._callbacks.pop(pid)
+ except KeyError: # pragma: no cover
+ # May happen if .remove_child_handler() is called
+ # after os.waitpid() returns.
+ logger.warning("Child watcher got an unexpected pid: %r",
+ pid, exc_info=True)
+ else:
+ if loop.is_closed():
+ logger.warning("Loop %r that handles pid %r is closed", loop, pid)
+ else:
+ if debug_log and loop.get_debug():
+ logger.debug('process %s exited with returncode %s',
+ expected_pid, returncode)
+ loop.call_soon_threadsafe(callback, pid, returncode, *args)
+
+ def _sig_chld(self, signum, frame):
+ try:
+ self._do_waitpid_all()
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException:
+ logger.warning('Unknown exception in SIGCHLD handler', exc_info=True)
+
+
+class ThreadedChildWatcher(AbstractChildWatcher):
+ """Threaded child watcher implementation.
+
+    The watcher uses a thread per process
+    to wait for the process to finish.
+
+    It doesn't require subscribing to a POSIX signal,
+    but thread creation is not free.
+
+    The watcher has O(1) complexity; its performance doesn't depend
+    on the number of spawned processes.
+ """
+
+ def __init__(self):
+ self._pid_counter = itertools.count(0)
+ self._threads = {}
+
+ def is_active(self):
+ return True
+
+ def close(self):
+ self._join_threads()
+
+ def _join_threads(self):
+ """Internal: Join all non-daemon threads"""
+ threads = [thread for thread in list(self._threads.values())
+ if thread.is_alive() and not thread.daemon]
+ for thread in threads:
+ thread.join()
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ pass
+
+ def __del__(self, _warn=warnings.warn):
+ threads = [thread for thread in list(self._threads.values())
+ if thread.is_alive()]
+ if threads:
+ _warn(f"{self.__class__} has registered but not finished child processes",
+ ResourceWarning,
+ source=self)
+
+ def add_child_handler(self, pid, callback, *args):
+ loop = events.get_running_loop()
+ thread = threading.Thread(target=self._do_waitpid,
+ name=f"waitpid-{next(self._pid_counter)}",
+ args=(loop, pid, callback, args),
+ daemon=True)
+ self._threads[pid] = thread
+ thread.start()
+
+ def remove_child_handler(self, pid):
+ # asyncio never calls remove_child_handler() !!!
+        # The method is a no-op but is implemented because
+ # abstract base classes require it.
+ return True
+
+ def attach_loop(self, loop):
+ pass
+
+ def _do_waitpid(self, loop, expected_pid, callback, args):
+ assert expected_pid > 0
+
+ try:
+ pid, status = os.waitpid(expected_pid, 0)
+ except ChildProcessError:
+ # The child process is already reaped
+ # (may happen if waitpid() is called elsewhere).
+ pid = expected_pid
+ returncode = 255
+ logger.warning(
+ "Unknown child process pid %d, will report returncode 255",
+ pid)
+ else:
+ returncode = _compute_returncode(status)
+ if loop.get_debug():
+ logger.debug('process %s exited with returncode %s',
+ expected_pid, returncode)
+
+ if loop.is_closed():
+ logger.warning("Loop %r that handles pid %r is closed", loop, pid)
+ else:
+ loop.call_soon_threadsafe(callback, pid, returncode, *args)
+
+ self._threads.pop(expected_pid)
+
+
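
The two watchers restored above trade in opposite directions: MultiLoopChildWatcher pays O(n) per SIGCHLD but needs no running loop in the main thread, while ThreadedChildWatcher spends one thread per child for O(1) dispatch. The threaded strategy reduces to a few lines; a sketch without the asyncio plumbing, assuming POSIX waitpid() semantics:

import os
import subprocess
import threading

def watch(pid, callback):
    # One daemon thread per child; it blocks in waitpid() until this
    # specific pid exits, then decodes the status as _compute_returncode does.
    def waiter():
        _, status = os.waitpid(pid, 0)
        if os.WIFSIGNALED(status):
            callback(pid, -os.WTERMSIG(status))
        else:
            callback(pid, os.WEXITSTATUS(status))
    threading.Thread(target=waiter, daemon=True).start()

done = threading.Event()
proc = subprocess.Popen(["true"])
watch(proc.pid, lambda pid, rc: (print(pid, "exited with", rc), done.set()))
done.wait()
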
class _UnixDefaultEventLoopPolicy(events.BaseDefaultEventLoopPolicy):
"""UNIX event loop policy with a watcher for child processes."""
_loop_factory = _UnixSelectorEventLoop
@@ -1427,8 +1427,8 @@ class _UnixDefaultEventLoopPolicy(events.BaseDefaultEventLoopPolicy):
def _init_watcher(self):
with events._lock:
if self._watcher is None: # pragma: no branch
- self._watcher = ThreadedChildWatcher()
- if threading.current_thread() is threading.main_thread():
+ self._watcher = ThreadedChildWatcher()
+ if threading.current_thread() is threading.main_thread():
self._watcher.attach_loop(self._local._loop)
def set_event_loop(self, loop):
@@ -1442,13 +1442,13 @@ class _UnixDefaultEventLoopPolicy(events.BaseDefaultEventLoopPolicy):
super().set_event_loop(loop)
if (self._watcher is not None and
- threading.current_thread() is threading.main_thread()):
+ threading.current_thread() is threading.main_thread()):
self._watcher.attach_loop(loop)
def get_child_watcher(self):
"""Get the watcher for child processes.
- If not yet set, a ThreadedChildWatcher object is automatically created.
+ If not yet set, a ThreadedChildWatcher object is automatically created.
"""
if self._watcher is None:
self._init_watcher()
diff --git a/contrib/tools/python3/src/Lib/asyncio/windows_events.py b/contrib/tools/python3/src/Lib/asyncio/windows_events.py
index da81ab435b..bf9e43b7a0 100644
--- a/contrib/tools/python3/src/Lib/asyncio/windows_events.py
+++ b/contrib/tools/python3/src/Lib/asyncio/windows_events.py
@@ -1,10 +1,10 @@
"""Selector and proactor event loops for Windows."""
-import sys
-
-if sys.platform != 'win32': # pragma: no cover
- raise ImportError('win32 only')
-
+import sys
+
+if sys.platform != 'win32': # pragma: no cover
+ raise ImportError('win32 only')
+
import _overlapped
import _winapi
import errno
@@ -18,7 +18,7 @@ import weakref
from . import events
from . import base_subprocess
from . import futures
-from . import exceptions
+from . import exceptions
from . import proactor_events
from . import selector_events
from . import tasks
@@ -80,9 +80,9 @@ class _OverlappedFuture(futures.Future):
self._loop.call_exception_handler(context)
self._ov = None
- def cancel(self, msg=None):
+ def cancel(self, msg=None):
self._cancel_overlapped()
- return super().cancel(msg=msg)
+ return super().cancel(msg=msg)
def set_exception(self, exception):
super().set_exception(exception)
@@ -154,9 +154,9 @@ class _BaseWaitHandleFuture(futures.Future):
self._unregister_wait_cb(None)
- def cancel(self, msg=None):
+ def cancel(self, msg=None):
self._unregister_wait()
- return super().cancel(msg=msg)
+ return super().cancel(msg=msg)
def set_exception(self, exception):
self._unregister_wait()
@@ -314,25 +314,25 @@ class ProactorEventLoop(proactor_events.BaseProactorEventLoop):
proactor = IocpProactor()
super().__init__(proactor)
- def run_forever(self):
- try:
- assert self._self_reading_future is None
- self.call_soon(self._loop_self_reading)
- super().run_forever()
- finally:
- if self._self_reading_future is not None:
- ov = self._self_reading_future._ov
- self._self_reading_future.cancel()
- # self_reading_future was just cancelled so if it hasn't been
- # finished yet, it never will be (it's possible that it has
- # already finished and its callback is waiting in the queue,
-            # where it could still be processed if the event loop is
-            # restarted). Unregister it, otherwise IocpProactor.close()
-            # will wait for it forever.
- if ov is not None:
- self._proactor._unregister(ov)
- self._self_reading_future = None
-
+ def run_forever(self):
+ try:
+ assert self._self_reading_future is None
+ self.call_soon(self._loop_self_reading)
+ super().run_forever()
+ finally:
+ if self._self_reading_future is not None:
+ ov = self._self_reading_future._ov
+ self._self_reading_future.cancel()
+ # self_reading_future was just cancelled so if it hasn't been
+ # finished yet, it never will be (it's possible that it has
+ # already finished and its callback is waiting in the queue,
+            # where it could still be processed if the event loop is
+            # restarted). Unregister it, otherwise IocpProactor.close()
+            # will wait for it forever.
+ if ov is not None:
+ self._proactor._unregister(ov)
+ self._self_reading_future = None
+
async def create_pipe_connection(self, protocol_factory, address):
f = self._proactor.connect_pipe(address)
pipe = await f
@@ -377,7 +377,7 @@ class ProactorEventLoop(proactor_events.BaseProactorEventLoop):
elif self._debug:
logger.warning("Accept pipe failed on pipe %r",
pipe, exc_info=True)
- except exceptions.CancelledError:
+ except exceptions.CancelledError:
if pipe:
pipe.close()
else:
@@ -397,9 +397,9 @@ class ProactorEventLoop(proactor_events.BaseProactorEventLoop):
**kwargs)
try:
await waiter
- except (SystemExit, KeyboardInterrupt):
- raise
- except BaseException:
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except BaseException:
transp.close()
await transp._wait()
raise
@@ -478,7 +478,7 @@ class IocpProactor:
else:
ov.ReadFileInto(conn.fileno(), buf)
except BrokenPipeError:
- return self._result(0)
+ return self._result(0)
def finish_recv(trans, key, ov):
try:
@@ -492,44 +492,44 @@ class IocpProactor:
return self._register(ov, conn, finish_recv)
- def recvfrom(self, conn, nbytes, flags=0):
- self._register_with_iocp(conn)
- ov = _overlapped.Overlapped(NULL)
- try:
- ov.WSARecvFrom(conn.fileno(), nbytes, flags)
- except BrokenPipeError:
- return self._result((b'', None))
-
- def finish_recv(trans, key, ov):
- try:
- return ov.getresult()
- except OSError as exc:
- if exc.winerror in (_overlapped.ERROR_NETNAME_DELETED,
- _overlapped.ERROR_OPERATION_ABORTED):
- raise ConnectionResetError(*exc.args)
- else:
- raise
-
- return self._register(ov, conn, finish_recv)
-
- def sendto(self, conn, buf, flags=0, addr=None):
- self._register_with_iocp(conn)
- ov = _overlapped.Overlapped(NULL)
-
- ov.WSASendTo(conn.fileno(), buf, flags, addr)
-
- def finish_send(trans, key, ov):
- try:
- return ov.getresult()
- except OSError as exc:
- if exc.winerror in (_overlapped.ERROR_NETNAME_DELETED,
- _overlapped.ERROR_OPERATION_ABORTED):
- raise ConnectionResetError(*exc.args)
- else:
- raise
-
- return self._register(ov, conn, finish_send)
-
+ def recvfrom(self, conn, nbytes, flags=0):
+ self._register_with_iocp(conn)
+ ov = _overlapped.Overlapped(NULL)
+ try:
+ ov.WSARecvFrom(conn.fileno(), nbytes, flags)
+ except BrokenPipeError:
+ return self._result((b'', None))
+
+ def finish_recv(trans, key, ov):
+ try:
+ return ov.getresult()
+ except OSError as exc:
+ if exc.winerror in (_overlapped.ERROR_NETNAME_DELETED,
+ _overlapped.ERROR_OPERATION_ABORTED):
+ raise ConnectionResetError(*exc.args)
+ else:
+ raise
+
+ return self._register(ov, conn, finish_recv)
+
+ def sendto(self, conn, buf, flags=0, addr=None):
+ self._register_with_iocp(conn)
+ ov = _overlapped.Overlapped(NULL)
+
+ ov.WSASendTo(conn.fileno(), buf, flags, addr)
+
+ def finish_send(trans, key, ov):
+ try:
+ return ov.getresult()
+ except OSError as exc:
+ if exc.winerror in (_overlapped.ERROR_NETNAME_DELETED,
+ _overlapped.ERROR_OPERATION_ABORTED):
+ raise ConnectionResetError(*exc.args)
+ else:
+ raise
+
+ return self._register(ov, conn, finish_send)
+
def send(self, conn, buf, flags=0):
self._register_with_iocp(conn)
ov = _overlapped.Overlapped(NULL)
@@ -569,7 +569,7 @@ class IocpProactor:
# Coroutine closing the accept socket if the future is cancelled
try:
await future
- except exceptions.CancelledError:
+ except exceptions.CancelledError:
conn.close()
raise
@@ -579,14 +579,14 @@ class IocpProactor:
return future
def connect(self, conn, address):
- if conn.type == socket.SOCK_DGRAM:
- # WSAConnect will complete immediately for UDP sockets so we don't
- # need to register any IOCP operation
- _overlapped.WSAConnect(conn.fileno(), address)
- fut = self._loop.create_future()
- fut.set_result(None)
- return fut
-
+ if conn.type == socket.SOCK_DGRAM:
+ # WSAConnect will complete immediately for UDP sockets so we don't
+ # need to register any IOCP operation
+ _overlapped.WSAConnect(conn.fileno(), address)
+ fut = self._loop.create_future()
+ fut.set_result(None)
+ return fut
+
self._register_with_iocp(conn)
# The socket needs to be locally bound before we call ConnectEx().
try:
@@ -662,7 +662,7 @@ class IocpProactor:
# ConnectPipe() failed with ERROR_PIPE_BUSY: retry later
delay = min(delay * 2, CONNECT_PIPE_MAX_DELAY)
- await tasks.sleep(delay)
+ await tasks.sleep(delay)
return windows_utils.PipeHandle(handle)
@@ -910,4 +910,4 @@ class WindowsProactorEventLoopPolicy(events.BaseDefaultEventLoopPolicy):
_loop_factory = ProactorEventLoop
-DefaultEventLoopPolicy = WindowsProactorEventLoopPolicy
+DefaultEventLoopPolicy = WindowsProactorEventLoopPolicy
diff --git a/contrib/tools/python3/src/Lib/asyncio/windows_utils.py b/contrib/tools/python3/src/Lib/asyncio/windows_utils.py
index ef277fac3e..1b1ce0e9cd 100644
--- a/contrib/tools/python3/src/Lib/asyncio/windows_utils.py
+++ b/contrib/tools/python3/src/Lib/asyncio/windows_utils.py
@@ -107,9 +107,9 @@ class PipeHandle:
CloseHandle(self._handle)
self._handle = None
- def __del__(self, _warn=warnings.warn):
+ def __del__(self, _warn=warnings.warn):
if self._handle is not None:
- _warn(f"unclosed {self!r}", ResourceWarning, source=self)
+ _warn(f"unclosed {self!r}", ResourceWarning, source=self)
self.close()
def __enter__(self):
diff --git a/contrib/tools/python3/src/Lib/asyncore.py b/contrib/tools/python3/src/Lib/asyncore.py
index ce16f11a2f..42dc614d1d 100644
--- a/contrib/tools/python3/src/Lib/asyncore.py
+++ b/contrib/tools/python3/src/Lib/asyncore.py
@@ -228,7 +228,7 @@ class dispatcher:
if sock:
# Set to nonblocking just to make sure for cases where we
# get a socket from a blocking source.
- sock.setblocking(False)
+ sock.setblocking(False)
self.set_socket(sock, map)
self.connected = True
# The constructor no longer requires that the socket
@@ -280,7 +280,7 @@ class dispatcher:
def create_socket(self, family=socket.AF_INET, type=socket.SOCK_STREAM):
self.family_and_type = family, type
sock = socket.socket(family, type)
- sock.setblocking(False)
+ sock.setblocking(False)
self.set_socket(sock)
def set_socket(self, sock, map=None):
diff --git a/contrib/tools/python3/src/Lib/base64.py b/contrib/tools/python3/src/Lib/base64.py
index ec3823b724..8d3a152a47 100644
--- a/contrib/tools/python3/src/Lib/base64.py
+++ b/contrib/tools/python3/src/Lib/base64.py
@@ -82,7 +82,7 @@ def b64decode(s, altchars=None, validate=False):
altchars = _bytes_from_decode_data(altchars)
assert len(altchars) == 2, repr(altchars)
s = s.translate(bytes.maketrans(altchars, b'+/'))
- if validate and not re.fullmatch(b'[A-Za-z0-9+/]*={0,2}', s):
+ if validate and not re.fullmatch(b'[A-Za-z0-9+/]*={0,2}', s):
raise binascii.Error('Non-base64 digit found')
return binascii.a2b_base64(s)
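
The restored regular expression makes validate=True reject any byte outside the base64 alphabet (plus up to two trailing '=' pads) before decoding; without it, binascii.a2b_base64() silently discards such bytes. For example:

import base64
import binascii

print(base64.b64decode(b"aGVsbG8=", validate=True))  # b'hello'
try:
    base64.b64decode(b"aGVs bG8=", validate=True)    # embedded space
except binascii.Error as exc:
    print("rejected:", exc)
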
@@ -320,7 +320,7 @@ def a85encode(b, *, foldspaces=False, wrapcol=0, pad=False, adobe=False):
global _a85chars, _a85chars2
# Delay the initialization of tables to not waste memory
# if the function is never called
- if _a85chars2 is None:
+ if _a85chars2 is None:
_a85chars = [bytes((i,)) for i in range(33, 118)]
_a85chars2 = [(a + b) for a in _a85chars for b in _a85chars]
@@ -428,7 +428,7 @@ def b85encode(b, pad=False):
global _b85chars, _b85chars2
# Delay the initialization of tables to not waste memory
# if the function is never called
- if _b85chars2 is None:
+ if _b85chars2 is None:
_b85chars = [bytes((i,)) for i in _b85alphabet]
_b85chars2 = [(a + b) for a in _b85chars for b in _b85chars]
return _85encode(b, _b85chars, _b85chars2, pad)
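
Both hunks guard the same lazy-initialization pattern: the 85x85 two-character table (7,225 entries) is only built the first time the encoder runs, so merely importing base64 stays cheap. The pattern in isolation, with hypothetical names:

_chars2 = None  # built on first use, not at import time

def lazy_table():
    global _chars2
    if _chars2 is None:  # the first call pays the construction cost once
        chars = [bytes((i,)) for i in range(33, 118)]
        _chars2 = [a + b for a in chars for b in chars]
    return _chars2

print(len(lazy_table()))  # 7225
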
diff --git a/contrib/tools/python3/src/Lib/bdb.py b/contrib/tools/python3/src/Lib/bdb.py
index 384a272a85..98fdd03e8d 100644
--- a/contrib/tools/python3/src/Lib/bdb.py
+++ b/contrib/tools/python3/src/Lib/bdb.py
@@ -38,7 +38,7 @@ class Bdb:
"""Return canonical form of filename.
For real filenames, the canonical form is a case-normalized (on
- case insensitive filesystems) absolute path. 'Filenames' with
+ case insensitive filesystems) absolute path. 'Filenames' with
angle brackets, such as "<stdin>", generated in interactive
mode, are returned unchanged.
"""
@@ -117,7 +117,7 @@ class Bdb:
"""Invoke user function and return trace function for call event.
If the debugger stops on this function call, invoke
- self.user_call(). Raise BdbQuit if self.quitting is set.
+ self.user_call(). Raise BdbQuit if self.quitting is set.
Return self.trace_dispatch to continue tracing in this scope.
"""
# XXX 'arg' is no longer used
@@ -190,8 +190,8 @@ class Bdb:
def is_skipped_module(self, module_name):
"Return True if module_name matches any skip pattern."
- if module_name is None: # some modules do not have names
- return False
+ if module_name is None: # some modules do not have names
+ return False
for pattern in self.skip:
if fnmatch.fnmatch(module_name, pattern):
return True
@@ -384,7 +384,7 @@ class Bdb:
return None
def _prune_breaks(self, filename, lineno):
- """Prune breakpoints for filename:lineno.
+ """Prune breakpoints for filename:lineno.
A list of breakpoints is maintained in the Bdb instance and in
the Breakpoint class. If a breakpoint in the Bdb instance no
@@ -548,7 +548,7 @@ class Bdb:
s += frame.f_code.co_name
else:
s += "<lambda>"
- s += '()'
+ s += '()'
if '__return__' in frame.f_locals:
rv = frame.f_locals['__return__']
s += '->'
@@ -611,7 +611,7 @@ class Bdb:
# This method is more useful to debug a single function call.
- def runcall(self, func, /, *args, **kwds):
+ def runcall(self, func, /, *args, **kwds):
"""Debug a single function call.
Return the result of the function call.
diff --git a/contrib/tools/python3/src/Lib/binhex.py b/contrib/tools/python3/src/Lib/binhex.py
index ace5217d27..3e89a56e15 100644
--- a/contrib/tools/python3/src/Lib/binhex.py
+++ b/contrib/tools/python3/src/Lib/binhex.py
@@ -21,17 +21,17 @@ hexbin(inputfilename, outputfilename)
# input. The resulting code (xx 90 90) would appear to be interpreted as an
# escaped *value* of 0x90. All coders I've seen appear to ignore this nicety...
#
-import binascii
-import contextlib
+import binascii
+import contextlib
import io
import os
import struct
-import warnings
-
-warnings.warn('the binhex module is deprecated', DeprecationWarning,
- stacklevel=2)
-
+import warnings
+warnings.warn('the binhex module is deprecated', DeprecationWarning,
+ stacklevel=2)
+
+
__all__ = ["binhex","hexbin","Error"]
class Error(Exception):
@@ -82,16 +82,16 @@ class openrsrc:
def close(self):
pass
-
-# DeprecationWarning is already emitted on "import binhex". There is no need
-# to repeat the warning at each call to deprecated binascii functions.
-@contextlib.contextmanager
-def _ignore_deprecation_warning():
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', '', DeprecationWarning)
- yield
-
-
+
+# DeprecationWarning is already emitted on "import binhex". There is no need
+# to repeat the warning at each call to deprecated binascii functions.
+@contextlib.contextmanager
+def _ignore_deprecation_warning():
+ with warnings.catch_warnings():
+ warnings.filterwarnings('ignore', '', DeprecationWarning)
+ yield
+
+
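
The restored helper is a small reusable pattern: since "import binhex" already emitted the DeprecationWarning, each internal call into the deprecated binascii functions suppresses its duplicate warning. The same idea in isolation:

import contextlib
import warnings

@contextlib.contextmanager
def ignore_deprecations():
    # Scope a warning filter to one call site without touching global state.
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", "", DeprecationWarning)
        yield

with ignore_deprecations():
    warnings.warn("old API", DeprecationWarning)  # silently dropped
print("no warning escaped")
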
class _Hqxcoderengine:
"""Write data to the coder in 3-byte chunks"""
@@ -109,25 +109,25 @@ class _Hqxcoderengine:
self.data = self.data[todo:]
if not data:
return
- with _ignore_deprecation_warning():
- self.hqxdata = self.hqxdata + binascii.b2a_hqx(data)
+ with _ignore_deprecation_warning():
+ self.hqxdata = self.hqxdata + binascii.b2a_hqx(data)
self._flush(0)
def _flush(self, force):
first = 0
while first <= len(self.hqxdata) - self.linelen:
last = first + self.linelen
- self.ofp.write(self.hqxdata[first:last] + b'\r')
+ self.ofp.write(self.hqxdata[first:last] + b'\r')
self.linelen = LINELEN
first = last
self.hqxdata = self.hqxdata[first:]
if force:
- self.ofp.write(self.hqxdata + b':\r')
+ self.ofp.write(self.hqxdata + b':\r')
def close(self):
if self.data:
- with _ignore_deprecation_warning():
- self.hqxdata = self.hqxdata + binascii.b2a_hqx(self.data)
+ with _ignore_deprecation_warning():
+ self.hqxdata = self.hqxdata + binascii.b2a_hqx(self.data)
self._flush(1)
self.ofp.close()
del self.ofp
@@ -143,15 +143,15 @@ class _Rlecoderengine:
self.data = self.data + data
if len(self.data) < REASONABLY_LARGE:
return
- with _ignore_deprecation_warning():
- rledata = binascii.rlecode_hqx(self.data)
+ with _ignore_deprecation_warning():
+ rledata = binascii.rlecode_hqx(self.data)
self.ofp.write(rledata)
self.data = b''
def close(self):
if self.data:
- with _ignore_deprecation_warning():
- rledata = binascii.rlecode_hqx(self.data)
+ with _ignore_deprecation_warning():
+ rledata = binascii.rlecode_hqx(self.data)
self.ofp.write(rledata)
self.ofp.close()
del self.ofp
@@ -296,8 +296,8 @@ class _Hqxdecoderengine:
#
while True:
try:
- with _ignore_deprecation_warning():
- decdatacur, self.eof = binascii.a2b_hqx(data)
+ with _ignore_deprecation_warning():
+ decdatacur, self.eof = binascii.a2b_hqx(data)
break
except binascii.Incomplete:
pass
@@ -333,9 +333,9 @@ class _Rledecoderengine:
def _fill(self, wtd):
self.pre_buffer = self.pre_buffer + self.ifp.read(wtd + 4)
if self.ifp.eof:
- with _ignore_deprecation_warning():
- self.post_buffer = self.post_buffer + \
- binascii.rledecode_hqx(self.pre_buffer)
+ with _ignore_deprecation_warning():
+ self.post_buffer = self.post_buffer + \
+ binascii.rledecode_hqx(self.pre_buffer)
self.pre_buffer = b''
return
@@ -362,9 +362,9 @@ class _Rledecoderengine:
else:
mark = mark - 1
- with _ignore_deprecation_warning():
- self.post_buffer = self.post_buffer + \
- binascii.rledecode_hqx(self.pre_buffer[:mark])
+ with _ignore_deprecation_warning():
+ self.post_buffer = self.post_buffer + \
+ binascii.rledecode_hqx(self.pre_buffer[:mark])
self.pre_buffer = self.pre_buffer[mark:]
def close(self):
diff --git a/contrib/tools/python3/src/Lib/bisect.py b/contrib/tools/python3/src/Lib/bisect.py
index 8f3f6a3fe3..be67a335a4 100644
--- a/contrib/tools/python3/src/Lib/bisect.py
+++ b/contrib/tools/python3/src/Lib/bisect.py
@@ -9,7 +9,7 @@ def insort_right(a, x, lo=0, hi=None):
slice of a to be searched.
"""
- lo = bisect_right(a, x, lo, hi)
+ lo = bisect_right(a, x, lo, hi)
a.insert(lo, x)
def bisect_right(a, x, lo=0, hi=None):
@@ -29,7 +29,7 @@ def bisect_right(a, x, lo=0, hi=None):
hi = len(a)
while lo < hi:
mid = (lo+hi)//2
- # Use __lt__ to match the logic in list.sort() and in heapq
+ # Use __lt__ to match the logic in list.sort() and in heapq
if x < a[mid]: hi = mid
else: lo = mid+1
return lo
@@ -43,7 +43,7 @@ def insort_left(a, x, lo=0, hi=None):
slice of a to be searched.
"""
- lo = bisect_left(a, x, lo, hi)
+ lo = bisect_left(a, x, lo, hi)
a.insert(lo, x)
@@ -64,7 +64,7 @@ def bisect_left(a, x, lo=0, hi=None):
hi = len(a)
while lo < hi:
mid = (lo+hi)//2
- # Use __lt__ to match the logic in list.sort() and in heapq
+ # Use __lt__ to match the logic in list.sort() and in heapq
if a[mid] < x: lo = mid+1
else: hi = mid
return lo
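
The comment restored in both functions is the key invariant: bisect compares with __lt__ only, exactly like list.sort() and heapq, so any type ordered by __lt__ works. A quick illustration of insort and the left/right variants:

import bisect

a = [1, 3, 3, 7]
bisect.insort_right(a, 3)         # insert after existing equal entries
print(a)                          # [1, 3, 3, 3, 7]
print(bisect.bisect_left(a, 3))   # 1: first index where 3 could go
print(bisect.bisect_right(a, 3))  # 4: index just past the last 3
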
diff --git a/contrib/tools/python3/src/Lib/bz2.py b/contrib/tools/python3/src/Lib/bz2.py
index 7447d12fc4..8005febeef 100644
--- a/contrib/tools/python3/src/Lib/bz2.py
+++ b/contrib/tools/python3/src/Lib/bz2.py
@@ -35,7 +35,7 @@ class BZ2File(_compression.BaseStream):
returned as bytes, and data to be written should be given as bytes.
"""
- def __init__(self, filename, mode="r", *, compresslevel=9):
+ def __init__(self, filename, mode="r", *, compresslevel=9):
"""Open a bzip2-compressed file.
If filename is a str, bytes, or PathLike object, it gives the
@@ -226,23 +226,23 @@ class BZ2File(_compression.BaseStream):
"""Write a byte string to the file.
Returns the number of uncompressed bytes written, which is
- always the length of data in bytes. Note that due to buffering,
- the file on disk may not reflect the data written until close()
- is called.
+ always the length of data in bytes. Note that due to buffering,
+ the file on disk may not reflect the data written until close()
+ is called.
"""
with self._lock:
self._check_can_write()
- if isinstance(data, (bytes, bytearray)):
- length = len(data)
- else:
- # accept any data that supports the buffer protocol
- data = memoryview(data)
- length = data.nbytes
-
+ if isinstance(data, (bytes, bytearray)):
+ length = len(data)
+ else:
+ # accept any data that supports the buffer protocol
+ data = memoryview(data)
+ length = data.nbytes
+
compressed = self._compressor.compress(data)
self._fp.write(compressed)
- self._pos += length
- return length
+ self._pos += length
+ return length
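
As rewritten above, write() accepts any object supporting the buffer protocol and reports the uncompressed length (via memoryview.nbytes for non-bytes inputs), while what actually hits disk is compressed and buffered. A small round-trip, assuming a writable temp directory:

import bz2
import os
import tempfile

path = os.path.join(tempfile.mkdtemp(), "demo.bz2")
with bz2.BZ2File(path, "w") as f:
    n = f.write(bytearray(b"abc") * 1000)  # any buffer-protocol object
    print(n)  # 3000: uncompressed bytes, not the compressed size

with bz2.BZ2File(path) as f:
    print(len(f.read()))  # 3000
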
def writelines(self, seq):
"""Write a sequence of byte strings to the file.
diff --git a/contrib/tools/python3/src/Lib/cProfile.py b/contrib/tools/python3/src/Lib/cProfile.py
index 22a7d0aade..b93ec5454f 100644
--- a/contrib/tools/python3/src/Lib/cProfile.py
+++ b/contrib/tools/python3/src/Lib/cProfile.py
@@ -103,20 +103,20 @@ class Profile(_lsprof.Profiler):
return self
# This method is more useful to profile a single function call.
- def runcall(self, func, /, *args, **kw):
+ def runcall(self, func, /, *args, **kw):
self.enable()
try:
return func(*args, **kw)
finally:
self.disable()
- def __enter__(self):
- self.enable()
- return self
-
- def __exit__(self, *exc_info):
- self.disable()
-
+ def __enter__(self):
+ self.enable()
+ return self
+
+ def __exit__(self, *exc_info):
+ self.disable()
+
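
The __enter__/__exit__ pair restored here makes Profile usable as a context manager (added in Python 3.8), so enable/disable bracket exactly the profiled region. For instance:

import cProfile
import pstats

with cProfile.Profile() as pr:  # enable() on entry, disable() on exit
    sum(i * i for i in range(100_000))

pstats.Stats(pr).sort_stats("cumulative").print_stats(3)
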
# ____________________________________________________________
def label(code):
@@ -152,11 +152,11 @@ def main():
(options, args) = parser.parse_args()
sys.argv[:] = args
- # The script that we're profiling may chdir, so capture the absolute path
- # to the output file at startup.
- if options.outfile is not None:
- options.outfile = os.path.abspath(options.outfile)
-
+ # The script that we're profiling may chdir, so capture the absolute path
+ # to the output file at startup.
+ if options.outfile is not None:
+ options.outfile = os.path.abspath(options.outfile)
+
if len(args) > 0:
if options.module:
code = "run_module(modname, run_name='__main__')"
@@ -175,12 +175,12 @@ def main():
'__package__': None,
'__cached__': None,
}
- try:
- runctx(code, globs, None, options.outfile, options.sort)
- except BrokenPipeError as exc:
- # Prevent "Exception ignored" during interpreter shutdown.
- sys.stdout = None
- sys.exit(exc.errno)
+ try:
+ runctx(code, globs, None, options.outfile, options.sort)
+ except BrokenPipeError as exc:
+ # Prevent "Exception ignored" during interpreter shutdown.
+ sys.stdout = None
+ sys.exit(exc.errno)
else:
parser.print_usage()
return parser
diff --git a/contrib/tools/python3/src/Lib/calendar.py b/contrib/tools/python3/src/Lib/calendar.py
index 7550d52c0a..586e489426 100644
--- a/contrib/tools/python3/src/Lib/calendar.py
+++ b/contrib/tools/python3/src/Lib/calendar.py
@@ -127,18 +127,18 @@ def monthrange(year, month):
return day1, ndays
-def _monthlen(year, month):
+def _monthlen(year, month):
return mdays[month] + (month == February and isleap(year))
-def _prevmonth(year, month):
+def _prevmonth(year, month):
if month == 1:
return year-1, 12
else:
return year, month-1
-def _nextmonth(year, month):
+def _nextmonth(year, month):
if month == 12:
return year+1, 1
else:
@@ -207,13 +207,13 @@ class Calendar(object):
day1, ndays = monthrange(year, month)
days_before = (day1 - self.firstweekday) % 7
days_after = (self.firstweekday - day1 - ndays) % 7
- y, m = _prevmonth(year, month)
- end = _monthlen(y, m) + 1
+ y, m = _prevmonth(year, month)
+ end = _monthlen(y, m) + 1
for d in range(end-days_before, end):
yield y, m, d
for d in range(1, ndays + 1):
yield year, month, d
- y, m = _nextmonth(year, month)
+ y, m = _nextmonth(year, month)
for d in range(1, days_after + 1):
yield y, m, d
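
With the helpers above, Calendar's day iterator pads a month to whole weeks: days_before trailing days of the previous month in front, days_after leading days of the next month behind. For example, with weeks starting on Monday:

import calendar

cal = calendar.Calendar(firstweekday=0)   # 0 = Monday
days = list(cal.itermonthdays3(2022, 2))  # (year, month, day) triples
print(days[0])   # (2022, 1, 31): the Monday before Feb 1 (a Tuesday)
print(days[-1])  # (2022, 3, 6): padding out the final week
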
diff --git a/contrib/tools/python3/src/Lib/cgi.py b/contrib/tools/python3/src/Lib/cgi.py
index 1e880e5184..59d03c0ec0 100644
--- a/contrib/tools/python3/src/Lib/cgi.py
+++ b/contrib/tools/python3/src/Lib/cgi.py
@@ -42,10 +42,10 @@ import html
import locale
import tempfile
-__all__ = ["MiniFieldStorage", "FieldStorage", "parse", "parse_multipart",
+__all__ = ["MiniFieldStorage", "FieldStorage", "parse", "parse_multipart",
"parse_header", "test", "print_exception", "print_environ",
"print_form", "print_directory", "print_arguments",
- "print_environ_usage"]
+ "print_environ_usage"]
# Logging support
# ===============
@@ -115,8 +115,8 @@ log = initlog # The current logging function
# 0 ==> unlimited input
maxlen = 0
-def parse(fp=None, environ=os.environ, keep_blank_values=0,
- strict_parsing=0, separator='&'):
+def parse(fp=None, environ=os.environ, keep_blank_values=0,
+ strict_parsing=0, separator='&'):
"""Parse a query in the environment or from a file (default stdin)
Arguments, all optional:
@@ -135,9 +135,9 @@ def parse(fp=None, environ=os.environ, keep_blank_values=0,
strict_parsing: flag indicating what to do with parsing errors.
If false (the default), errors are silently ignored.
If true, errors raise a ValueError exception.
-
- separator: str. The symbol to use for separating the query arguments.
- Defaults to &.
+
+ separator: str. The symbol to use for separating the query arguments.
+ Defaults to &.
"""
if fp is None:
fp = sys.stdin
@@ -158,7 +158,7 @@ def parse(fp=None, environ=os.environ, keep_blank_values=0,
if environ['REQUEST_METHOD'] == 'POST':
ctype, pdict = parse_header(environ['CONTENT_TYPE'])
if ctype == 'multipart/form-data':
- return parse_multipart(fp, pdict, separator=separator)
+ return parse_multipart(fp, pdict, separator=separator)
elif ctype == 'application/x-www-form-urlencoded':
clength = int(environ['CONTENT_LENGTH'])
if maxlen and clength > maxlen:
@@ -182,10 +182,10 @@ def parse(fp=None, environ=os.environ, keep_blank_values=0,
qs = ""
environ['QUERY_STRING'] = qs # XXX Shouldn't, really
return urllib.parse.parse_qs(qs, keep_blank_values, strict_parsing,
- encoding=encoding, separator=separator)
+ encoding=encoding, separator=separator)
-def parse_multipart(fp, pdict, encoding="utf-8", errors="replace", separator='&'):
+def parse_multipart(fp, pdict, encoding="utf-8", errors="replace", separator='&'):
"""Parse multipart input.
Arguments:
@@ -204,12 +204,12 @@ def parse_multipart(fp, pdict, encoding="utf-8", errors="replace", separator='&'
ctype = "multipart/form-data; boundary={}".format(boundary)
headers = Message()
headers.set_type(ctype)
- try:
- headers['Content-Length'] = pdict['CONTENT-LENGTH']
- except KeyError:
- pass
+ try:
+ headers['Content-Length'] = pdict['CONTENT-LENGTH']
+ except KeyError:
+ pass
fs = FieldStorage(fp, headers=headers, encoding=encoding, errors=errors,
- environ={'REQUEST_METHOD': 'POST'}, separator=separator)
+ environ={'REQUEST_METHOD': 'POST'}, separator=separator)
return {k: fs.getlist(k) for k in fs}
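
The separator parameter threaded through parse(), parse_multipart(), and FieldStorage is the bpo-42967 (CVE-2021-23336) hardening: a query string is split on exactly one separator, never on both '&' and ';'. The effect is easiest to see in urllib.parse.parse_qsl, which these code paths delegate to:

from urllib.parse import parse_qsl

# ';' is no longer a second separator, so it stays inside the value.
print(parse_qsl("a=1&b=2;c=3"))             # [('a', '1'), ('b', '2;c=3')]
print(parse_qsl("a=1;b=2", separator=";"))  # [('a', '1'), ('b', '2')]
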
def _parseparam(s):
@@ -319,7 +319,7 @@ class FieldStorage:
def __init__(self, fp=None, headers=None, outerboundary=b'',
environ=os.environ, keep_blank_values=0, strict_parsing=0,
limit=None, encoding='utf-8', errors='replace',
- max_num_fields=None, separator='&'):
+ max_num_fields=None, separator='&'):
"""Constructor. Read multipart/* until last part.
Arguments, all optional:
@@ -367,7 +367,7 @@ class FieldStorage:
self.keep_blank_values = keep_blank_values
self.strict_parsing = strict_parsing
self.max_num_fields = max_num_fields
- self.separator = separator
+ self.separator = separator
if 'REQUEST_METHOD' in environ:
method = environ['REQUEST_METHOD'].upper()
self.qs_on_post = None
@@ -469,7 +469,7 @@ class FieldStorage:
if maxlen and clen > maxlen:
raise ValueError('Maximum content length exceeded')
self.length = clen
- if self.limit is None and clen >= 0:
+ if self.limit is None and clen >= 0:
self.limit = clen
self.list = self.file = None
@@ -594,7 +594,7 @@ class FieldStorage:
query = urllib.parse.parse_qsl(
qs, self.keep_blank_values, self.strict_parsing,
encoding=self.encoding, errors=self.errors,
- max_num_fields=self.max_num_fields, separator=self.separator)
+ max_num_fields=self.max_num_fields, separator=self.separator)
self.list = [MiniFieldStorage(key, value) for key, value in query]
self.skip_lines()
@@ -610,7 +610,7 @@ class FieldStorage:
query = urllib.parse.parse_qsl(
self.qs_on_post, self.keep_blank_values, self.strict_parsing,
encoding=self.encoding, errors=self.errors,
- max_num_fields=self.max_num_fields, separator=self.separator)
+ max_num_fields=self.max_num_fields, separator=self.separator)
self.list.extend(MiniFieldStorage(key, value) for key, value in query)
klass = self.FieldStorageClass or self.__class__
@@ -650,11 +650,11 @@ class FieldStorage:
if 'content-length' in headers:
del headers['content-length']
- limit = None if self.limit is None \
- else self.limit - self.bytes_read
+ limit = None if self.limit is None \
+ else self.limit - self.bytes_read
part = klass(self.fp, headers, ib, environ, keep_blank_values,
- strict_parsing, limit,
- self.encoding, self.errors, max_num_fields, self.separator)
+ strict_parsing, limit,
+ self.encoding, self.errors, max_num_fields, self.separator)
if max_num_fields is not None:
max_num_fields -= 1
@@ -744,8 +744,8 @@ class FieldStorage:
last_line_lfend = True
_read = 0
while 1:
-
- if self.limit is not None and 0 <= self.limit <= _read:
+
+ if self.limit is not None and 0 <= self.limit <= _read:
break
line = self.fp.readline(1<<16) # bytes
self.bytes_read += len(line)
diff --git a/contrib/tools/python3/src/Lib/cgitb.py b/contrib/tools/python3/src/Lib/cgitb.py
index 4f81271be3..3b54203d05 100644
--- a/contrib/tools/python3/src/Lib/cgitb.py
+++ b/contrib/tools/python3/src/Lib/cgitb.py
@@ -124,9 +124,9 @@ function calls leading up to the error, in the order they occurred.</p>'''
args, varargs, varkw, locals = inspect.getargvalues(frame)
call = ''
if func != '?':
- call = 'in ' + strong(pydoc.html.escape(func))
- if func != "<module>":
- call += inspect.formatargvalues(args, varargs, varkw, locals,
+ call = 'in ' + strong(pydoc.html.escape(func))
+ if func != "<module>":
+ call += inspect.formatargvalues(args, varargs, varkw, locals,
formatvalue=lambda value: '=' + pydoc.html.repr(value))
highlight = {}
@@ -208,9 +208,9 @@ function calls leading up to the error, in the order they occurred.
args, varargs, varkw, locals = inspect.getargvalues(frame)
call = ''
if func != '?':
- call = 'in ' + func
- if func != "<module>":
- call += inspect.formatargvalues(args, varargs, varkw, locals,
+ call = 'in ' + func
+ if func != "<module>":
+ call += inspect.formatargvalues(args, varargs, varkw, locals,
formatvalue=lambda value: '=' + pydoc.text.repr(value))
highlight = {}
diff --git a/contrib/tools/python3/src/Lib/code.py b/contrib/tools/python3/src/Lib/code.py
index 76000f8c8b..ed23b95e63 100644
--- a/contrib/tools/python3/src/Lib/code.py
+++ b/contrib/tools/python3/src/Lib/code.py
@@ -40,7 +40,7 @@ class InteractiveInterpreter:
Arguments are as for compile_command().
- One of several things can happen:
+ One of several things can happen:
1) The input is incorrect; compile_command() raised an
exception (SyntaxError or OverflowError). A syntax traceback
diff --git a/contrib/tools/python3/src/Lib/codecs.py b/contrib/tools/python3/src/Lib/codecs.py
index d2edd148a2..19f7b78d14 100644
--- a/contrib/tools/python3/src/Lib/codecs.py
+++ b/contrib/tools/python3/src/Lib/codecs.py
@@ -386,7 +386,7 @@ class StreamWriter(Codec):
def reset(self):
- """ Resets the codec buffers used for keeping internal state.
+ """ Resets the codec buffers used for keeping internal state.
Calling this method should ensure that the data on the
output is put into a clean state, that allows appending
@@ -620,7 +620,7 @@ class StreamReader(Codec):
def reset(self):
- """ Resets the codec buffers used for keeping internal state.
+ """ Resets the codec buffers used for keeping internal state.
Note that no stream repositioning should take place.
This method is primarily intended to be able to recover
@@ -838,7 +838,7 @@ class StreamRecoder:
def writelines(self, list):
- data = b''.join(list)
+ data = b''.join(list)
data, bytesdecoded = self.decode(data, self.errors)
return self.writer.write(data)
@@ -847,12 +847,12 @@ class StreamRecoder:
self.reader.reset()
self.writer.reset()
- def seek(self, offset, whence=0):
- # Seeks must be propagated to both the readers and writers
- # as they might need to reset their internal buffers.
- self.reader.seek(offset, whence)
- self.writer.seek(offset, whence)
-
+ def seek(self, offset, whence=0):
+ # Seeks must be propagated to both the readers and writers
+ # as they might need to reset their internal buffers.
+ self.reader.seek(offset, whence)
+ self.writer.seek(offset, whence)
+
def __getattr__(self, name,
getattr=getattr):
@@ -868,7 +868,7 @@ class StreamRecoder:
### Shortcuts
-def open(filename, mode='r', encoding=None, errors='strict', buffering=-1):
+def open(filename, mode='r', encoding=None, errors='strict', buffering=-1):
""" Open an encoded file using the given mode and return
a wrapped version providing transparent encoding/decoding.
@@ -889,8 +889,8 @@ def open(filename, mode='r', encoding=None, errors='strict', buffering=-1):
encoding error occurs.
buffering has the same meaning as for the builtin open() API.
- It defaults to -1 which means that the default buffer size will
- be used.
+ It defaults to -1 which means that the default buffer size will
+ be used.
The returned wrapped file object provides an extra attribute
.encoding which allows querying the used encoding. This
@@ -906,16 +906,16 @@ def open(filename, mode='r', encoding=None, errors='strict', buffering=-1):
if encoding is None:
return file
- try:
- info = lookup(encoding)
- srw = StreamReaderWriter(file, info.streamreader, info.streamwriter, errors)
- # Add attributes to simplify introspection
- srw.encoding = encoding
- return srw
- except:
- file.close()
- raise
-
+ try:
+ info = lookup(encoding)
+ srw = StreamReaderWriter(file, info.streamreader, info.streamwriter, errors)
+ # Add attributes to simplify introspection
+ srw.encoding = encoding
+ return srw
+ except:
+ file.close()
+ raise
+
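
The restored try/except ensures the underlying file is closed if the codec lookup fails, instead of leaking the handle, and the returned StreamReaderWriter carries the .encoding attribute mentioned in the comment. Basic usage:

import codecs
import os
import tempfile

path = os.path.join(tempfile.mkdtemp(), "demo.txt")
with codecs.open(path, "w", encoding="utf-8") as f:
    print(f.encoding)  # 'utf-8': the introspection attribute set above
    f.write("héllo")

with codecs.open(path, encoding="utf-8") as f:
    print(f.read())    # 'héllo'
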
def EncodedFile(file, data_encoding, file_encoding=None, errors='strict'):
""" Return a wrapped version of file which provides transparent
diff --git a/contrib/tools/python3/src/Lib/codeop.py b/contrib/tools/python3/src/Lib/codeop.py
index 4c10470aee..c58f4038fa 100644
--- a/contrib/tools/python3/src/Lib/codeop.py
+++ b/contrib/tools/python3/src/Lib/codeop.py
@@ -57,7 +57,7 @@ Compile():
"""
import __future__
-import warnings
+import warnings
_features = [getattr(__future__, fname)
for fname in __future__.all_feature_names]
@@ -81,31 +81,31 @@ def _maybe_compile(compiler, source, filename, symbol):
try:
code = compiler(source, filename, symbol)
- except SyntaxError:
+ except SyntaxError:
pass
- # Catch syntax warnings after the first compile
- # to emit warnings (SyntaxWarning, DeprecationWarning) at most once.
- with warnings.catch_warnings():
- warnings.simplefilter("error")
-
- try:
- code1 = compiler(source + "\n", filename, symbol)
- except SyntaxError as e:
- err1 = e
-
- try:
- code2 = compiler(source + "\n\n", filename, symbol)
- except SyntaxError as e:
- err2 = e
-
- try:
- if code:
- return code
- if not code1 and repr(err1) == repr(err2):
- raise err1
- finally:
- err1 = err2 = None
+ # Catch syntax warnings after the first compile
+ # to emit warnings (SyntaxWarning, DeprecationWarning) at most once.
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+
+ try:
+ code1 = compiler(source + "\n", filename, symbol)
+ except SyntaxError as e:
+ err1 = e
+
+ try:
+ code2 = compiler(source + "\n\n", filename, symbol)
+ except SyntaxError as e:
+ err2 = e
+
+ try:
+ if code:
+ return code
+ if not code1 and repr(err1) == repr(err2):
+ raise err1
+ finally:
+ err1 = err2 = None
def _compile(source, filename, symbol):
return compile(source, filename, symbol, PyCF_DONT_IMPLY_DEDENT)
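
The rewritten _maybe_compile() keeps codeop's classic trick: compile the source as-is, then with one and two extra newlines; if the two padded attempts fail with the same error, the input is genuinely invalid, otherwise it is merely incomplete. compile_command() exposes this as a tri-state result:

import codeop

print(codeop.compile_command("print(1)"))  # a code object: complete input
print(codeop.compile_command("if True:"))  # None: valid but incomplete
try:
    codeop.compile_command("a b")          # invalid however it's padded
except SyntaxError as exc:
    print("invalid:", exc.msg)
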
@@ -118,8 +118,8 @@ def compile_command(source, filename="<input>", symbol="single"):
source -- the source string; may contain \n characters
filename -- optional filename from which source was read; default
"<input>"
- symbol -- optional grammar start symbol; "single" (default), "exec"
- or "eval"
+ symbol -- optional grammar start symbol; "single" (default), "exec"
+ or "eval"
Return value / exceptions raised:
@@ -140,7 +140,7 @@ class Compile:
self.flags = PyCF_DONT_IMPLY_DEDENT
def __call__(self, source, filename, symbol):
- codeob = compile(source, filename, symbol, self.flags, True)
+ codeob = compile(source, filename, symbol, self.flags, True)
for feature in _features:
if codeob.co_flags & feature.compiler_flag:
self.flags |= feature.compiler_flag
diff --git a/contrib/tools/python3/src/Lib/collections/__init__.py b/contrib/tools/python3/src/Lib/collections/__init__.py
index 5bdd3b3516..8ff1390657 100644
--- a/contrib/tools/python3/src/Lib/collections/__init__.py
+++ b/contrib/tools/python3/src/Lib/collections/__init__.py
@@ -14,29 +14,29 @@ list, set, and tuple.
'''
-__all__ = [
- 'ChainMap',
- 'Counter',
- 'OrderedDict',
- 'UserDict',
- 'UserList',
- 'UserString',
- 'defaultdict',
- 'deque',
- 'namedtuple',
-]
+__all__ = [
+ 'ChainMap',
+ 'Counter',
+ 'OrderedDict',
+ 'UserDict',
+ 'UserList',
+ 'UserString',
+ 'defaultdict',
+ 'deque',
+ 'namedtuple',
+]
import _collections_abc
-import heapq as _heapq
-import sys as _sys
-
-from itertools import chain as _chain
-from itertools import repeat as _repeat
-from itertools import starmap as _starmap
+import heapq as _heapq
+import sys as _sys
+
+from itertools import chain as _chain
+from itertools import repeat as _repeat
+from itertools import starmap as _starmap
from keyword import iskeyword as _iskeyword
-from operator import eq as _eq
-from operator import itemgetter as _itemgetter
-from reprlib import recursive_repr as _recursive_repr
+from operator import eq as _eq
+from operator import itemgetter as _itemgetter
+from reprlib import recursive_repr as _recursive_repr
from _weakref import proxy as _proxy
try:
@@ -60,14 +60,14 @@ def __getattr__(name):
obj = getattr(_collections_abc, name)
import warnings
warnings.warn("Using or importing the ABCs from 'collections' instead "
- "of from 'collections.abc' is deprecated since Python 3.3, "
- "and in 3.10 it will stop working",
+ "of from 'collections.abc' is deprecated since Python 3.3, "
+ "and in 3.10 it will stop working",
DeprecationWarning, stacklevel=2)
globals()[name] = obj
return obj
raise AttributeError(f'module {__name__!r} has no attribute {name!r}')
-
+
################################################################################
### OrderedDict
################################################################################
@@ -107,7 +107,7 @@ class OrderedDict(dict):
# Individual links are kept alive by the hard reference in self.__map.
# Those hard references disappear when a key is deleted from an OrderedDict.
- def __init__(self, other=(), /, **kwds):
+ def __init__(self, other=(), /, **kwds):
'''Initialize an ordered dictionary. The signature is the same as
regular dictionaries. Keyword argument order is preserved.
'''
@@ -118,7 +118,7 @@ class OrderedDict(dict):
self.__root = root = _proxy(self.__hardroot)
root.prev = root.next = root
self.__map = {}
- self.__update(other, **kwds)
+ self.__update(other, **kwds)
def __setitem__(self, key, value,
dict_setitem=dict.__setitem__, proxy=_proxy, Link=_Link):
@@ -307,25 +307,25 @@ class OrderedDict(dict):
return dict.__eq__(self, other) and all(map(_eq, self, other))
return dict.__eq__(self, other)
- def __ior__(self, other):
- self.update(other)
- return self
-
- def __or__(self, other):
- if not isinstance(other, dict):
- return NotImplemented
- new = self.__class__(self)
- new.update(other)
- return new
-
- def __ror__(self, other):
- if not isinstance(other, dict):
- return NotImplemented
- new = self.__class__(other)
- new.update(self)
- return new
-
-
+ def __ior__(self, other):
+ self.update(other)
+ return self
+
+ def __or__(self, other):
+ if not isinstance(other, dict):
+ return NotImplemented
+ new = self.__class__(self)
+ new.update(other)
+ return new
+
+ def __ror__(self, other):
+ if not isinstance(other, dict):
+ return NotImplemented
+ new = self.__class__(other)
+ new.update(self)
+ return new
+
+
try:
from _collections import OrderedDict
except ImportError:
@@ -337,10 +337,10 @@ except ImportError:
### namedtuple
################################################################################
-try:
- from _collections import _tuplegetter
-except ImportError:
- _tuplegetter = lambda index, doc: property(_itemgetter(index), doc=doc)
+try:
+ from _collections import _tuplegetter
+except ImportError:
+ _tuplegetter = lambda index, doc: property(_itemgetter(index), doc=doc)
def namedtuple(typename, field_names, *, rename=False, defaults=None, module=None):
"""Returns a new subclass of tuple with named fields.
@@ -413,23 +413,23 @@ def namedtuple(typename, field_names, *, rename=False, defaults=None, module=Non
# Variables used in the methods and docstrings
field_names = tuple(map(_sys.intern, field_names))
num_fields = len(field_names)
- arg_list = ', '.join(field_names)
- if num_fields == 1:
- arg_list += ','
+ arg_list = ', '.join(field_names)
+ if num_fields == 1:
+ arg_list += ','
repr_fmt = '(' + ', '.join(f'{name}=%r' for name in field_names) + ')'
tuple_new = tuple.__new__
- _dict, _tuple, _len, _map, _zip = dict, tuple, len, map, zip
+ _dict, _tuple, _len, _map, _zip = dict, tuple, len, map, zip
# Create all the named tuple methods to be added to the class namespace
- namespace = {
- '_tuple_new': tuple_new,
- '__builtins__': {},
- '__name__': f'namedtuple_{typename}',
- }
- code = f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))'
- __new__ = eval(code, namespace)
- __new__.__name__ = '__new__'
+ namespace = {
+ '_tuple_new': tuple_new,
+ '__builtins__': {},
+ '__name__': f'namedtuple_{typename}',
+ }
+ code = f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))'
+ __new__ = eval(code, namespace)
+ __new__.__name__ = '__new__'
__new__.__doc__ = f'Create new instance of {typename}({arg_list})'
if defaults is not None:
__new__.__defaults__ = defaults
@@ -444,8 +444,8 @@ def namedtuple(typename, field_names, *, rename=False, defaults=None, module=Non
_make.__func__.__doc__ = (f'Make a new {typename} object from a sequence '
'or iterable')
- def _replace(self, /, **kwds):
- result = self._make(_map(kwds.pop, field_names, self))
+ def _replace(self, /, **kwds):
+ result = self._make(_map(kwds.pop, field_names, self))
if kwds:
raise ValueError(f'Got unexpected field names: {list(kwds)!r}')
return result
@@ -458,22 +458,22 @@ def namedtuple(typename, field_names, *, rename=False, defaults=None, module=Non
return self.__class__.__name__ + repr_fmt % self
def _asdict(self):
- 'Return a new dict which maps field names to their values.'
- return _dict(_zip(self._fields, self))
+ 'Return a new dict which maps field names to their values.'
+ return _dict(_zip(self._fields, self))
def __getnewargs__(self):
'Return self as a plain tuple. Used by copy and pickle.'
- return _tuple(self)
+ return _tuple(self)
# Modify function metadata to help with introspection and debugging
- for method in (
- __new__,
- _make.__func__,
- _replace,
- __repr__,
- _asdict,
- __getnewargs__,
- ):
+ for method in (
+ __new__,
+ _make.__func__,
+ _replace,
+ __repr__,
+ _asdict,
+ __getnewargs__,
+ ):
method.__qualname__ = f'{typename}.{method.__name__}'
# Build-up the class namespace dictionary
@@ -482,7 +482,7 @@ def namedtuple(typename, field_names, *, rename=False, defaults=None, module=Non
'__doc__': f'{typename}({arg_list})',
'__slots__': (),
'_fields': field_names,
- '_field_defaults': field_defaults,
+ '_field_defaults': field_defaults,
'__new__': __new__,
'_make': _make,
'_replace': _replace,
@@ -491,8 +491,8 @@ def namedtuple(typename, field_names, *, rename=False, defaults=None, module=Non
'__getnewargs__': __getnewargs__,
}
for index, name in enumerate(field_names):
- doc = _sys.intern(f'Alias for field number {index}')
- class_namespace[name] = _tuplegetter(index, doc)
+ doc = _sys.intern(f'Alias for field number {index}')
+ class_namespace[name] = _tuplegetter(index, doc)
result = type(typename, (tuple,), class_namespace)
@@ -578,7 +578,7 @@ class Counter(dict):
# http://code.activestate.com/recipes/259174/
# Knuth, TAOCP Vol. II section 4.6.3
- def __init__(self, iterable=None, /, **kwds):
+ def __init__(self, iterable=None, /, **kwds):
'''Create a new, empty Counter object. And if given, count elements
from an input iterable. Or, initialize the count from another mapping
of elements to their counts.
@@ -589,8 +589,8 @@ class Counter(dict):
>>> c = Counter(a=4, b=2) # a new counter from keyword args
'''
- super().__init__()
- self.update(iterable, **kwds)
+ super().__init__()
+ self.update(iterable, **kwds)
def __missing__(self, key):
'The count of elements not in the Counter is zero.'
@@ -601,8 +601,8 @@ class Counter(dict):
'''List the n most common elements and their counts from the most
common to the least. If n is None, then list all element counts.
- >>> Counter('abracadabra').most_common(3)
- [('a', 5), ('b', 2), ('r', 2)]
+ >>> Counter('abracadabra').most_common(3)
+ [('a', 5), ('b', 2), ('r', 2)]
'''
# Emulate Bag.sortedByCount from Smalltalk
@@ -636,17 +636,17 @@ class Counter(dict):
@classmethod
def fromkeys(cls, iterable, v=None):
- # There is no equivalent method for counters because the semantics
- # would be ambiguous in cases such as Counter.fromkeys('aaabbc', v=2).
- # Initializing counters to zero values isn't necessary because zero
- # is already the default value for counter lookups. Initializing
- # to one is easily accomplished with Counter(set(iterable)). For
- # more exotic cases, create a dictionary first using a dictionary
- # comprehension or dict.fromkeys().
+ # There is no equivalent method for counters because the semantics
+ # would be ambiguous in cases such as Counter.fromkeys('aaabbc', v=2).
+ # Initializing counters to zero values isn't necessary because zero
+ # is already the default value for counter lookups. Initializing
+ # to one is easily accomplished with Counter(set(iterable)). For
+ # more exotic cases, create a dictionary first using a dictionary
+ # comprehension or dict.fromkeys().
raise NotImplementedError(
'Counter.fromkeys() is undefined. Use Counter(iterable) instead.')
- def update(self, iterable=None, /, **kwds):
+ def update(self, iterable=None, /, **kwds):
'''Like dict.update() but add counts instead of replacing them.
Source can be an iterable, a dictionary, or another Counter instance.
@@ -673,14 +673,14 @@ class Counter(dict):
for elem, count in iterable.items():
self[elem] = count + self_get(elem, 0)
else:
- # fast path when counter is empty
- super().update(iterable)
+ # fast path when counter is empty
+ super().update(iterable)
else:
_count_elements(self, iterable)
if kwds:
self.update(kwds)
- def subtract(self, iterable=None, /, **kwds):
+ def subtract(self, iterable=None, /, **kwds):
'''Like dict.update() but subtracts counts instead of replacing them.
Counts can be reduced below zero. Both the inputs and outputs are
allowed to contain zero and negative counts.
@@ -721,14 +721,14 @@ class Counter(dict):
def __repr__(self):
if not self:
- return f'{self.__class__.__name__}()'
+ return f'{self.__class__.__name__}()'
try:
- # dict() preserves the ordering returned by most_common()
- d = dict(self.most_common())
+ # dict() preserves the ordering returned by most_common()
+ d = dict(self.most_common())
except TypeError:
# handle case where values are not orderable
- d = dict(self)
- return f'{self.__class__.__name__}({d!r})'
+ d = dict(self)
+ return f'{self.__class__.__name__}({d!r})'
# Multiset-style mathematical operations discussed in:
# Knuth TAOCP Volume II section 4.6.3 exercise 19
@@ -738,13 +738,13 @@ class Counter(dict):
#
# To strip negative and zero counts, add-in an empty counter:
# c += Counter()
- #
- # Rich comparison operators for multiset subset and superset tests
- # are deliberately omitted due to semantic conflicts with the
- # existing inherited dict equality method. Subset and superset
- # semantics ignore zero counts and require that p≤q ∧ p≥q → p=q;
- # however, that would not be the case for p=Counter(a=1, b=0)
- # and q=Counter(a=1) where the dictionaries are not equal.
+ #
+ # Rich comparison operators for multiset subset and superset tests
+ # are deliberately omitted due to semantic conflicts with the
+ # existing inherited dict equality method. Subset and superset
+ # semantics ignore zero counts and require that p≤q ∧ p≥q → p=q;
+ # however, that would not be the case for p=Counter(a=1, b=0)
+ # and q=Counter(a=1) where the dictionaries are not equal.
def __add__(self, other):
'''Add counts from two counters.
@@ -949,7 +949,7 @@ class ChainMap(_collections_abc.MutableMapping):
def __iter__(self):
d = {}
for mapping in reversed(self.maps):
- d.update(dict.fromkeys(mapping)) # reuses stored hash values if possible
+ d.update(dict.fromkeys(mapping)) # reuses stored hash values if possible
return iter(d)
def __contains__(self, key):
@@ -960,7 +960,7 @@ class ChainMap(_collections_abc.MutableMapping):
@_recursive_repr()
def __repr__(self):
- return f'{self.__class__.__name__}({", ".join(map(repr, self.maps))})'
+ return f'{self.__class__.__name__}({", ".join(map(repr, self.maps))})'
@classmethod
def fromkeys(cls, iterable, *args):
@@ -993,7 +993,7 @@ class ChainMap(_collections_abc.MutableMapping):
try:
del self.maps[0][key]
except KeyError:
- raise KeyError(f'Key not found in the first mapping: {key!r}')
+ raise KeyError(f'Key not found in the first mapping: {key!r}')
def popitem(self):
        'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
@@ -1007,32 +1007,32 @@ class ChainMap(_collections_abc.MutableMapping):
try:
return self.maps[0].pop(key, *args)
except KeyError:
- raise KeyError(f'Key not found in the first mapping: {key!r}')
+ raise KeyError(f'Key not found in the first mapping: {key!r}')
def clear(self):
'Clear maps[0], leaving maps[1:] intact.'
self.maps[0].clear()
- def __ior__(self, other):
- self.maps[0].update(other)
- return self
-
- def __or__(self, other):
- if not isinstance(other, _collections_abc.Mapping):
- return NotImplemented
- m = self.copy()
- m.maps[0].update(other)
- return m
-
- def __ror__(self, other):
- if not isinstance(other, _collections_abc.Mapping):
- return NotImplemented
- m = dict(other)
- for child in reversed(self.maps):
- m.update(child)
- return self.__class__(m)
-
-
+ def __ior__(self, other):
+ self.maps[0].update(other)
+ return self
+
+ def __or__(self, other):
+ if not isinstance(other, _collections_abc.Mapping):
+ return NotImplemented
+ m = self.copy()
+ m.maps[0].update(other)
+ return m
+
+ def __ror__(self, other):
+ if not isinstance(other, _collections_abc.Mapping):
+ return NotImplemented
+ m = dict(other)
+ for child in reversed(self.maps):
+ m.update(child)
+ return self.__class__(m)
+
+
################################################################################
### UserDict
################################################################################
@@ -1040,29 +1040,29 @@ class ChainMap(_collections_abc.MutableMapping):
class UserDict(_collections_abc.MutableMapping):
# Start by filling-out the abstract methods
- def __init__(self, dict=None, /, **kwargs):
+ def __init__(self, dict=None, /, **kwargs):
self.data = {}
if dict is not None:
self.update(dict)
- if kwargs:
+ if kwargs:
self.update(kwargs)
-
- def __len__(self):
- return len(self.data)
-
+
+ def __len__(self):
+ return len(self.data)
+
def __getitem__(self, key):
if key in self.data:
return self.data[key]
if hasattr(self.__class__, "__missing__"):
return self.__class__.__missing__(self, key)
raise KeyError(key)
-
- def __setitem__(self, key, item):
- self.data[key] = item
-
- def __delitem__(self, key):
- del self.data[key]
-
+
+ def __setitem__(self, key, item):
+ self.data[key] = item
+
+ def __delitem__(self, key):
+ del self.data[key]
+
def __iter__(self):
return iter(self.data)
@@ -1071,37 +1071,37 @@ class UserDict(_collections_abc.MutableMapping):
return key in self.data
# Now, add the methods in dicts but not in MutableMapping
- def __repr__(self):
- return repr(self.data)
-
- def __or__(self, other):
- if isinstance(other, UserDict):
- return self.__class__(self.data | other.data)
- if isinstance(other, dict):
- return self.__class__(self.data | other)
- return NotImplemented
-
- def __ror__(self, other):
- if isinstance(other, UserDict):
- return self.__class__(other.data | self.data)
- if isinstance(other, dict):
- return self.__class__(other | self.data)
- return NotImplemented
-
- def __ior__(self, other):
- if isinstance(other, UserDict):
- self.data |= other.data
- else:
- self.data |= other
- return self
-
- def __copy__(self):
- inst = self.__class__.__new__(self.__class__)
- inst.__dict__.update(self.__dict__)
- # Create a copy and avoid triggering descriptors
- inst.__dict__["data"] = self.__dict__["data"].copy()
- return inst
-
+ def __repr__(self):
+ return repr(self.data)
+
+ def __or__(self, other):
+ if isinstance(other, UserDict):
+ return self.__class__(self.data | other.data)
+ if isinstance(other, dict):
+ return self.__class__(self.data | other)
+ return NotImplemented
+
+ def __ror__(self, other):
+ if isinstance(other, UserDict):
+ return self.__class__(other.data | self.data)
+ if isinstance(other, dict):
+ return self.__class__(other | self.data)
+ return NotImplemented
+
+ def __ior__(self, other):
+ if isinstance(other, UserDict):
+ self.data |= other.data
+ else:
+ self.data |= other
+ return self
+
+ def __copy__(self):
+ inst = self.__class__.__new__(self.__class__)
+ inst.__dict__.update(self.__dict__)
+ # Create a copy and avoid triggering descriptors
+ inst.__dict__["data"] = self.__dict__["data"].copy()
+ return inst
+
def copy(self):
if self.__class__ is UserDict:
return UserDict(self.data.copy())
@@ -1114,7 +1114,7 @@ class UserDict(_collections_abc.MutableMapping):
self.data = data
c.update(self)
return c
-
+
@classmethod
def fromkeys(cls, iterable, value=None):
d = cls()
@@ -1129,7 +1129,7 @@ class UserDict(_collections_abc.MutableMapping):
class UserList(_collections_abc.MutableSequence):
"""A more or less complete user-defined wrapper around list objects."""
-
+
def __init__(self, initlist=None):
self.data = []
if initlist is not None:
@@ -1140,60 +1140,60 @@ class UserList(_collections_abc.MutableSequence):
self.data[:] = initlist.data[:]
else:
self.data = list(initlist)
-
- def __repr__(self):
- return repr(self.data)
-
- def __lt__(self, other):
- return self.data < self.__cast(other)
-
- def __le__(self, other):
- return self.data <= self.__cast(other)
-
- def __eq__(self, other):
- return self.data == self.__cast(other)
-
- def __gt__(self, other):
- return self.data > self.__cast(other)
-
- def __ge__(self, other):
- return self.data >= self.__cast(other)
-
+
+ def __repr__(self):
+ return repr(self.data)
+
+ def __lt__(self, other):
+ return self.data < self.__cast(other)
+
+ def __le__(self, other):
+ return self.data <= self.__cast(other)
+
+ def __eq__(self, other):
+ return self.data == self.__cast(other)
+
+ def __gt__(self, other):
+ return self.data > self.__cast(other)
+
+ def __ge__(self, other):
+ return self.data >= self.__cast(other)
+
def __cast(self, other):
return other.data if isinstance(other, UserList) else other
-
- def __contains__(self, item):
- return item in self.data
-
- def __len__(self):
- return len(self.data)
-
- def __getitem__(self, i):
- if isinstance(i, slice):
- return self.__class__(self.data[i])
- else:
- return self.data[i]
-
- def __setitem__(self, i, item):
- self.data[i] = item
-
- def __delitem__(self, i):
- del self.data[i]
-
+
+ def __contains__(self, item):
+ return item in self.data
+
+ def __len__(self):
+ return len(self.data)
+
+ def __getitem__(self, i):
+ if isinstance(i, slice):
+ return self.__class__(self.data[i])
+ else:
+ return self.data[i]
+
+ def __setitem__(self, i, item):
+ self.data[i] = item
+
+ def __delitem__(self, i):
+ del self.data[i]
+
def __add__(self, other):
if isinstance(other, UserList):
return self.__class__(self.data + other.data)
elif isinstance(other, type(self.data)):
return self.__class__(self.data + other)
return self.__class__(self.data + list(other))
-
+
def __radd__(self, other):
if isinstance(other, UserList):
return self.__class__(other.data + self.data)
elif isinstance(other, type(self.data)):
return self.__class__(other + self.data)
return self.__class__(list(other) + self.data)
-
+
def __iadd__(self, other):
if isinstance(other, UserList):
self.data += other.data
@@ -1202,53 +1202,53 @@ class UserList(_collections_abc.MutableSequence):
else:
self.data += list(other)
return self
-
+
def __mul__(self, n):
- return self.__class__(self.data * n)
-
+ return self.__class__(self.data * n)
+
__rmul__ = __mul__
-
+
def __imul__(self, n):
self.data *= n
return self
-
- def __copy__(self):
- inst = self.__class__.__new__(self.__class__)
- inst.__dict__.update(self.__dict__)
- # Create a copy and avoid triggering descriptors
- inst.__dict__["data"] = self.__dict__["data"][:]
- return inst
-
- def append(self, item):
- self.data.append(item)
-
- def insert(self, i, item):
- self.data.insert(i, item)
-
- def pop(self, i=-1):
- return self.data.pop(i)
-
- def remove(self, item):
- self.data.remove(item)
-
- def clear(self):
- self.data.clear()
-
- def copy(self):
- return self.__class__(self)
-
- def count(self, item):
- return self.data.count(item)
-
- def index(self, item, *args):
- return self.data.index(item, *args)
-
- def reverse(self):
- self.data.reverse()
-
- def sort(self, /, *args, **kwds):
- self.data.sort(*args, **kwds)
-
+
+ def __copy__(self):
+ inst = self.__class__.__new__(self.__class__)
+ inst.__dict__.update(self.__dict__)
+ # Create a copy and avoid triggering descriptors
+ inst.__dict__["data"] = self.__dict__["data"][:]
+ return inst
+
+ def append(self, item):
+ self.data.append(item)
+
+ def insert(self, i, item):
+ self.data.insert(i, item)
+
+ def pop(self, i=-1):
+ return self.data.pop(i)
+
+ def remove(self, item):
+ self.data.remove(item)
+
+ def clear(self):
+ self.data.clear()
+
+ def copy(self):
+ return self.__class__(self)
+
+ def count(self, item):
+ return self.data.count(item)
+
+ def index(self, item, *args):
+ return self.data.index(item, *args)
+
+ def reverse(self):
+ self.data.reverse()
+
+ def sort(self, /, *args, **kwds):
+ self.data.sort(*args, **kwds)
+
def extend(self, other):
if isinstance(other, UserList):
self.data.extend(other.data)
@@ -1261,7 +1261,7 @@ class UserList(_collections_abc.MutableSequence):
################################################################################
class UserString(_collections_abc.Sequence):
-
+
def __init__(self, seq):
if isinstance(seq, str):
self.data = seq
@@ -1269,25 +1269,25 @@ class UserString(_collections_abc.Sequence):
self.data = seq.data[:]
else:
self.data = str(seq)
-
- def __str__(self):
- return str(self.data)
-
- def __repr__(self):
- return repr(self.data)
-
- def __int__(self):
- return int(self.data)
-
- def __float__(self):
- return float(self.data)
-
- def __complex__(self):
- return complex(self.data)
-
- def __hash__(self):
- return hash(self.data)
-
+
+ def __str__(self):
+ return str(self.data)
+
+ def __repr__(self):
+ return repr(self.data)
+
+ def __int__(self):
+ return int(self.data)
+
+ def __float__(self):
+ return float(self.data)
+
+ def __complex__(self):
+ return complex(self.data)
+
+ def __hash__(self):
+ return hash(self.data)
+
def __getnewargs__(self):
return (self.data[:],)
@@ -1295,22 +1295,22 @@ class UserString(_collections_abc.Sequence):
if isinstance(string, UserString):
return self.data == string.data
return self.data == string
-
+
def __lt__(self, string):
if isinstance(string, UserString):
return self.data < string.data
return self.data < string
-
+
def __le__(self, string):
if isinstance(string, UserString):
return self.data <= string.data
return self.data <= string
-
+
def __gt__(self, string):
if isinstance(string, UserString):
return self.data > string.data
return self.data > string
-
+
def __ge__(self, string):
if isinstance(string, UserString):
return self.data >= string.data
@@ -1321,188 +1321,188 @@ class UserString(_collections_abc.Sequence):
char = char.data
return char in self.data
- def __len__(self):
- return len(self.data)
-
- def __getitem__(self, index):
- return self.__class__(self.data[index])
-
+ def __len__(self):
+ return len(self.data)
+
+ def __getitem__(self, index):
+ return self.__class__(self.data[index])
+
def __add__(self, other):
if isinstance(other, UserString):
return self.__class__(self.data + other.data)
elif isinstance(other, str):
return self.__class__(self.data + other)
return self.__class__(self.data + str(other))
-
+
def __radd__(self, other):
if isinstance(other, str):
return self.__class__(other + self.data)
return self.__class__(str(other) + self.data)
-
+
def __mul__(self, n):
- return self.__class__(self.data * n)
-
+ return self.__class__(self.data * n)
+
__rmul__ = __mul__
-
+
def __mod__(self, args):
return self.__class__(self.data % args)
-
- def __rmod__(self, template):
- return self.__class__(str(template) % self)
-
+
+ def __rmod__(self, template):
+ return self.__class__(str(template) % self)
+
# the following methods are defined in alphabetical order:
- def capitalize(self):
- return self.__class__(self.data.capitalize())
-
+ def capitalize(self):
+ return self.__class__(self.data.capitalize())
+
def casefold(self):
return self.__class__(self.data.casefold())
-
+
def center(self, width, *args):
return self.__class__(self.data.center(width, *args))
-
+
def count(self, sub, start=0, end=_sys.maxsize):
if isinstance(sub, UserString):
sub = sub.data
return self.data.count(sub, start, end)
-
- def removeprefix(self, prefix, /):
- if isinstance(prefix, UserString):
- prefix = prefix.data
- return self.__class__(self.data.removeprefix(prefix))
-
- def removesuffix(self, suffix, /):
- if isinstance(suffix, UserString):
- suffix = suffix.data
- return self.__class__(self.data.removesuffix(suffix))
-
- def encode(self, encoding='utf-8', errors='strict'):
- encoding = 'utf-8' if encoding is None else encoding
- errors = 'strict' if errors is None else errors
- return self.data.encode(encoding, errors)
-
+
+ def removeprefix(self, prefix, /):
+ if isinstance(prefix, UserString):
+ prefix = prefix.data
+ return self.__class__(self.data.removeprefix(prefix))
+
+ def removesuffix(self, suffix, /):
+ if isinstance(suffix, UserString):
+ suffix = suffix.data
+ return self.__class__(self.data.removesuffix(suffix))
+
+ def encode(self, encoding='utf-8', errors='strict'):
+ encoding = 'utf-8' if encoding is None else encoding
+ errors = 'strict' if errors is None else errors
+ return self.data.encode(encoding, errors)
+
def endswith(self, suffix, start=0, end=_sys.maxsize):
return self.data.endswith(suffix, start, end)
-
+
def expandtabs(self, tabsize=8):
return self.__class__(self.data.expandtabs(tabsize))
-
+
def find(self, sub, start=0, end=_sys.maxsize):
if isinstance(sub, UserString):
sub = sub.data
return self.data.find(sub, start, end)
-
- def format(self, /, *args, **kwds):
+
+ def format(self, /, *args, **kwds):
return self.data.format(*args, **kwds)
-
+
def format_map(self, mapping):
return self.data.format_map(mapping)
-
+
def index(self, sub, start=0, end=_sys.maxsize):
return self.data.index(sub, start, end)
-
- def isalpha(self):
- return self.data.isalpha()
-
- def isalnum(self):
- return self.data.isalnum()
-
- def isascii(self):
- return self.data.isascii()
-
- def isdecimal(self):
- return self.data.isdecimal()
-
- def isdigit(self):
- return self.data.isdigit()
-
- def isidentifier(self):
- return self.data.isidentifier()
-
- def islower(self):
- return self.data.islower()
-
- def isnumeric(self):
- return self.data.isnumeric()
-
- def isprintable(self):
- return self.data.isprintable()
-
- def isspace(self):
- return self.data.isspace()
-
- def istitle(self):
- return self.data.istitle()
-
- def isupper(self):
- return self.data.isupper()
-
- def join(self, seq):
- return self.data.join(seq)
-
+
+ def isalpha(self):
+ return self.data.isalpha()
+
+ def isalnum(self):
+ return self.data.isalnum()
+
+ def isascii(self):
+ return self.data.isascii()
+
+ def isdecimal(self):
+ return self.data.isdecimal()
+
+ def isdigit(self):
+ return self.data.isdigit()
+
+ def isidentifier(self):
+ return self.data.isidentifier()
+
+ def islower(self):
+ return self.data.islower()
+
+ def isnumeric(self):
+ return self.data.isnumeric()
+
+ def isprintable(self):
+ return self.data.isprintable()
+
+ def isspace(self):
+ return self.data.isspace()
+
+ def istitle(self):
+ return self.data.istitle()
+
+ def isupper(self):
+ return self.data.isupper()
+
+ def join(self, seq):
+ return self.data.join(seq)
+
def ljust(self, width, *args):
return self.__class__(self.data.ljust(width, *args))
-
- def lower(self):
- return self.__class__(self.data.lower())
-
- def lstrip(self, chars=None):
- return self.__class__(self.data.lstrip(chars))
-
+
+ def lower(self):
+ return self.__class__(self.data.lower())
+
+ def lstrip(self, chars=None):
+ return self.__class__(self.data.lstrip(chars))
+
maketrans = str.maketrans
-
+
def partition(self, sep):
return self.data.partition(sep)
-
+
def replace(self, old, new, maxsplit=-1):
if isinstance(old, UserString):
old = old.data
if isinstance(new, UserString):
new = new.data
return self.__class__(self.data.replace(old, new, maxsplit))
-
+
def rfind(self, sub, start=0, end=_sys.maxsize):
if isinstance(sub, UserString):
sub = sub.data
return self.data.rfind(sub, start, end)
-
+
def rindex(self, sub, start=0, end=_sys.maxsize):
return self.data.rindex(sub, start, end)
-
+
def rjust(self, width, *args):
return self.__class__(self.data.rjust(width, *args))
-
+
def rpartition(self, sep):
return self.data.rpartition(sep)
-
+
def rstrip(self, chars=None):
return self.__class__(self.data.rstrip(chars))
-
+
def split(self, sep=None, maxsplit=-1):
return self.data.split(sep, maxsplit)
-
+
def rsplit(self, sep=None, maxsplit=-1):
return self.data.rsplit(sep, maxsplit)
-
- def splitlines(self, keepends=False):
- return self.data.splitlines(keepends)
-
+
+ def splitlines(self, keepends=False):
+ return self.data.splitlines(keepends)
+
def startswith(self, prefix, start=0, end=_sys.maxsize):
return self.data.startswith(prefix, start, end)
-
- def strip(self, chars=None):
- return self.__class__(self.data.strip(chars))
-
- def swapcase(self):
- return self.__class__(self.data.swapcase())
-
- def title(self):
- return self.__class__(self.data.title())
-
+
+ def strip(self, chars=None):
+ return self.__class__(self.data.strip(chars))
+
+ def swapcase(self):
+ return self.__class__(self.data.swapcase())
+
+ def title(self):
+ return self.__class__(self.data.title())
+
def translate(self, *args):
return self.__class__(self.data.translate(*args))
-
- def upper(self):
- return self.__class__(self.data.upper())
-
- def zfill(self, width):
- return self.__class__(self.data.zfill(width))
+
+ def upper(self):
+ return self.__class__(self.data.upper())
+
+ def zfill(self, width):
+ return self.__class__(self.data.zfill(width))
diff --git a/contrib/tools/python3/src/Lib/collections/abc.py b/contrib/tools/python3/src/Lib/collections/abc.py
index 86ca8b8a84..6a987916c0 100644
--- a/contrib/tools/python3/src/Lib/collections/abc.py
+++ b/contrib/tools/python3/src/Lib/collections/abc.py
@@ -1,3 +1,3 @@
from _collections_abc import *
from _collections_abc import __all__
-from _collections_abc import _CallableGenericAlias
+from _collections_abc import _CallableGenericAlias
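
The extra _CallableGenericAlias re-export is what makes collections.abc.Callable subscriptable with a flattened argument list (Python 3.9+). A minimal sketch:

    from collections.abc import Callable

    alias = Callable[[int, str], bool]
    print(alias.__args__)    # (int, str, bool) -- arguments flattened by the alias
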
diff --git a/contrib/tools/python3/src/Lib/compileall.py b/contrib/tools/python3/src/Lib/compileall.py
index 25ad83c229..5ec5e5675f 100644
--- a/contrib/tools/python3/src/Lib/compileall.py
+++ b/contrib/tools/python3/src/Lib/compileall.py
@@ -15,14 +15,14 @@ import sys
import importlib.util
import py_compile
import struct
-import filecmp
+import filecmp
from functools import partial
-from pathlib import Path
+from pathlib import Path
__all__ = ["compile_dir","compile_file","compile_path"]
-def _walk_dir(dir, maxlevels, quiet=0):
+def _walk_dir(dir, maxlevels, quiet=0):
if quiet < 2 and isinstance(dir, os.PathLike):
dir = os.fspath(dir)
if not quiet:
@@ -39,91 +39,91 @@ def _walk_dir(dir, maxlevels, quiet=0):
continue
fullname = os.path.join(dir, name)
if not os.path.isdir(fullname):
- yield fullname
+ yield fullname
elif (maxlevels > 0 and name != os.curdir and name != os.pardir and
os.path.isdir(fullname) and not os.path.islink(fullname)):
- yield from _walk_dir(fullname, maxlevels=maxlevels - 1,
- quiet=quiet)
+ yield from _walk_dir(fullname, maxlevels=maxlevels - 1,
+ quiet=quiet)
-def compile_dir(dir, maxlevels=None, ddir=None, force=False,
- rx=None, quiet=0, legacy=False, optimize=-1, workers=1,
- invalidation_mode=None, *, stripdir=None,
- prependdir=None, limit_sl_dest=None, hardlink_dupes=False):
+def compile_dir(dir, maxlevels=None, ddir=None, force=False,
+ rx=None, quiet=0, legacy=False, optimize=-1, workers=1,
+ invalidation_mode=None, *, stripdir=None,
+ prependdir=None, limit_sl_dest=None, hardlink_dupes=False):
"""Byte-compile all modules in the given directory tree.
Arguments (only dir is required):
dir: the directory to byte-compile
- maxlevels: maximum recursion level (default `sys.getrecursionlimit()`)
+ maxlevels: maximum recursion level (default `sys.getrecursionlimit()`)
ddir: the directory that will be prepended to the path to the
file as it is compiled into each byte-code file.
force: if True, force compilation, even if timestamps are up-to-date
quiet: full output with False or 0, errors only with 1,
no output with 2
legacy: if True, produce legacy pyc paths instead of PEP 3147 paths
- optimize: int or list of optimization levels or -1 for level of
- the interpreter. Multiple levels lead to multiple compiled
- files each with one optimization level.
+ optimize: int or list of optimization levels or -1 for level of
+ the interpreter. Multiple levels lead to multiple compiled
+ files each with one optimization level.
workers: maximum number of parallel workers
invalidation_mode: how the up-to-dateness of the pyc will be checked
- stripdir: part of path to left-strip from source file path
- prependdir: path to prepend to beginning of original file path, applied
- after stripdir
- limit_sl_dest: ignore symlinks if they are pointing outside of
- the defined path
- hardlink_dupes: hardlink duplicated pyc files
+ stripdir: part of path to left-strip from source file path
+ prependdir: path to prepend to beginning of original file path, applied
+ after stripdir
+ limit_sl_dest: ignore symlinks if they are pointing outside of
+ the defined path
+ hardlink_dupes: hardlink duplicated pyc files
"""
ProcessPoolExecutor = None
- if ddir is not None and (stripdir is not None or prependdir is not None):
- raise ValueError(("Destination dir (ddir) cannot be used "
- "in combination with stripdir or prependdir"))
- if ddir is not None:
- stripdir = dir
- prependdir = ddir
- ddir = None
- if workers < 0:
- raise ValueError('workers must be greater or equal to 0')
- if workers != 1:
- try:
- # Only import when needed, as low resource platforms may
- # fail to import it
- from concurrent.futures import ProcessPoolExecutor
- except ImportError:
- workers = 1
- if maxlevels is None:
- maxlevels = sys.getrecursionlimit()
- files = _walk_dir(dir, quiet=quiet, maxlevels=maxlevels)
+ if ddir is not None and (stripdir is not None or prependdir is not None):
+ raise ValueError(("Destination dir (ddir) cannot be used "
+ "in combination with stripdir or prependdir"))
+ if ddir is not None:
+ stripdir = dir
+ prependdir = ddir
+ ddir = None
+ if workers < 0:
+ raise ValueError('workers must be greater or equal to 0')
+ if workers != 1:
+ try:
+ # Only import when needed, as low resource platforms may
+ # fail to import it
+ from concurrent.futures import ProcessPoolExecutor
+ except ImportError:
+ workers = 1
+ if maxlevels is None:
+ maxlevels = sys.getrecursionlimit()
+ files = _walk_dir(dir, quiet=quiet, maxlevels=maxlevels)
success = True
- if workers != 1 and ProcessPoolExecutor is not None:
- # If workers == 0, let ProcessPoolExecutor choose
+ if workers != 1 and ProcessPoolExecutor is not None:
+ # If workers == 0, let ProcessPoolExecutor choose
workers = workers or None
with ProcessPoolExecutor(max_workers=workers) as executor:
- results = executor.map(partial(compile_file,
- ddir=ddir, force=force,
- rx=rx, quiet=quiet,
- legacy=legacy,
- optimize=optimize,
- invalidation_mode=invalidation_mode,
- stripdir=stripdir,
- prependdir=prependdir,
- limit_sl_dest=limit_sl_dest,
- hardlink_dupes=hardlink_dupes),
- files)
+ results = executor.map(partial(compile_file,
+ ddir=ddir, force=force,
+ rx=rx, quiet=quiet,
+ legacy=legacy,
+ optimize=optimize,
+ invalidation_mode=invalidation_mode,
+ stripdir=stripdir,
+ prependdir=prependdir,
+ limit_sl_dest=limit_sl_dest,
+ hardlink_dupes=hardlink_dupes),
+ files)
success = min(results, default=True)
else:
- for file in files:
- if not compile_file(file, ddir, force, rx, quiet,
- legacy, optimize, invalidation_mode,
- stripdir=stripdir, prependdir=prependdir,
- limit_sl_dest=limit_sl_dest,
- hardlink_dupes=hardlink_dupes):
+ for file in files:
+ if not compile_file(file, ddir, force, rx, quiet,
+ legacy, optimize, invalidation_mode,
+ stripdir=stripdir, prependdir=prependdir,
+ limit_sl_dest=limit_sl_dest,
+ hardlink_dupes=hardlink_dupes):
success = False
return success
def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0,
legacy=False, optimize=-1,
- invalidation_mode=None, *, stripdir=None, prependdir=None,
- limit_sl_dest=None, hardlink_dupes=False):
+ invalidation_mode=None, *, stripdir=None, prependdir=None,
+ limit_sl_dest=None, hardlink_dupes=False):
"""Byte-compile one file.
Arguments (only fullname is required):
@@ -135,114 +135,114 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0,
quiet: full output with False or 0, errors only with 1,
no output with 2
legacy: if True, produce legacy pyc paths instead of PEP 3147 paths
- optimize: int or list of optimization levels or -1 for level of
- the interpreter. Multiple levels lead to multiple compiled
- files each with one optimization level.
+ optimize: int or list of optimization levels or -1 for level of
+ the interpreter. Multiple levels lead to multiple compiled
+ files each with one optimization level.
invalidation_mode: how the up-to-dateness of the pyc will be checked
- stripdir: part of path to left-strip from source file path
- prependdir: path to prepend to beginning of original file path, applied
- after stripdir
- limit_sl_dest: ignore symlinks if they are pointing outside of
- the defined path.
- hardlink_dupes: hardlink duplicated pyc files
+ stripdir: part of path to left-strip from source file path
+ prependdir: path to prepend to beginning of original file path, applied
+ after stripdir
+ limit_sl_dest: ignore symlinks if they are pointing outside of
+ the defined path.
+ hardlink_dupes: hardlink duplicated pyc files
"""
-
- if ddir is not None and (stripdir is not None or prependdir is not None):
- raise ValueError(("Destination dir (ddir) cannot be used "
- "in combination with stripdir or prependdir"))
-
+
+ if ddir is not None and (stripdir is not None or prependdir is not None):
+ raise ValueError(("Destination dir (ddir) cannot be used "
+ "in combination with stripdir or prependdir"))
+
success = True
if quiet < 2 and isinstance(fullname, os.PathLike):
fullname = os.fspath(fullname)
name = os.path.basename(fullname)
-
- dfile = None
-
+
+ dfile = None
+
if ddir is not None:
dfile = os.path.join(ddir, name)
-
- if stripdir is not None:
- fullname_parts = fullname.split(os.path.sep)
- stripdir_parts = stripdir.split(os.path.sep)
- ddir_parts = list(fullname_parts)
-
- for spart, opart in zip(stripdir_parts, fullname_parts):
- if spart == opart:
- ddir_parts.remove(spart)
-
- dfile = os.path.join(*ddir_parts)
-
- if prependdir is not None:
- if dfile is None:
- dfile = os.path.join(prependdir, fullname)
- else:
- dfile = os.path.join(prependdir, dfile)
-
- if isinstance(optimize, int):
- optimize = [optimize]
-
- # Use set() to remove duplicates.
- # Use sorted() to create pyc files in a deterministic order.
- optimize = sorted(set(optimize))
-
- if hardlink_dupes and len(optimize) < 2:
- raise ValueError("Hardlinking of duplicated bytecode makes sense "
- "only for more than one optimization level")
-
+
+ if stripdir is not None:
+ fullname_parts = fullname.split(os.path.sep)
+ stripdir_parts = stripdir.split(os.path.sep)
+ ddir_parts = list(fullname_parts)
+
+ for spart, opart in zip(stripdir_parts, fullname_parts):
+ if spart == opart:
+ ddir_parts.remove(spart)
+
+ dfile = os.path.join(*ddir_parts)
+
+ if prependdir is not None:
+ if dfile is None:
+ dfile = os.path.join(prependdir, fullname)
+ else:
+ dfile = os.path.join(prependdir, dfile)
+
+ if isinstance(optimize, int):
+ optimize = [optimize]
+
+ # Use set() to remove duplicates.
+ # Use sorted() to create pyc files in a deterministic order.
+ optimize = sorted(set(optimize))
+
+ if hardlink_dupes and len(optimize) < 2:
+ raise ValueError("Hardlinking of duplicated bytecode makes sense "
+ "only for more than one optimization level")
+
if rx is not None:
mo = rx.search(fullname)
if mo:
return success
-
- if limit_sl_dest is not None and os.path.islink(fullname):
- if Path(limit_sl_dest).resolve() not in Path(fullname).resolve().parents:
- return success
-
- opt_cfiles = {}
-
+
+ if limit_sl_dest is not None and os.path.islink(fullname):
+ if Path(limit_sl_dest).resolve() not in Path(fullname).resolve().parents:
+ return success
+
+ opt_cfiles = {}
+
if os.path.isfile(fullname):
- for opt_level in optimize:
- if legacy:
- opt_cfiles[opt_level] = fullname + 'c'
+ for opt_level in optimize:
+ if legacy:
+ opt_cfiles[opt_level] = fullname + 'c'
else:
- if opt_level >= 0:
- opt = opt_level if opt_level >= 1 else ''
- cfile = (importlib.util.cache_from_source(
- fullname, optimization=opt))
- opt_cfiles[opt_level] = cfile
- else:
- cfile = importlib.util.cache_from_source(fullname)
- opt_cfiles[opt_level] = cfile
-
+ if opt_level >= 0:
+ opt = opt_level if opt_level >= 1 else ''
+ cfile = (importlib.util.cache_from_source(
+ fullname, optimization=opt))
+ opt_cfiles[opt_level] = cfile
+ else:
+ cfile = importlib.util.cache_from_source(fullname)
+ opt_cfiles[opt_level] = cfile
+
head, tail = name[:-3], name[-3:]
if tail == '.py':
if not force:
try:
mtime = int(os.stat(fullname).st_mtime)
- expect = struct.pack('<4sLL', importlib.util.MAGIC_NUMBER,
- 0, mtime & 0xFFFF_FFFF)
- for cfile in opt_cfiles.values():
- with open(cfile, 'rb') as chandle:
- actual = chandle.read(12)
- if expect != actual:
- break
- else:
+ expect = struct.pack('<4sLL', importlib.util.MAGIC_NUMBER,
+ 0, mtime & 0xFFFF_FFFF)
+ for cfile in opt_cfiles.values():
+ with open(cfile, 'rb') as chandle:
+ actual = chandle.read(12)
+ if expect != actual:
+ break
+ else:
return success
except OSError:
pass
if not quiet:
print('Compiling {!r}...'.format(fullname))
try:
- for index, opt_level in enumerate(optimize):
- cfile = opt_cfiles[opt_level]
- ok = py_compile.compile(fullname, cfile, dfile, True,
- optimize=opt_level,
- invalidation_mode=invalidation_mode)
- if index > 0 and hardlink_dupes:
- previous_cfile = opt_cfiles[optimize[index - 1]]
- if filecmp.cmp(cfile, previous_cfile, shallow=False):
- os.unlink(cfile)
- os.link(previous_cfile, cfile)
+ for index, opt_level in enumerate(optimize):
+ cfile = opt_cfiles[opt_level]
+ ok = py_compile.compile(fullname, cfile, dfile, True,
+ optimize=opt_level,
+ invalidation_mode=invalidation_mode)
+ if index > 0 and hardlink_dupes:
+ previous_cfile = opt_cfiles[optimize[index - 1]]
+ if filecmp.cmp(cfile, previous_cfile, shallow=False):
+ os.unlink(cfile)
+ os.link(previous_cfile, cfile)
except py_compile.PyCompileError as err:
success = False
if quiet >= 2:
@@ -252,8 +252,8 @@ def compile_file(fullname, ddir=None, force=False, rx=None, quiet=0,
else:
print('*** ', end='')
# escape non-printable characters in msg
- encoding = sys.stdout.encoding or sys.getdefaultencoding()
- msg = err.msg.encode(encoding, errors='backslashreplace').decode(encoding)
+ encoding = sys.stdout.encoding or sys.getdefaultencoding()
+ msg = err.msg.encode(encoding, errors='backslashreplace').decode(encoding)
print(msg)
except (SyntaxError, UnicodeError, OSError) as e:
success = False
@@ -310,7 +310,7 @@ def main():
parser = argparse.ArgumentParser(
description='Utilities to support installing Python libraries.')
parser.add_argument('-l', action='store_const', const=0,
- default=None, dest='maxlevels',
+ default=None, dest='maxlevels',
help="don't recurse into subdirectories")
parser.add_argument('-r', type=int, dest='recursion',
help=('control the maximum recursion level. '
@@ -328,20 +328,20 @@ def main():
'compile-time tracebacks and in runtime '
'tracebacks in cases where the source file is '
'unavailable'))
- parser.add_argument('-s', metavar='STRIPDIR', dest='stripdir',
- default=None,
- help=('part of path to left-strip from path '
- 'to source file - for example buildroot. '
- '`-d` and `-s` options cannot be '
- 'specified together.'))
- parser.add_argument('-p', metavar='PREPENDDIR', dest='prependdir',
- default=None,
- help=('path to add as prefix to path '
- 'to source file - for example / to make '
- 'it absolute when some part is removed '
- 'by `-s` option. '
- '`-d` and `-p` options cannot be '
- 'specified together.'))
+ parser.add_argument('-s', metavar='STRIPDIR', dest='stripdir',
+ default=None,
+ help=('part of path to left-strip from path '
+ 'to source file - for example buildroot. '
+ '`-d` and `-s` options cannot be '
+ 'specified together.'))
+ parser.add_argument('-p', metavar='PREPENDDIR', dest='prependdir',
+ default=None,
+ help=('path to add as prefix to path '
+ 'to source file - for example / to make '
+ 'it absolute when some part is removed '
+ 'by `-s` option. '
+ '`-d` and `-p` options cannot be '
+ 'specified together.'))
parser.add_argument('-x', metavar='REGEXP', dest='rx', default=None,
help=('skip files matching the regular expression; '
'the regexp is searched for in the full path '
@@ -364,15 +364,15 @@ def main():
'"checked-hash" if the SOURCE_DATE_EPOCH '
'environment variable is set, and '
'"timestamp" otherwise.'))
- parser.add_argument('-o', action='append', type=int, dest='opt_levels',
- help=('Optimization levels to run compilation with. '
- 'Default is -1 which uses the optimization level '
- 'of the Python interpreter itself (see -O).'))
- parser.add_argument('-e', metavar='DIR', dest='limit_sl_dest',
- help='Ignore symlinks pointing outside of the DIR')
- parser.add_argument('--hardlink-dupes', action='store_true',
- dest='hardlink_dupes',
- help='Hardlink duplicated pyc files')
+ parser.add_argument('-o', action='append', type=int, dest='opt_levels',
+ help=('Optimization levels to run compilation with. '
+ 'Default is -1 which uses the optimization level '
+ 'of the Python interpreter itself (see -O).'))
+ parser.add_argument('-e', metavar='DIR', dest='limit_sl_dest',
+ help='Ignore symlinks pointing outside of the DIR')
+ parser.add_argument('--hardlink-dupes', action='store_true',
+ dest='hardlink_dupes',
+ help='Hardlink duplicated pyc files')
args = parser.parse_args()
compile_dests = args.compile_dest
@@ -381,26 +381,26 @@ def main():
import re
args.rx = re.compile(args.rx)
- if args.limit_sl_dest == "":
- args.limit_sl_dest = None
+ if args.limit_sl_dest == "":
+ args.limit_sl_dest = None
if args.recursion is not None:
maxlevels = args.recursion
else:
maxlevels = args.maxlevels
- if args.opt_levels is None:
- args.opt_levels = [-1]
-
- if len(args.opt_levels) == 1 and args.hardlink_dupes:
- parser.error(("Hardlinking of duplicated bytecode makes sense "
- "only for more than one optimization level."))
-
- if args.ddir is not None and (
- args.stripdir is not None or args.prependdir is not None
- ):
- parser.error("-d cannot be used in combination with -s or -p")
-
+ if args.opt_levels is None:
+ args.opt_levels = [-1]
+
+ if len(args.opt_levels) == 1 and args.hardlink_dupes:
+ parser.error(("Hardlinking of duplicated bytecode makes sense "
+ "only for more than one optimization level."))
+
+ if args.ddir is not None and (
+ args.stripdir is not None or args.prependdir is not None
+ ):
+ parser.error("-d cannot be used in combination with -s or -p")
+
# if flist is provided then load it
if args.flist:
try:
@@ -425,23 +425,23 @@ def main():
if os.path.isfile(dest):
if not compile_file(dest, args.ddir, args.force, args.rx,
args.quiet, args.legacy,
- invalidation_mode=invalidation_mode,
- stripdir=args.stripdir,
- prependdir=args.prependdir,
- optimize=args.opt_levels,
- limit_sl_dest=args.limit_sl_dest,
- hardlink_dupes=args.hardlink_dupes):
+ invalidation_mode=invalidation_mode,
+ stripdir=args.stripdir,
+ prependdir=args.prependdir,
+ optimize=args.opt_levels,
+ limit_sl_dest=args.limit_sl_dest,
+ hardlink_dupes=args.hardlink_dupes):
success = False
else:
if not compile_dir(dest, maxlevels, args.ddir,
args.force, args.rx, args.quiet,
args.legacy, workers=args.workers,
- invalidation_mode=invalidation_mode,
- stripdir=args.stripdir,
- prependdir=args.prependdir,
- optimize=args.opt_levels,
- limit_sl_dest=args.limit_sl_dest,
- hardlink_dupes=args.hardlink_dupes):
+ invalidation_mode=invalidation_mode,
+ stripdir=args.stripdir,
+ prependdir=args.prependdir,
+ optimize=args.opt_levels,
+ limit_sl_dest=args.limit_sl_dest,
+ hardlink_dupes=args.hardlink_dupes):
success = False
return success
else:
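
The stripdir/prependdir machinery restored above rewrites the source path recorded inside each pyc: the stripdir prefix is removed and prependdir is glued on the front, which is how a tree compiled in a buildroot can embed its final installed paths. A minimal sketch (temporary paths are illustrative; stripdir/prependdir require Python 3.9+):

    import compileall
    import pathlib
    import tempfile

    root = pathlib.Path(tempfile.mkdtemp())         # stands in for a buildroot
    (root / 'pkg').mkdir()
    (root / 'pkg' / 'mod.py').write_text('X = 1\n')

    # The pyc records /pkg/mod.py rather than the temporary buildroot path.
    compileall.compile_dir(root / 'pkg', stripdir=str(root), prependdir='/', quiet=1)
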
diff --git a/contrib/tools/python3/src/Lib/concurrent/futures/__init__.py b/contrib/tools/python3/src/Lib/concurrent/futures/__init__.py
index d746aeac50..07c638d39f 100644
--- a/contrib/tools/python3/src/Lib/concurrent/futures/__init__.py
+++ b/contrib/tools/python3/src/Lib/concurrent/futures/__init__.py
@@ -10,7 +10,7 @@ from concurrent.futures._base import (FIRST_COMPLETED,
ALL_COMPLETED,
CancelledError,
TimeoutError,
- InvalidStateError,
+ InvalidStateError,
BrokenExecutor,
Future,
Executor,
diff --git a/contrib/tools/python3/src/Lib/concurrent/futures/_base.py b/contrib/tools/python3/src/Lib/concurrent/futures/_base.py
index 5c00f2edbe..78d17dca4d 100644
--- a/contrib/tools/python3/src/Lib/concurrent/futures/_base.py
+++ b/contrib/tools/python3/src/Lib/concurrent/futures/_base.py
@@ -7,7 +7,7 @@ import collections
import logging
import threading
import time
-import types
+import types
FIRST_COMPLETED = 'FIRST_COMPLETED'
FIRST_EXCEPTION = 'FIRST_EXCEPTION'
@@ -54,10 +54,10 @@ class TimeoutError(Error):
"""The operation exceeded the given deadline."""
pass
-class InvalidStateError(Error):
- """The operation is not allowed in this state."""
- pass
-
+class InvalidStateError(Error):
+ """The operation is not allowed in this state."""
+ pass
+
class _Waiter(object):
"""Provides the event that wait() and as_completed() block on."""
def __init__(self):
@@ -284,14 +284,14 @@ def wait(fs, timeout=None, return_when=ALL_COMPLETED):
A named 2-tuple of sets. The first set, named 'done', contains the
futures that completed (is finished or cancelled) before the wait
completed. The second set, named 'not_done', contains uncompleted
- futures. Duplicate futures given to *fs* are removed and will be
- returned only once.
+ futures. Duplicate futures given to *fs* are removed and will be
+ returned only once.
"""
- fs = set(fs)
+ fs = set(fs)
with _AcquireFutures(fs):
- done = {f for f in fs
- if f._state in [CANCELLED_AND_NOTIFIED, FINISHED]}
- not_done = fs - done
+ done = {f for f in fs
+ if f._state in [CANCELLED_AND_NOTIFIED, FINISHED]}
+ not_done = fs - done
if (return_when == FIRST_COMPLETED) and done:
return DoneAndNotDoneFutures(done, not_done)
elif (return_when == FIRST_EXCEPTION) and done:
@@ -310,7 +310,7 @@ def wait(fs, timeout=None, return_when=ALL_COMPLETED):
f._waiters.remove(waiter)
done.update(waiter.finished_futures)
- return DoneAndNotDoneFutures(done, fs - done)
+ return DoneAndNotDoneFutures(done, fs - done)
class Future(object):
"""Represents the result of an asynchronous computation."""
@@ -387,11 +387,11 @@ class Future(object):
def __get_result(self):
if self._exception:
- try:
- raise self._exception
- finally:
- # Break a reference cycle with the exception in self._exception
- self = None
+ try:
+ raise self._exception
+ finally:
+ # Break a reference cycle with the exception in self._exception
+ self = None
else:
return self._result
@@ -410,10 +410,10 @@ class Future(object):
if self._state not in [CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED]:
self._done_callbacks.append(fn)
return
- try:
- fn(self)
- except Exception:
- LOGGER.exception('exception calling callback for %r', self)
+ try:
+ fn(self)
+ except Exception:
+ LOGGER.exception('exception calling callback for %r', self)
def result(self, timeout=None):
"""Return the result of the call that the future represents.
@@ -431,24 +431,24 @@ class Future(object):
timeout.
Exception: If the call raised then that exception will be raised.
"""
- try:
- with self._condition:
- if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
- raise CancelledError()
- elif self._state == FINISHED:
- return self.__get_result()
-
- self._condition.wait(timeout)
-
- if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
- raise CancelledError()
- elif self._state == FINISHED:
- return self.__get_result()
- else:
- raise TimeoutError()
- finally:
- # Break a reference cycle with the exception in self._exception
- self = None
+ try:
+ with self._condition:
+ if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+ raise CancelledError()
+ elif self._state == FINISHED:
+ return self.__get_result()
+
+ self._condition.wait(timeout)
+
+ if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
+ raise CancelledError()
+ elif self._state == FINISHED:
+ return self.__get_result()
+ else:
+ raise TimeoutError()
+ finally:
+ # Break a reference cycle with the exception in self._exception
+ self = None
def exception(self, timeout=None):
"""Return the exception raised by the call that the future represents.
@@ -530,8 +530,8 @@ class Future(object):
Should only be used by Executor implementations and unit tests.
"""
with self._condition:
- if self._state in {CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED}:
- raise InvalidStateError('{}: {!r}'.format(self._state, self))
+ if self._state in {CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED}:
+ raise InvalidStateError('{}: {!r}'.format(self._state, self))
self._result = result
self._state = FINISHED
for waiter in self._waiters:
@@ -545,8 +545,8 @@ class Future(object):
Should only be used by Executor implementations and unit tests.
"""
with self._condition:
- if self._state in {CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED}:
- raise InvalidStateError('{}: {!r}'.format(self._state, self))
+ if self._state in {CANCELLED, CANCELLED_AND_NOTIFIED, FINISHED}:
+ raise InvalidStateError('{}: {!r}'.format(self._state, self))
self._exception = exception
self._state = FINISHED
for waiter in self._waiters:
@@ -554,12 +554,12 @@ class Future(object):
self._condition.notify_all()
self._invoke_callbacks()
- __class_getitem__ = classmethod(types.GenericAlias)
-
+ __class_getitem__ = classmethod(types.GenericAlias)
+
class Executor(object):
"""This is an abstract base class for concrete asynchronous executors."""
- def submit(self, fn, /, *args, **kwargs):
+ def submit(self, fn, /, *args, **kwargs):
"""Submits a callable to be executed with the given arguments.
Schedules the callable to be executed as fn(*args, **kwargs) and returns
@@ -614,7 +614,7 @@ class Executor(object):
future.cancel()
return result_iterator()
- def shutdown(self, wait=True, *, cancel_futures=False):
+ def shutdown(self, wait=True, *, cancel_futures=False):
"""Clean-up the resources associated with the Executor.
It is safe to call this method several times. Otherwise, no other
@@ -624,9 +624,9 @@ class Executor(object):
wait: If True then shutdown will not return until all running
futures have finished executing and the resources used by the
executor have been reclaimed.
- cancel_futures: If True then shutdown will cancel all pending
- futures. Futures that are completed or running will not be
- cancelled.
+ cancel_futures: If True then shutdown will cancel all pending
+ futures. Futures that are completed or running will not be
+ cancelled.
"""
pass
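
The restored InvalidStateError paths above make Future.set_result() and set_exception() refuse to resolve a future twice (Python 3.8+ behaviour) instead of silently overwriting the outcome. A minimal sketch:

    from concurrent.futures import Future, InvalidStateError

    f = Future()
    f.set_result(1)
    try:
        f.set_result(2)                # already FINISHED -> rejected
    except InvalidStateError as e:
        print('rejected:', e)
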
diff --git a/contrib/tools/python3/src/Lib/concurrent/futures/process.py b/contrib/tools/python3/src/Lib/concurrent/futures/process.py
index a29e5247ab..fde626590c 100644
--- a/contrib/tools/python3/src/Lib/concurrent/futures/process.py
+++ b/contrib/tools/python3/src/Lib/concurrent/futures/process.py
@@ -3,7 +3,7 @@
"""Implements ProcessPoolExecutor.
-The following diagram and text describe the data-flow through the system:
+The following diagram and text describe the data-flow through the system:
|======================= In-process =====================|== Out-of-process ==|
@@ -49,13 +49,13 @@ import os
from concurrent.futures import _base
import queue
import multiprocessing as mp
-import multiprocessing.connection
+import multiprocessing.connection
from multiprocessing.queues import Queue
import threading
import weakref
from functools import partial
import itertools
-import sys
+import sys
import traceback
@@ -65,23 +65,23 @@ _global_shutdown = False
class _ThreadWakeup:
def __init__(self):
- self._closed = False
+ self._closed = False
self._reader, self._writer = mp.Pipe(duplex=False)
def close(self):
- if not self._closed:
- self._closed = True
- self._writer.close()
- self._reader.close()
+ if not self._closed:
+ self._closed = True
+ self._writer.close()
+ self._reader.close()
def wakeup(self):
- if not self._closed:
- self._writer.send_bytes(b"")
+ if not self._closed:
+ self._writer.send_bytes(b"")
def clear(self):
- if not self._closed:
- while self._reader.poll():
- self._reader.recv_bytes()
+ if not self._closed:
+ while self._reader.poll():
+ self._reader.recv_bytes()
def _python_exit():
@@ -89,17 +89,17 @@ def _python_exit():
_global_shutdown = True
items = list(_threads_wakeups.items())
for _, thread_wakeup in items:
- # call not protected by ProcessPoolExecutor._shutdown_lock
+ # call not protected by ProcessPoolExecutor._shutdown_lock
thread_wakeup.wakeup()
for t, _ in items:
t.join()
-# Register for `_python_exit()` to be called just before joining all
-# non-daemon threads. This is used instead of `atexit.register()` for
-# compatibility with subinterpreters, which no longer support daemon threads.
-# See bpo-39812 for context.
-threading._register_atexit(_python_exit)
-
+# Register for `_python_exit()` to be called just before joining all
+# non-daemon threads. This is used instead of `atexit.register()` for
+# compatibility with subinterpreters, which no longer support daemon threads.
+# See bpo-39812 for context.
+threading._register_atexit(_python_exit)
+
# Controls how many more calls than processes will be queued in the call queue.
# A smaller number will mean that processes spend more time idle waiting for
# work while a larger number will make Future.cancel() succeed less frequently
@@ -107,12 +107,12 @@ threading._register_atexit(_python_exit)
EXTRA_QUEUED_CALLS = 1
-# On Windows, WaitForMultipleObjects is used to wait for processes to finish.
-# It can wait on, at most, 63 objects. There is an overhead of two objects:
-# - the result queue reader
-# - the thread wakeup reader
-_MAX_WINDOWS_WORKERS = 63 - 2
-
+# On Windows, WaitForMultipleObjects is used to wait for processes to finish.
+# It can wait on, at most, 63 objects. There is an overhead of two objects:
+# - the result queue reader
+# - the thread wakeup reader
+_MAX_WINDOWS_WORKERS = 63 - 2
+
# Hack to embed stringification of remote traceback in local traceback
class _RemoteTraceback(Exception):
@@ -157,11 +157,11 @@ class _CallItem(object):
class _SafeQueue(Queue):
"""Safe Queue set exception to the future object linked to a job"""
- def __init__(self, max_size=0, *, ctx, pending_work_items, shutdown_lock,
- thread_wakeup):
+ def __init__(self, max_size=0, *, ctx, pending_work_items, shutdown_lock,
+ thread_wakeup):
self.pending_work_items = pending_work_items
- self.shutdown_lock = shutdown_lock
- self.thread_wakeup = thread_wakeup
+ self.shutdown_lock = shutdown_lock
+ self.thread_wakeup = thread_wakeup
super().__init__(max_size, ctx=ctx)
def _on_queue_feeder_error(self, e, obj):
@@ -169,11 +169,11 @@ class _SafeQueue(Queue):
tb = traceback.format_exception(type(e), e, e.__traceback__)
e.__cause__ = _RemoteTraceback('\n"""\n{}"""'.format(''.join(tb)))
work_item = self.pending_work_items.pop(obj.work_id, None)
- with self.shutdown_lock:
- self.thread_wakeup.wakeup()
- # work_item can be None if another process terminated. In this
- # case, the executor_manager_thread fails all work_items
- # with BrokenProcessPool
+ with self.shutdown_lock:
+ self.thread_wakeup.wakeup()
+ # work_item can be None if another process terminated. In this
+ # case, the executor_manager_thread fails all work_items
+ # with BrokenProcessPool
if work_item is not None:
work_item.future.set_exception(e)
else:
@@ -189,7 +189,7 @@ def _get_chunks(*iterables, chunksize):
return
yield chunk
-
+
def _process_chunk(fn, chunk):
""" Processes a chunk of an iterable passed to map.
@@ -246,139 +246,139 @@ def _process_worker(call_queue, result_queue, initializer, initargs):
_sendback_result(result_queue, call_item.work_id, exception=exc)
else:
_sendback_result(result_queue, call_item.work_id, result=r)
- del r
+ del r
# Liberate the resource as soon as possible, to avoid holding onto
# open files or shared memory that is not needed anymore
del call_item
-class _ExecutorManagerThread(threading.Thread):
- """Manages the communication between this process and the worker processes.
+class _ExecutorManagerThread(threading.Thread):
+ """Manages the communication between this process and the worker processes.
- The manager is run in a local thread.
+ The manager is run in a local thread.
Args:
- executor: A reference to the ProcessPoolExecutor that owns
- this thread. A weakref will be own by the manager as well as
- references to internal objects used to introspect the state of
- the executor.
+ executor: A reference to the ProcessPoolExecutor that owns
+ this thread. A weakref will be owned by the manager as well as
+ references to internal objects used to introspect the state of
+ the executor.
"""
- def __init__(self, executor):
- # Store references to necessary internals of the executor.
-
- # A _ThreadWakeup to allow waking up the queue_manager_thread from the
- # main Thread and avoid deadlocks caused by permanently locked queues.
- self.thread_wakeup = executor._executor_manager_thread_wakeup
- self.shutdown_lock = executor._shutdown_lock
-
- # A weakref.ref to the ProcessPoolExecutor that owns this thread. Used
- # to determine if the ProcessPoolExecutor has been garbage collected
- # and that the manager can exit.
- # When the executor gets garbage collected, the weakref callback
- # will wake up the queue management thread so that it can terminate
- # if there is no pending work item.
- def weakref_cb(_,
- thread_wakeup=self.thread_wakeup,
- shutdown_lock=self.shutdown_lock):
- mp.util.debug('Executor collected: triggering callback for'
- ' QueueManager wakeup')
- with shutdown_lock:
- thread_wakeup.wakeup()
-
- self.executor_reference = weakref.ref(executor, weakref_cb)
-
- # A list of the ctx.Process instances used as workers.
- self.processes = executor._processes
-
- # A ctx.Queue that will be filled with _CallItems derived from
- # _WorkItems for processing by the process workers.
- self.call_queue = executor._call_queue
-
- # A ctx.SimpleQueue of _ResultItems generated by the process workers.
- self.result_queue = executor._result_queue
-
- # A queue.Queue of work ids e.g. Queue([5, 6, ...]).
- self.work_ids_queue = executor._work_ids
-
- # A dict mapping work ids to _WorkItems e.g.
- # {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
- self.pending_work_items = executor._pending_work_items
-
- super().__init__()
-
- def run(self):
- # Main loop for the executor manager thread.
-
- while True:
- self.add_call_item_to_queue()
-
- result_item, is_broken, cause = self.wait_result_broken_or_wakeup()
-
- if is_broken:
- self.terminate_broken(cause)
- return
- if result_item is not None:
- self.process_result_item(result_item)
- # Delete reference to result_item to avoid keeping references
- # while waiting on new results.
- del result_item
-
- # attempt to increment idle process count
- executor = self.executor_reference()
- if executor is not None:
- executor._idle_worker_semaphore.release()
- del executor
-
- if self.is_shutting_down():
- self.flag_executor_shutting_down()
-
- # Since no new work items can be added, it is safe to shutdown
- # this thread if there are no pending work items.
- if not self.pending_work_items:
- self.join_executor_internals()
- return
-
- def add_call_item_to_queue(self):
- # Fills call_queue with _WorkItems from pending_work_items.
- # This function never blocks.
- while True:
- if self.call_queue.full():
- return
- try:
- work_id = self.work_ids_queue.get(block=False)
- except queue.Empty:
- return
- else:
- work_item = self.pending_work_items[work_id]
-
- if work_item.future.set_running_or_notify_cancel():
- self.call_queue.put(_CallItem(work_id,
- work_item.fn,
- work_item.args,
- work_item.kwargs),
- block=True)
- else:
- del self.pending_work_items[work_id]
- continue
-
- def wait_result_broken_or_wakeup(self):
+ def __init__(self, executor):
+ # Store references to necessary internals of the executor.
+
+ # A _ThreadWakeup to allow waking up the queue_manager_thread from the
+ # main Thread and avoid deadlocks caused by permanently locked queues.
+ self.thread_wakeup = executor._executor_manager_thread_wakeup
+ self.shutdown_lock = executor._shutdown_lock
+
+ # A weakref.ref to the ProcessPoolExecutor that owns this thread. Used
+ # to determine if the ProcessPoolExecutor has been garbage collected
+ # and that the manager can exit.
+ # When the executor gets garbage collected, the weakref callback
+ # will wake up the queue management thread so that it can terminate
+ # if there is no pending work item.
+ def weakref_cb(_,
+ thread_wakeup=self.thread_wakeup,
+ shutdown_lock=self.shutdown_lock):
+ mp.util.debug('Executor collected: triggering callback for'
+ ' QueueManager wakeup')
+ with shutdown_lock:
+ thread_wakeup.wakeup()
+
+ self.executor_reference = weakref.ref(executor, weakref_cb)
+
+ # A list of the ctx.Process instances used as workers.
+ self.processes = executor._processes
+
+ # A ctx.Queue that will be filled with _CallItems derived from
+ # _WorkItems for processing by the process workers.
+ self.call_queue = executor._call_queue
+
+ # A ctx.SimpleQueue of _ResultItems generated by the process workers.
+ self.result_queue = executor._result_queue
+
+ # A queue.Queue of work ids e.g. Queue([5, 6, ...]).
+ self.work_ids_queue = executor._work_ids
+
+ # A dict mapping work ids to _WorkItems e.g.
+ # {5: <_WorkItem...>, 6: <_WorkItem...>, ...}
+ self.pending_work_items = executor._pending_work_items
+
+ super().__init__()
+
+ def run(self):
+ # Main loop for the executor manager thread.
+
+ while True:
+ self.add_call_item_to_queue()
+
+ result_item, is_broken, cause = self.wait_result_broken_or_wakeup()
+
+ if is_broken:
+ self.terminate_broken(cause)
+ return
+ if result_item is not None:
+ self.process_result_item(result_item)
+ # Delete reference to result_item to avoid keeping references
+ # while waiting on new results.
+ del result_item
+
+ # attempt to increment idle process count
+ executor = self.executor_reference()
+ if executor is not None:
+ executor._idle_worker_semaphore.release()
+ del executor
+
+ if self.is_shutting_down():
+ self.flag_executor_shutting_down()
+
+ # Since no new work items can be added, it is safe to shut down
+ # this thread if there are no pending work items.
+ if not self.pending_work_items:
+ self.join_executor_internals()
+ return
+
+ def add_call_item_to_queue(self):
+ # Fills call_queue with _WorkItems from pending_work_items.
+ # This function never blocks.
+ while True:
+ if self.call_queue.full():
+ return
+ try:
+ work_id = self.work_ids_queue.get(block=False)
+ except queue.Empty:
+ return
+ else:
+ work_item = self.pending_work_items[work_id]
+
+ if work_item.future.set_running_or_notify_cancel():
+ self.call_queue.put(_CallItem(work_id,
+ work_item.fn,
+ work_item.args,
+ work_item.kwargs),
+ block=True)
+ else:
+ del self.pending_work_items[work_id]
+ continue
+
+ def wait_result_broken_or_wakeup(self):
# Wait for a result to be ready in the result_queue while checking
# that all worker processes are still running, or for a wake-up
# signal to be sent. The wake-up signals come either from new tasks being
# submitted, from the executor being shutdown/gc-ed, or from the
# shutdown of the python interpreter.
- result_reader = self.result_queue._reader
- assert not self.thread_wakeup._closed
- wakeup_reader = self.thread_wakeup._reader
- readers = [result_reader, wakeup_reader]
- worker_sentinels = [p.sentinel for p in list(self.processes.values())]
- ready = mp.connection.wait(readers + worker_sentinels)
+ result_reader = self.result_queue._reader
+ assert not self.thread_wakeup._closed
+ wakeup_reader = self.thread_wakeup._reader
+ readers = [result_reader, wakeup_reader]
+ worker_sentinels = [p.sentinel for p in list(self.processes.values())]
+ ready = mp.connection.wait(readers + worker_sentinels)
cause = None
is_broken = True
- result_item = None
+ result_item = None
if result_reader in ready:
try:
result_item = result_reader.recv()
@@ -388,28 +388,28 @@ class _ExecutorManagerThread(threading.Thread):
elif wakeup_reader in ready:
is_broken = False
-
- with self.shutdown_lock:
- self.thread_wakeup.clear()
-
- return result_item, is_broken, cause
-
- def process_result_item(self, result_item):
- # Process the received a result_item. This can be either the PID of a
- # worker that exited gracefully or a _ResultItem
-
+
+ with self.shutdown_lock:
+ self.thread_wakeup.clear()
+
+ return result_item, is_broken, cause
+
+ def process_result_item(self, result_item):
+ # Process the received result_item. This can be either the PID of a
+ # worker that exited gracefully or a _ResultItem
+
if isinstance(result_item, int):
# Clean shutdown of a worker using its PID
# (avoids marking the executor broken)
- assert self.is_shutting_down()
- p = self.processes.pop(result_item)
+ assert self.is_shutting_down()
+ p = self.processes.pop(result_item)
p.join()
- if not self.processes:
- self.join_executor_internals()
+ if not self.processes:
+ self.join_executor_internals()
return
- else:
- # Received a _ResultItem so mark the future as completed.
- work_item = self.pending_work_items.pop(result_item.work_id, None)
+ else:
+ # Received a _ResultItem so mark the future as completed.
+ work_item = self.pending_work_items.pop(result_item.work_id, None)
# work_item can be None if another process terminated (see above)
if work_item is not None:
if result_item.exception:
@@ -417,111 +417,111 @@ class _ExecutorManagerThread(threading.Thread):
else:
work_item.future.set_result(result_item.result)
- def is_shutting_down(self):
- # Check whether we should start shutting down the executor.
- executor = self.executor_reference()
+ def is_shutting_down(self):
+ # Check whether we should start shutting down the executor.
+ executor = self.executor_reference()
# No more work items can be added if:
# - The interpreter is shutting down OR
# - The executor that owns this worker has been collected OR
# - The executor that owns this worker has been shutdown.
- return (_global_shutdown or executor is None
- or executor._shutdown_thread)
-
- def terminate_broken(self, cause):
- # Terminate the executor because it is in a broken state. The cause
- # argument can be used to display more information on the error that
- # lead the executor into becoming broken.
-
- # Mark the process pool broken so that submits fail right now.
- executor = self.executor_reference()
- if executor is not None:
- executor._broken = ('A child process terminated '
- 'abruptly, the process pool is not '
- 'usable anymore')
- executor._shutdown_thread = True
- executor = None
-
- # All pending tasks are to be marked failed with the following
- # BrokenProcessPool error
- bpe = BrokenProcessPool("A process in the process pool was "
- "terminated abruptly while the future was "
- "running or pending.")
- if cause is not None:
- bpe.__cause__ = _RemoteTraceback(
- f"\n'''\n{''.join(cause)}'''")
-
- # Mark pending tasks as failed.
- for work_id, work_item in self.pending_work_items.items():
- work_item.future.set_exception(bpe)
- # Delete references to object. See issue16284
- del work_item
- self.pending_work_items.clear()
-
- # Terminate remaining workers forcibly: the queues or their
- # locks may be in a dirty state and block forever.
- for p in self.processes.values():
- p.terminate()
-
- # clean up resources
- self.join_executor_internals()
-
- def flag_executor_shutting_down(self):
- # Flag the executor as shutting down and cancel remaining tasks if
- # requested as early as possible if it is not gc-ed yet.
- executor = self.executor_reference()
- if executor is not None:
- executor._shutdown_thread = True
- # Cancel pending work items if requested.
- if executor._cancel_pending_futures:
- # Cancel all pending futures and update pending_work_items
- # to only have futures that are currently running.
- new_pending_work_items = {}
- for work_id, work_item in self.pending_work_items.items():
- if not work_item.future.cancel():
- new_pending_work_items[work_id] = work_item
- self.pending_work_items = new_pending_work_items
- # Drain work_ids_queue since we no longer need to
- # add items to the call queue.
- while True:
- try:
- self.work_ids_queue.get_nowait()
- except queue.Empty:
- break
- # Make sure we do this only once to not waste time looping
- # on running processes over and over.
- executor._cancel_pending_futures = False
-
- def shutdown_workers(self):
- n_children_to_stop = self.get_n_children_alive()
- n_sentinels_sent = 0
- # Send the right number of sentinels, to make sure all children are
- # properly terminated.
- while (n_sentinels_sent < n_children_to_stop
- and self.get_n_children_alive() > 0):
- for i in range(n_children_to_stop - n_sentinels_sent):
- try:
- self.call_queue.put_nowait(None)
- n_sentinels_sent += 1
- except queue.Full:
- break
-
- def join_executor_internals(self):
- self.shutdown_workers()
- # Release the queue's resources as soon as possible.
- self.call_queue.close()
- self.call_queue.join_thread()
- with self.shutdown_lock:
- self.thread_wakeup.close()
- # If .join() is not called on the created processes then
- # some ctx.Queue methods may deadlock on Mac OS X.
- for p in self.processes.values():
- p.join()
-
- def get_n_children_alive(self):
- # This is an upper bound on the number of children alive.
- return sum(p.is_alive() for p in self.processes.values())
-
-
+ return (_global_shutdown or executor is None
+ or executor._shutdown_thread)
+
+ def terminate_broken(self, cause):
+ # Terminate the executor because it is in a broken state. The cause
+ # argument can be used to display more information on the error that
+ # led the executor into becoming broken.
+
+ # Mark the process pool broken so that submits fail right now.
+ executor = self.executor_reference()
+ if executor is not None:
+ executor._broken = ('A child process terminated '
+ 'abruptly, the process pool is not '
+ 'usable anymore')
+ executor._shutdown_thread = True
+ executor = None
+
+ # All pending tasks are to be marked failed with the following
+ # BrokenProcessPool error
+ bpe = BrokenProcessPool("A process in the process pool was "
+ "terminated abruptly while the future was "
+ "running or pending.")
+ if cause is not None:
+ bpe.__cause__ = _RemoteTraceback(
+ f"\n'''\n{''.join(cause)}'''")
+
+ # Mark pending tasks as failed.
+ for work_id, work_item in self.pending_work_items.items():
+ work_item.future.set_exception(bpe)
+ # Delete references to object. See issue16284
+ del work_item
+ self.pending_work_items.clear()
+
+ # Terminate remaining workers forcibly: the queues or their
+ # locks may be in a dirty state and block forever.
+ for p in self.processes.values():
+ p.terminate()
+
+ # clean up resources
+ self.join_executor_internals()
+
+ def flag_executor_shutting_down(self):
+ # Flag the executor as shutting down and, if requested, cancel
+ # remaining tasks as early as possible, while it is not gc-ed yet.
+ executor = self.executor_reference()
+ if executor is not None:
+ executor._shutdown_thread = True
+ # Cancel pending work items if requested.
+ if executor._cancel_pending_futures:
+ # Cancel all pending futures and update pending_work_items
+ # to only have futures that are currently running.
+ new_pending_work_items = {}
+ for work_id, work_item in self.pending_work_items.items():
+ if not work_item.future.cancel():
+ new_pending_work_items[work_id] = work_item
+ self.pending_work_items = new_pending_work_items
+ # Drain work_ids_queue since we no longer need to
+ # add items to the call queue.
+ while True:
+ try:
+ self.work_ids_queue.get_nowait()
+ except queue.Empty:
+ break
+ # Make sure we do this only once to not waste time looping
+ # on running processes over and over.
+ executor._cancel_pending_futures = False
+
+ def shutdown_workers(self):
+ n_children_to_stop = self.get_n_children_alive()
+ n_sentinels_sent = 0
+ # Send the right number of sentinels, to make sure all children are
+ # properly terminated.
+ while (n_sentinels_sent < n_children_to_stop
+ and self.get_n_children_alive() > 0):
+ for i in range(n_children_to_stop - n_sentinels_sent):
+ try:
+ self.call_queue.put_nowait(None)
+ n_sentinels_sent += 1
+ except queue.Full:
+ break
+
+ def join_executor_internals(self):
+ self.shutdown_workers()
+ # Release the queue's resources as soon as possible.
+ self.call_queue.close()
+ self.call_queue.join_thread()
+ with self.shutdown_lock:
+ self.thread_wakeup.close()
+ # If .join() is not called on the created processes then
+ # some ctx.Queue methods may deadlock on Mac OS X.
+ for p in self.processes.values():
+ p.join()
+
+ def get_n_children_alive(self):
+ # This is an upper bound on the number of children alive.
+ return sum(p.is_alive() for p in self.processes.values())
+
+
_system_limits_checked = False
_system_limited = None
@@ -580,23 +580,23 @@ class ProcessPoolExecutor(_base.Executor):
worker processes will be created as the machine has processors.
mp_context: A multiprocessing context to launch the workers. This
object should provide SimpleQueue, Queue and Process.
- initializer: A callable used to initialize worker processes.
+ initializer: A callable used to initialize worker processes.
initargs: A tuple of arguments to pass to the initializer.
"""
_check_system_limits()
if max_workers is None:
self._max_workers = os.cpu_count() or 1
- if sys.platform == 'win32':
- self._max_workers = min(_MAX_WINDOWS_WORKERS,
- self._max_workers)
+ if sys.platform == 'win32':
+ self._max_workers = min(_MAX_WINDOWS_WORKERS,
+ self._max_workers)
else:
if max_workers <= 0:
raise ValueError("max_workers must be greater than 0")
- elif (sys.platform == 'win32' and
- max_workers > _MAX_WINDOWS_WORKERS):
- raise ValueError(
- f"max_workers must be <= {_MAX_WINDOWS_WORKERS}")
+ elif (sys.platform == 'win32' and
+ max_workers > _MAX_WINDOWS_WORKERS):
+ raise ValueError(
+ f"max_workers must be <= {_MAX_WINDOWS_WORKERS}")
self._max_workers = max_workers
@@ -610,7 +610,7 @@ class ProcessPoolExecutor(_base.Executor):
self._initargs = initargs
# Management thread
- self._executor_manager_thread = None
+ self._executor_manager_thread = None
# Map of pids to processes
self._processes = {}
@@ -618,22 +618,22 @@ class ProcessPoolExecutor(_base.Executor):
# Shutdown is a two-step process.
self._shutdown_thread = False
self._shutdown_lock = threading.Lock()
- self._idle_worker_semaphore = threading.Semaphore(0)
+ self._idle_worker_semaphore = threading.Semaphore(0)
self._broken = False
self._queue_count = 0
self._pending_work_items = {}
- self._cancel_pending_futures = False
-
- # _ThreadWakeup is a communication channel used to interrupt the wait
- # of the main loop of executor_manager_thread from another thread (e.g.
- # when calling executor.submit or executor.shutdown). We do not use the
- # _result_queue to send wakeup signals to the executor_manager_thread
- # as it could result in a deadlock if a worker process dies with the
- # _result_queue write lock still acquired.
- #
- # _shutdown_lock must be locked to access _ThreadWakeup.
- self._executor_manager_thread_wakeup = _ThreadWakeup()
-
+ self._cancel_pending_futures = False
+
+ # _ThreadWakeup is a communication channel used to interrupt the wait
+ # of the main loop of executor_manager_thread from another thread (e.g.
+ # when calling executor.submit or executor.shutdown). We do not use the
+ # _result_queue to send wakeup signals to the executor_manager_thread
+ # as it could result in a deadlock if a worker process dies with the
+ # _result_queue write lock still acquired.
+ #
+ # _shutdown_lock must be locked to access _ThreadWakeup.
+ self._executor_manager_thread_wakeup = _ThreadWakeup()
+
# Create communication channels for the executor
# Make the call queue slightly larger than the number of processes to
# prevent the worker processes from idling. But don't make it too big
@@ -641,9 +641,9 @@ class ProcessPoolExecutor(_base.Executor):
queue_size = self._max_workers + EXTRA_QUEUED_CALLS
self._call_queue = _SafeQueue(
max_size=queue_size, ctx=self._mp_context,
- pending_work_items=self._pending_work_items,
- shutdown_lock=self._shutdown_lock,
- thread_wakeup=self._executor_manager_thread_wakeup)
+ pending_work_items=self._pending_work_items,
+ shutdown_lock=self._shutdown_lock,
+ thread_wakeup=self._executor_manager_thread_wakeup)
# Killed worker processes can produce spurious "broken pipe"
# tracebacks in the queue's own worker thread. But we detect killed
# processes anyway, so silence the tracebacks.
@@ -651,21 +651,21 @@ class ProcessPoolExecutor(_base.Executor):
self._result_queue = mp_context.SimpleQueue()
self._work_ids = queue.Queue()
- def _start_executor_manager_thread(self):
- if self._executor_manager_thread is None:
+ def _start_executor_manager_thread(self):
+ if self._executor_manager_thread is None:
# Start the processes so that their sentinels are known.
- self._executor_manager_thread = _ExecutorManagerThread(self)
- self._executor_manager_thread.start()
- _threads_wakeups[self._executor_manager_thread] = \
- self._executor_manager_thread_wakeup
+ self._executor_manager_thread = _ExecutorManagerThread(self)
+ self._executor_manager_thread.start()
+ _threads_wakeups[self._executor_manager_thread] = \
+ self._executor_manager_thread_wakeup
def _adjust_process_count(self):
- # if there's an idle process, we don't need to spawn a new one.
- if self._idle_worker_semaphore.acquire(blocking=False):
- return
-
- process_count = len(self._processes)
- if process_count < self._max_workers:
+ # if there's an idle process, we don't need to spawn a new one.
+ if self._idle_worker_semaphore.acquire(blocking=False):
+ return
+
+ process_count = len(self._processes)
+ if process_count < self._max_workers:
p = self._mp_context.Process(
target=_process_worker,
args=(self._call_queue,
@@ -675,7 +675,7 @@ class ProcessPoolExecutor(_base.Executor):
p.start()
self._processes[p.pid] = p
- def submit(self, fn, /, *args, **kwargs):
+ def submit(self, fn, /, *args, **kwargs):
with self._shutdown_lock:
if self._broken:
raise BrokenProcessPool(self._broken)
@@ -692,10 +692,10 @@ class ProcessPoolExecutor(_base.Executor):
self._work_ids.put(self._queue_count)
self._queue_count += 1
# Wake up queue management thread
- self._executor_manager_thread_wakeup.wakeup()
+ self._executor_manager_thread_wakeup.wakeup()
- self._adjust_process_count()
- self._start_executor_manager_thread()
+ self._adjust_process_count()
+ self._start_executor_manager_thread()
return f
submit.__doc__ = _base.Executor.submit.__doc__
@@ -728,24 +728,24 @@ class ProcessPoolExecutor(_base.Executor):
timeout=timeout)
return _chain_from_iterable_of_lists(results)
- def shutdown(self, wait=True, *, cancel_futures=False):
+ def shutdown(self, wait=True, *, cancel_futures=False):
with self._shutdown_lock:
- self._cancel_pending_futures = cancel_futures
+ self._cancel_pending_futures = cancel_futures
self._shutdown_thread = True
- if self._executor_manager_thread_wakeup is not None:
- # Wake up queue management thread
- self._executor_manager_thread_wakeup.wakeup()
-
- if self._executor_manager_thread is not None and wait:
- self._executor_manager_thread.join()
+ if self._executor_manager_thread_wakeup is not None:
+ # Wake up queue management thread
+ self._executor_manager_thread_wakeup.wakeup()
+
+ if self._executor_manager_thread is not None and wait:
+ self._executor_manager_thread.join()
# To reduce the risk of opening too many files, remove references to
# objects that use file descriptors.
- self._executor_manager_thread = None
- self._call_queue = None
- if self._result_queue is not None and wait:
- self._result_queue.close()
+ self._executor_manager_thread = None
+ self._call_queue = None
+ if self._result_queue is not None and wait:
+ self._result_queue.close()
self._result_queue = None
self._processes = None
- self._executor_manager_thread_wakeup = None
+ self._executor_manager_thread_wakeup = None
shutdown.__doc__ = _base.Executor.shutdown.__doc__
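The process.py changes above include the initializer/initargs support and the Windows cap of 61 workers (_MAX_WINDOWS_WORKERS = 63 - 2). A minimal sketch of the initializer hooks, with a made-up init_worker/work pair; the __main__ guard matters because workers may be spawned in fresh processes:

    import os
    from concurrent.futures import ProcessPoolExecutor

    def init_worker(tag):
        # Runs once in every worker process before it accepts work.
        os.environ["WORKER_TAG"] = tag

    def work(x):
        return x * x

    if __name__ == "__main__":
        with ProcessPoolExecutor(max_workers=2,
                                 initializer=init_worker,
                                 initargs=("demo",)) as pool:
            print(list(pool.map(work, range(5))))  # [0, 1, 4, 9, 16]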
diff --git a/contrib/tools/python3/src/Lib/concurrent/futures/thread.py b/contrib/tools/python3/src/Lib/concurrent/futures/thread.py
index 51c942f51a..0ed4a2c091 100644
--- a/contrib/tools/python3/src/Lib/concurrent/futures/thread.py
+++ b/contrib/tools/python3/src/Lib/concurrent/futures/thread.py
@@ -9,40 +9,40 @@ from concurrent.futures import _base
import itertools
import queue
import threading
-import types
+import types
import weakref
import os
_threads_queues = weakref.WeakKeyDictionary()
_shutdown = False
-# Lock that ensures that new workers are not created while the interpreter is
-# shutting down. Must be held while mutating _threads_queues and _shutdown.
-_global_shutdown_lock = threading.Lock()
+# Lock that ensures that new workers are not created while the interpreter is
+# shutting down. Must be held while mutating _threads_queues and _shutdown.
+_global_shutdown_lock = threading.Lock()
def _python_exit():
global _shutdown
- with _global_shutdown_lock:
- _shutdown = True
+ with _global_shutdown_lock:
+ _shutdown = True
items = list(_threads_queues.items())
for t, q in items:
q.put(None)
for t, q in items:
t.join()
-# Register for `_python_exit()` to be called just before joining all
-# non-daemon threads. This is used instead of `atexit.register()` for
-# compatibility with subinterpreters, which no longer support daemon threads.
-# See bpo-39812 for context.
-threading._register_atexit(_python_exit)
-
-# At fork, reinitialize the `_global_shutdown_lock` lock in the child process
-if hasattr(os, 'register_at_fork'):
- os.register_at_fork(before=_global_shutdown_lock.acquire,
- after_in_child=_global_shutdown_lock._at_fork_reinit,
- after_in_parent=_global_shutdown_lock.release)
+# Register for `_python_exit()` to be called just before joining all
+# non-daemon threads. This is used instead of `atexit.register()` for
+# compatibility with subinterpreters, which no longer support daemon threads.
+# See bpo-39812 for context.
+threading._register_atexit(_python_exit)
+# At fork, reinitialize the `_global_shutdown_lock` lock in the child process
+if hasattr(os, 'register_at_fork'):
+ os.register_at_fork(before=_global_shutdown_lock.acquire,
+ after_in_child=_global_shutdown_lock._at_fork_reinit,
+ after_in_parent=_global_shutdown_lock.release)
+
class _WorkItem(object):
def __init__(self, future, fn, args, kwargs):
self.future = future
@@ -63,9 +63,9 @@ class _WorkItem(object):
else:
self.future.set_result(result)
- __class_getitem__ = classmethod(types.GenericAlias)
-
+ __class_getitem__ = classmethod(types.GenericAlias)
+
def _worker(executor_reference, work_queue, initializer, initargs):
if initializer is not None:
try:
@@ -83,14 +83,14 @@ def _worker(executor_reference, work_queue, initializer, initargs):
work_item.run()
# Delete references to object. See issue16284
del work_item
-
- # attempt to increment idle count
- executor = executor_reference()
- if executor is not None:
- executor._idle_semaphore.release()
- del executor
+
+ # attempt to increment idle count
+ executor = executor_reference()
+ if executor is not None:
+ executor._idle_semaphore.release()
+ del executor
continue
-
+
executor = executor_reference()
# Exit if:
# - The interpreter is shutting down OR
@@ -128,18 +128,18 @@ class ThreadPoolExecutor(_base.Executor):
max_workers: The maximum number of threads that can be used to
execute the given calls.
thread_name_prefix: An optional name prefix to give our threads.
- initializer: A callable used to initialize worker threads.
+ initializer: A callable used to initialize worker threads.
initargs: A tuple of arguments to pass to the initializer.
"""
if max_workers is None:
- # ThreadPoolExecutor is often used to:
- # * CPU bound task which releases GIL
- # * I/O bound task (which releases GIL, of course)
- #
- # We use cpu_count + 4 for both types of tasks.
- # But we limit it to 32 to avoid consuming surprisingly large resource
- # on many core machine.
- max_workers = min(32, (os.cpu_count() or 1) + 4)
+ # ThreadPoolExecutor is often used for:
+ # * CPU-bound tasks that release the GIL
+ # * I/O-bound tasks (which release the GIL, of course)
+ #
+ # We use cpu_count + 4 for both types of tasks.
+ # But we limit it to 32 to avoid consuming surprisingly large resources
+ # on many-core machines.
+ max_workers = min(32, (os.cpu_count() or 1) + 4)
if max_workers <= 0:
raise ValueError("max_workers must be greater than 0")
@@ -148,7 +148,7 @@ class ThreadPoolExecutor(_base.Executor):
self._max_workers = max_workers
self._work_queue = queue.SimpleQueue()
- self._idle_semaphore = threading.Semaphore(0)
+ self._idle_semaphore = threading.Semaphore(0)
self._threads = set()
self._broken = False
self._shutdown = False
@@ -158,8 +158,8 @@ class ThreadPoolExecutor(_base.Executor):
self._initializer = initializer
self._initargs = initargs
- def submit(self, fn, /, *args, **kwargs):
- with self._shutdown_lock, _global_shutdown_lock:
+ def submit(self, fn, /, *args, **kwargs):
+ with self._shutdown_lock, _global_shutdown_lock:
if self._broken:
raise BrokenThreadPool(self._broken)
@@ -178,15 +178,15 @@ class ThreadPoolExecutor(_base.Executor):
submit.__doc__ = _base.Executor.submit.__doc__
def _adjust_thread_count(self):
- # if idle threads are available, don't spin new threads
- if self._idle_semaphore.acquire(timeout=0):
- return
-
+ # if idle threads are available, don't spin new threads
+ if self._idle_semaphore.acquire(timeout=0):
+ return
+
# When the executor gets lost, the weakref callback will wake up
# the worker threads.
def weakref_cb(_, q=self._work_queue):
q.put(None)
-
+
num_threads = len(self._threads)
if num_threads < self._max_workers:
thread_name = '%s_%d' % (self._thread_name_prefix or self,
@@ -213,22 +213,22 @@ class ThreadPoolExecutor(_base.Executor):
if work_item is not None:
work_item.future.set_exception(BrokenThreadPool(self._broken))
- def shutdown(self, wait=True, *, cancel_futures=False):
+ def shutdown(self, wait=True, *, cancel_futures=False):
with self._shutdown_lock:
self._shutdown = True
- if cancel_futures:
- # Drain all work items from the queue, and then cancel their
- # associated futures.
- while True:
- try:
- work_item = self._work_queue.get_nowait()
- except queue.Empty:
- break
- if work_item is not None:
- work_item.future.cancel()
-
- # Send a wake-up to prevent threads calling
- # _work_queue.get(block=True) from permanently blocking.
+ if cancel_futures:
+ # Drain all work items from the queue, and then cancel their
+ # associated futures.
+ while True:
+ try:
+ work_item = self._work_queue.get_nowait()
+ except queue.Empty:
+ break
+ if work_item is not None:
+ work_item.future.cancel()
+
+ # Send a wake-up to prevent threads calling
+ # _work_queue.get(block=True) from permanently blocking.
self._work_queue.put(None)
if wait:
for t in self._threads:
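The thread.py hunks introduce _idle_semaphore so that submit() reuses an idle worker instead of unconditionally spawning threads up to max_workers, and the default pool size becomes min(32, (os.cpu_count() or 1) + 4). A rough sketch; it peeks at the private _threads set purely for illustration, and the count can occasionally exceed 1 under scheduling races:

    from concurrent.futures import ThreadPoolExecutor

    with ThreadPoolExecutor() as pool:  # default: min(32, (os.cpu_count() or 1) + 4)
        for i in range(10):
            pool.submit(pow, 2, i).result()  # wait, so the worker goes idle again
        # Private attribute, inspected only to show worker reuse:
        print(len(pool._threads))  # typically 1, not 10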
diff --git a/contrib/tools/python3/src/Lib/configparser.py b/contrib/tools/python3/src/Lib/configparser.py
index 8dd5c13bcc..e5d6d400e2 100644
--- a/contrib/tools/python3/src/Lib/configparser.py
+++ b/contrib/tools/python3/src/Lib/configparser.py
@@ -56,7 +56,7 @@ ConfigParser -- responsible for parsing a list of
When `interpolation` is given, it should be an Interpolation subclass
instance. It will be used as the handler for option value
- pre-processing when using getters. RawConfigParser objects don't do
+ pre-processing when using getters. RawConfigParser objects don't do
any sort of interpolation, whereas ConfigParser uses an instance of
BasicInterpolation. The library also provides a ``zc.buildout``
inspired ExtendedInterpolation implementation.
@@ -139,7 +139,7 @@ ConfigParser -- responsible for parsing a list of
"""
from collections.abc import MutableMapping
-from collections import ChainMap as _ChainMap
+from collections import ChainMap as _ChainMap
import functools
import io
import itertools
@@ -157,7 +157,7 @@ __all__ = ["NoSectionError", "DuplicateOptionError", "DuplicateSectionError",
"LegacyInterpolation", "SectionProxy", "ConverterMapping",
"DEFAULTSECT", "MAX_INTERPOLATION_DEPTH"]
-_default_dict = dict
+_default_dict = dict
DEFAULTSECT = "DEFAULT"
MAX_INTERPOLATION_DEPTH = 10
@@ -847,7 +847,7 @@ class RawConfigParser(MutableMapping):
except KeyError:
if section != self.default_section:
raise NoSectionError(section)
- orig_keys = list(d.keys())
+ orig_keys = list(d.keys())
# Update with the entry specific variables
if vars:
for key, value in vars.items():
@@ -856,7 +856,7 @@ class RawConfigParser(MutableMapping):
section, option, d[option], d)
if raw:
value_getter = lambda option: d[option]
- return [(option, value_getter(option)) for option in orig_keys]
+ return [(option, value_getter(option)) for option in orig_keys]
def popitem(self):
"""Remove a section from the parser and return it as
@@ -907,9 +907,9 @@ class RawConfigParser(MutableMapping):
If `space_around_delimiters' is True (the default), delimiters
between keys and values are surrounded by spaces.
-
- Please note that comments in the original configuration file are not
- preserved when writing the configuration back.
+
+ Please note that comments in the original configuration file are not
+ preserved when writing the configuration back.
"""
if space_around_delimiters:
d = " {} ".format(self._delimiters[0])
@@ -966,8 +966,8 @@ class RawConfigParser(MutableMapping):
def __setitem__(self, key, value):
# To conform with the mapping protocol, overwrites existing values in
# the section.
- if key in self and self[key] is value:
- return
+ if key in self and self[key] is value:
+ return
# XXX this is not atomic if read_dict fails at any point. Then again,
# no update method in configparser is atomic in this implementation.
if key == self.default_section:
@@ -1008,7 +1008,7 @@ class RawConfigParser(MutableMapping):
Configuration files may include comments, prefixed by specific
characters (`#' and `;' by default). Comments may appear on their own
in an otherwise empty line or may be entered in lines holding values or
- section names. Please note that comments get stripped off when reading configuration files.
+ section names. Please note that comments get stripped off when reading configuration files.
"""
elements_added = set()
cursect = None # None, or a dictionary
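The configparser docstring changes above stress that comments are stripped on read and never written back. A small round-trip sketch (the section and option names are made up):

    import configparser, io

    cfg = configparser.ConfigParser()
    cfg.read_string(
        "# full-line comments are dropped while parsing\n"
        "[server]\n"
        "; ;-style comment lines are dropped as well\n"
        "port = 8080\n"
    )

    out = io.StringIO()
    cfg.write(out)
    print(out.getvalue())  # only [server] and port survive the round trip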
diff --git a/contrib/tools/python3/src/Lib/contextlib.py b/contrib/tools/python3/src/Lib/contextlib.py
index 4e8f5f7593..86b3010734 100644
--- a/contrib/tools/python3/src/Lib/contextlib.py
+++ b/contrib/tools/python3/src/Lib/contextlib.py
@@ -4,7 +4,7 @@ import sys
import _collections_abc
from collections import deque
from functools import wraps
-from types import MethodType, GenericAlias
+from types import MethodType, GenericAlias
__all__ = ["asynccontextmanager", "contextmanager", "closing", "nullcontext",
"AbstractContextManager", "AbstractAsyncContextManager",
@@ -16,8 +16,8 @@ class AbstractContextManager(abc.ABC):
"""An abstract base class for context managers."""
- __class_getitem__ = classmethod(GenericAlias)
-
+ __class_getitem__ = classmethod(GenericAlias)
+
def __enter__(self):
"""Return `self` upon entering the runtime context."""
return self
@@ -38,8 +38,8 @@ class AbstractAsyncContextManager(abc.ABC):
"""An abstract base class for asynchronous context managers."""
- __class_getitem__ = classmethod(GenericAlias)
-
+ __class_getitem__ = classmethod(GenericAlias)
+
async def __aenter__(self):
"""Return `self` upon entering the runtime context."""
return self
@@ -98,19 +98,19 @@ class _GeneratorContextManagerBase:
# See http://bugs.python.org/issue19404 for more details.
def _recreate_cm(self):
- # _GCMB instances are one-shot context managers, so the
+ # _GCMB instances are one-shot context managers, so the
# CM must be recreated each time a decorated function is
# called
return self.__class__(self.func, self.args, self.kwds)
-
-class _GeneratorContextManager(
- _GeneratorContextManagerBase,
- AbstractContextManager,
- ContextDecorator,
-):
- """Helper for @contextmanager decorator."""
-
+
+class _GeneratorContextManager(
+ _GeneratorContextManagerBase,
+ AbstractContextManager,
+ ContextDecorator,
+):
+ """Helper for @contextmanager decorator."""
+
def __enter__(self):
# do not keep args and kwds alive unnecessarily
# they are only needed for recreation, which is not possible anymore
@@ -120,8 +120,8 @@ class _GeneratorContextManager(
except StopIteration:
raise RuntimeError("generator didn't yield") from None
- def __exit__(self, typ, value, traceback):
- if typ is None:
+ def __exit__(self, typ, value, traceback):
+ if typ is None:
try:
next(self.gen)
except StopIteration:
@@ -132,9 +132,9 @@ class _GeneratorContextManager(
if value is None:
# Need to force instantiation so we can reliably
# tell if we get the same exception back
- value = typ()
+ value = typ()
try:
- self.gen.throw(typ, value, traceback)
+ self.gen.throw(typ, value, traceback)
except StopIteration as exc:
# Suppress StopIteration *unless* it's the same exception that
# was passed to throw(). This prevents a StopIteration
@@ -144,39 +144,39 @@ class _GeneratorContextManager(
# Don't re-raise the passed in exception. (issue27122)
if exc is value:
return False
- # Avoid suppressing if a StopIteration exception
+ # Avoid suppressing if a StopIteration exception
# was passed to throw() and later wrapped into a RuntimeError
- # (see PEP 479 for sync generators; async generators also
- # have this behavior). But do this only if the exception wrapped
- # by the RuntimeError is actually Stop(Async)Iteration (see
- # issue29692).
- if (
- isinstance(value, StopIteration)
- and exc.__cause__ is value
- ):
+ # (see PEP 479 for sync generators; async generators also
+ # have this behavior). But do this only if the exception wrapped
+ # by the RuntimeError is actually Stop(Async)Iteration (see
+ # issue29692).
+ if (
+ isinstance(value, StopIteration)
+ and exc.__cause__ is value
+ ):
return False
raise
- except BaseException as exc:
+ except BaseException as exc:
# only re-raise if it's *not* the exception that was
# passed to throw(), because __exit__() must not raise
# an exception unless __exit__() itself failed. But throw()
# has to raise the exception to signal propagation, so this
# fixes the impedance mismatch between the throw() protocol
# and the __exit__() protocol.
- if exc is not value:
- raise
- return False
+ if exc is not value:
+ raise
+ return False
raise RuntimeError("generator didn't stop after throw()")
class _AsyncGeneratorContextManager(_GeneratorContextManagerBase,
AbstractAsyncContextManager):
- """Helper for @asynccontextmanager decorator."""
+ """Helper for @asynccontextmanager decorator."""
async def __aenter__(self):
- # do not keep args and kwds alive unnecessarily
- # they are only needed for recreation, which is not possible anymore
- del self.args, self.kwds, self.func
+ # do not keep args and kwds alive unnecessarily
+ # they are only needed for recreation, which is not possible anymore
+ del self.args, self.kwds, self.func
try:
return await self.gen.__anext__()
except StopAsyncIteration:
@@ -187,48 +187,48 @@ class _AsyncGeneratorContextManager(_GeneratorContextManagerBase,
try:
await self.gen.__anext__()
except StopAsyncIteration:
- return False
+ return False
else:
raise RuntimeError("generator didn't stop")
else:
if value is None:
- # Need to force instantiation so we can reliably
- # tell if we get the same exception back
+ # Need to force instantiation so we can reliably
+ # tell if we get the same exception back
value = typ()
try:
await self.gen.athrow(typ, value, traceback)
except StopAsyncIteration as exc:
- # Suppress StopIteration *unless* it's the same exception that
- # was passed to throw(). This prevents a StopIteration
- # raised inside the "with" statement from being suppressed.
+ # Suppress StopIteration *unless* it's the same exception that
+ # was passed to throw(). This prevents a StopIteration
+ # raised inside the "with" statement from being suppressed.
return exc is not value
except RuntimeError as exc:
- # Don't re-raise the passed in exception. (issue27122)
+ # Don't re-raise the passed in exception. (issue27122)
if exc is value:
return False
- # Avoid suppressing if a Stop(Async)Iteration exception
- # was passed to athrow() and later wrapped into a RuntimeError
+ # Avoid suppressing if a Stop(Async)Iteration exception
+ # was passed to athrow() and later wrapped into a RuntimeError
# (see PEP 479 for sync generators; async generators also
# have this behavior). But do this only if the exception wrapped
# by the RuntimeError is actually Stop(Async)Iteration (see
# issue29692).
- if (
- isinstance(value, (StopIteration, StopAsyncIteration))
- and exc.__cause__ is value
- ):
- return False
+ if (
+ isinstance(value, (StopIteration, StopAsyncIteration))
+ and exc.__cause__ is value
+ ):
+ return False
raise
except BaseException as exc:
- # only re-raise if it's *not* the exception that was
- # passed to throw(), because __exit__() must not raise
- # an exception unless __exit__() itself failed. But throw()
- # has to raise the exception to signal propagation, so this
- # fixes the impedance mismatch between the throw() protocol
- # and the __exit__() protocol.
+ # only re-raise if it's *not* the exception that was
+ # passed to throw(), because __exit__() must not raise
+ # an exception unless __exit__() itself failed. But throw()
+ # has to raise the exception to signal propagation, so this
+ # fixes the impedance mismatch between the throw() protocol
+ # and the __exit__() protocol.
if exc is not value:
raise
- return False
- raise RuntimeError("generator didn't stop after athrow()")
+ return False
+ raise RuntimeError("generator didn't stop after athrow()")
def contextmanager(func):
@@ -397,10 +397,10 @@ class _BaseExitStack:
@staticmethod
def _create_exit_wrapper(cm, cm_exit):
- return MethodType(cm_exit, cm)
+ return MethodType(cm_exit, cm)
@staticmethod
- def _create_cb_wrapper(callback, /, *args, **kwds):
+ def _create_cb_wrapper(callback, /, *args, **kwds):
def _exit_wrapper(exc_type, exc, tb):
callback(*args, **kwds)
return _exit_wrapper
@@ -449,7 +449,7 @@ class _BaseExitStack:
self._push_cm_exit(cm, _exit)
return result
- def callback(self, callback, /, *args, **kwds):
+ def callback(self, callback, /, *args, **kwds):
"""Registers an arbitrary callback and arguments.
Cannot suppress exceptions.
@@ -496,10 +496,10 @@ class ExitStack(_BaseExitStack, AbstractContextManager):
# Context may not be correct, so find the end of the chain
while 1:
exc_context = new_exc.__context__
- if exc_context is None or exc_context is old_exc:
+ if exc_context is None or exc_context is old_exc:
# Context is already set correctly (see issue 20317)
return
- if exc_context is frame_exc:
+ if exc_context is frame_exc:
break
new_exc = exc_context
# Change the end of the chain to point to the exception
@@ -556,10 +556,10 @@ class AsyncExitStack(_BaseExitStack, AbstractAsyncContextManager):
@staticmethod
def _create_async_exit_wrapper(cm, cm_exit):
- return MethodType(cm_exit, cm)
+ return MethodType(cm_exit, cm)
@staticmethod
- def _create_async_cb_wrapper(callback, /, *args, **kwds):
+ def _create_async_cb_wrapper(callback, /, *args, **kwds):
async def _exit_wrapper(exc_type, exc, tb):
await callback(*args, **kwds)
return _exit_wrapper
@@ -594,7 +594,7 @@ class AsyncExitStack(_BaseExitStack, AbstractAsyncContextManager):
self._push_async_cm_exit(exit, exit_method)
return exit # Allow use as a decorator
- def push_async_callback(self, callback, /, *args, **kwds):
+ def push_async_callback(self, callback, /, *args, **kwds):
"""Registers an arbitrary coroutine function and arguments.
Cannot suppress exceptions.
@@ -630,10 +630,10 @@ class AsyncExitStack(_BaseExitStack, AbstractAsyncContextManager):
# Context may not be correct, so find the end of the chain
while 1:
exc_context = new_exc.__context__
- if exc_context is None or exc_context is old_exc:
+ if exc_context is None or exc_context is old_exc:
# Context is already set correctly (see issue 20317)
return
- if exc_context is frame_exc:
+ if exc_context is frame_exc:
break
new_exc = exc_context
# Change the end of the chain to point to the exception
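The contextlib hunks describe _GeneratorContextManagerBase instances as one-shot context managers that _recreate_cm() rebuilds for each call of a decorated function. A sketch of that decorator path, using a hypothetical tracing() manager:

    from contextlib import contextmanager

    @contextmanager
    def tracing(label):
        print("enter", label)
        try:
            yield
        finally:
            print("exit", label)

    @tracing("call")       # usable as a decorator via ContextDecorator
    def greet():
        print("hello")

    greet()  # _recreate_cm() builds a fresh manager for each call,
    greet()  # so both invocations print their enter/exit lines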
diff --git a/contrib/tools/python3/src/Lib/copy.py b/contrib/tools/python3/src/Lib/copy.py
index 1081d43952..3784c6decf 100644
--- a/contrib/tools/python3/src/Lib/copy.py
+++ b/contrib/tools/python3/src/Lib/copy.py
@@ -39,8 +39,8 @@ Python's deep copy operation avoids these problems by:
set of components copied
This version does not copy types like module, class, function, method,
-nor stack trace, stack frame, nor file, socket, window, nor any
-similar types.
+nor stack trace, stack frame, file, socket, window, or any
+similar types.
Classes can use the same interfaces to control copying that they use
to control pickling: they can define methods called __getinitargs__(),
@@ -75,20 +75,20 @@ def copy(x):
if copier:
return copier(x)
- if issubclass(cls, type):
+ if issubclass(cls, type):
# treat it as a regular class:
return _copy_immutable(x)
copier = getattr(cls, "__copy__", None)
- if copier is not None:
+ if copier is not None:
return copier(x)
reductor = dispatch_table.get(cls)
- if reductor is not None:
+ if reductor is not None:
rv = reductor(x)
else:
reductor = getattr(x, "__reduce_ex__", None)
- if reductor is not None:
+ if reductor is not None:
rv = reductor(4)
else:
reductor = getattr(x, "__reduce__", None)
@@ -107,7 +107,7 @@ _copy_dispatch = d = {}
def _copy_immutable(x):
return x
for t in (type(None), int, float, bool, complex, str, tuple,
- bytes, frozenset, type, range, slice, property,
+ bytes, frozenset, type, range, slice, property,
types.BuiltinFunctionType, type(Ellipsis), type(NotImplemented),
types.FunctionType, weakref.ref):
d[t] = _copy_immutable
@@ -142,14 +142,14 @@ def deepcopy(x, memo=None, _nil=[]):
cls = type(x)
copier = _deepcopy_dispatch.get(cls)
- if copier is not None:
+ if copier is not None:
y = copier(x, memo)
else:
- if issubclass(cls, type):
+ if issubclass(cls, type):
y = _deepcopy_atomic(x, memo)
else:
copier = getattr(x, "__deepcopy__", None)
- if copier is not None:
+ if copier is not None:
y = copier(memo)
else:
reductor = dispatch_table.get(cls)
@@ -157,7 +157,7 @@ def deepcopy(x, memo=None, _nil=[]):
rv = reductor(x)
else:
reductor = getattr(x, "__reduce_ex__", None)
- if reductor is not None:
+ if reductor is not None:
rv = reductor(4)
else:
reductor = getattr(x, "__reduce__", None)
@@ -190,12 +190,12 @@ d[bool] = _deepcopy_atomic
d[complex] = _deepcopy_atomic
d[bytes] = _deepcopy_atomic
d[str] = _deepcopy_atomic
-d[types.CodeType] = _deepcopy_atomic
+d[types.CodeType] = _deepcopy_atomic
d[type] = _deepcopy_atomic
d[types.BuiltinFunctionType] = _deepcopy_atomic
d[types.FunctionType] = _deepcopy_atomic
d[weakref.ref] = _deepcopy_atomic
-d[property] = _deepcopy_atomic
+d[property] = _deepcopy_atomic
def _deepcopy_list(x, memo, deepcopy=deepcopy):
y = []
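The copy.py changes route classes (issubclass(cls, type)) and additional built-in types such as CodeType and property through the atomic copiers, which simply hand back the original object. A quick sketch with made-up Widget/helper names:

    import copy

    class Widget:      # made-up sample class
        pass

    def helper():      # made-up sample function
        pass

    print(copy.copy(Widget) is Widget)      # True: classes copy atomically
    print(copy.deepcopy(helper) is helper)  # True: so do functions
    print(copy.deepcopy([1, [2, 3]]))       # containers are still duplicated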
diff --git a/contrib/tools/python3/src/Lib/copyreg.py b/contrib/tools/python3/src/Lib/copyreg.py
index 7ab8c128eb..5485cceb59 100644
--- a/contrib/tools/python3/src/Lib/copyreg.py
+++ b/contrib/tools/python3/src/Lib/copyreg.py
@@ -48,36 +48,36 @@ def _reconstructor(cls, base, state):
return obj
_HEAPTYPE = 1<<9
-_new_type = type(int.__new__)
+_new_type = type(int.__new__)
# Python code for object.__reduce_ex__ for protocols 0 and 1
def _reduce_ex(self, proto):
assert proto < 2
- cls = self.__class__
- for base in cls.__mro__:
+ cls = self.__class__
+ for base in cls.__mro__:
if hasattr(base, '__flags__') and not base.__flags__ & _HEAPTYPE:
break
- new = base.__new__
- if isinstance(new, _new_type) and new.__self__ is base:
- break
+ new = base.__new__
+ if isinstance(new, _new_type) and new.__self__ is base:
+ break
else:
base = object # not really reachable
if base is object:
state = None
else:
- if base is cls:
- raise TypeError(f"cannot pickle {cls.__name__!r} object")
+ if base is cls:
+ raise TypeError(f"cannot pickle {cls.__name__!r} object")
state = base(self)
- args = (cls, base, state)
+ args = (cls, base, state)
try:
getstate = self.__getstate__
except AttributeError:
if getattr(self, "__slots__", None):
- raise TypeError(f"cannot pickle {cls.__name__!r} object: "
- f"a class that defines __slots__ without "
- f"defining __getstate__ cannot be pickled "
- f"with protocol {proto}") from None
+ raise TypeError(f"cannot pickle {cls.__name__!r} object: "
+ f"a class that defines __slots__ without "
+ f"defining __getstate__ cannot be pickled "
+ f"with protocol {proto}") from None
try:
dict = self.__dict__
except AttributeError:
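The copyreg hunk reworks _reduce_ex() and its TypeError for __slots__ classes lacking __getstate__ under protocols 0 and 1. A sketch that triggers it, assuming a pre-3.11 interpreter like the one vendored here (3.11 added object.__getstate__, which changes this behavior):

    import pickle

    class Point:                 # made-up class that triggers the error
        __slots__ = ("x",)
        def __init__(self):
            self.x = 1

    try:
        pickle.dumps(Point(), protocol=1)  # protocols 0/1 use copyreg._reduce_ex
    except TypeError as exc:
        print(exc)  # cannot pickle 'Point' object: a class that defines
                    # __slots__ without defining __getstate__ ...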
diff --git a/contrib/tools/python3/src/Lib/crypt.py b/contrib/tools/python3/src/Lib/crypt.py
index 33dbc46bb3..00d153c658 100644
--- a/contrib/tools/python3/src/Lib/crypt.py
+++ b/contrib/tools/python3/src/Lib/crypt.py
@@ -1,16 +1,16 @@
"""Wrapper to the POSIX crypt library call and associated functionality."""
-import sys as _sys
-
-try:
- import _crypt
-except ModuleNotFoundError:
- if _sys.platform == 'win32':
- raise ImportError("The crypt module is not supported on Windows")
- else:
- raise ImportError("The required _crypt module was not built as part of CPython")
-
-import errno
+import sys as _sys
+
+try:
+ import _crypt
+except ModuleNotFoundError:
+ if _sys.platform == 'win32':
+ raise ImportError("The crypt module is not supported on Windows")
+ else:
+ raise ImportError("The required _crypt module was not built as part of CPython")
+
+import errno
import string as _string
from random import SystemRandom as _SystemRandom
from collections import namedtuple as _namedtuple
@@ -89,14 +89,14 @@ def _add_method(name, *args, rounds=None):
method = _Method(name, *args)
globals()['METHOD_' + name] = method
salt = mksalt(method, rounds=rounds)
- result = None
- try:
- result = crypt('', salt)
- except OSError as e:
- # Not all libc libraries support all encryption methods.
- if e.errno == errno.EINVAL:
- return False
- raise
+ result = None
+ try:
+ result = crypt('', salt)
+ except OSError as e:
+ # Not all libc libraries support all encryption methods.
+ if e.errno == errno.EINVAL:
+ return False
+ raise
if result and len(result) == method.total_size:
methods.append(method)
return True
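The crypt changes make the Windows failure mode an explicit ImportError and probe each method with a trial crypt() call. A minimal POSIX-only usage sketch (the password is illustrative):

    import crypt  # POSIX-only: raises ImportError on Windows, per the hunk above

    salt = crypt.mksalt(crypt.methods[0])  # strongest method the local libc supports
    hashed = crypt.crypt("s3cret", salt)
    print(crypt.crypt("s3cret", hashed) == hashed)  # True: verify by re-hashing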
diff --git a/contrib/tools/python3/src/Lib/csv.py b/contrib/tools/python3/src/Lib/csv.py
index dc85077f3e..6cb260abe2 100644
--- a/contrib/tools/python3/src/Lib/csv.py
+++ b/contrib/tools/python3/src/Lib/csv.py
@@ -116,7 +116,7 @@ class DictReader:
# values
while row == []:
row = next(self.reader)
- d = dict(zip(self.fieldnames, row))
+ d = dict(zip(self.fieldnames, row))
lf = len(self.fieldnames)
lr = len(row)
if lf < lr:
@@ -140,7 +140,7 @@ class DictWriter:
def writeheader(self):
header = dict(zip(self.fieldnames, self.fieldnames))
- return self.writerow(header)
+ return self.writerow(header)
def _dict_to_list(self, rowdict):
if self.extrasaction == "raise":
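The csv tweaks make DictReader build plain dicts and make writeheader() return the underlying writerow() result. A small sketch:

    import csv, io

    buf = io.StringIO()
    writer = csv.DictWriter(buf, fieldnames=["name", "qty"])
    writer.writeheader()  # per the hunk above, now returns writerow()'s result
    writer.writerow({"name": "bolt", "qty": 3})

    buf.seek(0)
    for row in csv.DictReader(buf):
        print(row)  # {'name': 'bolt', 'qty': '3'} as a regular dict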
diff --git a/contrib/tools/python3/src/Lib/ctypes/__init__.py b/contrib/tools/python3/src/Lib/ctypes/__init__.py
index 2dde747beb..d1b2d300ec 100644
--- a/contrib/tools/python3/src/Lib/ctypes/__init__.py
+++ b/contrib/tools/python3/src/Lib/ctypes/__init__.py
@@ -1,7 +1,7 @@
"""create and manipulate C data types in Python"""
import os as _os, sys as _sys
-import types as _types
+import types as _types
__version__ = "1.1.0"
@@ -54,13 +54,13 @@ def create_string_buffer(init, size=None):
if isinstance(init, bytes):
if size is None:
size = len(init)+1
- _sys.audit("ctypes.create_string_buffer", init, size)
+ _sys.audit("ctypes.create_string_buffer", init, size)
buftype = c_char * size
buf = buftype()
buf.value = init
return buf
elif isinstance(init, int):
- _sys.audit("ctypes.create_string_buffer", None, init)
+ _sys.audit("ctypes.create_string_buffer", None, init)
buftype = c_char * init
buf = buftype()
return buf
@@ -278,22 +278,22 @@ def create_unicode_buffer(init, size=None):
"""
if isinstance(init, str):
if size is None:
- if sizeof(c_wchar) == 2:
- # UTF-16 requires a surrogate pair (2 wchar_t) for non-BMP
- # characters (outside [U+0000; U+FFFF] range). +1 for trailing
- # NUL character.
- size = sum(2 if ord(c) > 0xFFFF else 1 for c in init) + 1
- else:
- # 32-bit wchar_t (1 wchar_t per Unicode character). +1 for
- # trailing NUL character.
- size = len(init) + 1
- _sys.audit("ctypes.create_unicode_buffer", init, size)
+ if sizeof(c_wchar) == 2:
+ # UTF-16 requires a surrogate pair (2 wchar_t) for non-BMP
+ # characters (outside [U+0000; U+FFFF] range). +1 for trailing
+ # NUL character.
+ size = sum(2 if ord(c) > 0xFFFF else 1 for c in init) + 1
+ else:
+ # 32-bit wchar_t (1 wchar_t per Unicode character). +1 for
+ # trailing NUL character.
+ size = len(init) + 1
+ _sys.audit("ctypes.create_unicode_buffer", init, size)
buftype = c_wchar * size
buf = buftype()
buf.value = init
return buf
elif isinstance(init, int):
- _sys.audit("ctypes.create_unicode_buffer", None, init)
+ _sys.audit("ctypes.create_unicode_buffer", None, init)
buftype = c_wchar * init
buf = buftype()
return buf
@@ -340,8 +340,8 @@ class CDLL(object):
def __init__(self, name, mode=DEFAULT_MODE, handle=None,
use_errno=False,
- use_last_error=False,
- winmode=None):
+ use_last_error=False,
+ winmode=None):
self._name = name
flags = self._func_flags_
if use_errno:
@@ -356,15 +356,15 @@ class CDLL(object):
"""
if name and name.endswith(")") and ".a(" in name:
mode |= ( _os.RTLD_MEMBER | _os.RTLD_NOW )
- if _os.name == "nt":
- if winmode is not None:
- mode = winmode
- else:
- import nt
- mode = nt._LOAD_LIBRARY_SEARCH_DEFAULT_DIRS
- if '/' in name or '\\' in name:
- self._name = nt._getfullpathname(self._name)
- mode |= nt._LOAD_LIBRARY_SEARCH_DLL_LOAD_DIR
+ if _os.name == "nt":
+ if winmode is not None:
+ mode = winmode
+ else:
+ import nt
+ mode = nt._LOAD_LIBRARY_SEARCH_DEFAULT_DIRS
+ if '/' in name or '\\' in name:
+ self._name = nt._getfullpathname(self._name)
+ mode |= nt._LOAD_LIBRARY_SEARCH_DLL_LOAD_DIR
class _FuncPtr(_CFuncPtr):
_flags_ = flags
@@ -465,8 +465,8 @@ class LibraryLoader(object):
def LoadLibrary(self, name):
return self._dlltype(name)
- __class_getitem__ = classmethod(_types.GenericAlias)
-
+ __class_getitem__ = classmethod(_types.GenericAlias)
+
cdll = LibraryLoader(CDLL)
pydll = LibraryLoader(PyDLL)
diff --git a/contrib/tools/python3/src/Lib/ctypes/_aix.py b/contrib/tools/python3/src/Lib/ctypes/_aix.py
index fc3e95cbcc..084f0e499c 100644
--- a/contrib/tools/python3/src/Lib/ctypes/_aix.py
+++ b/contrib/tools/python3/src/Lib/ctypes/_aix.py
@@ -163,7 +163,7 @@ def get_legacy(members):
return member
else:
# 32-bit legacy names - both shr.o and shr4.o exist.
- # shr.o is the preferred name so we look for shr.o first
+ # shr.o is the preferred name so we look for shr.o first
# i.e., shr4.o is returned only when shr.o does not exist
for name in ['shr.o', 'shr4.o']:
member = get_one_match(re.escape(name), members)
@@ -282,7 +282,7 @@ def find_shared(paths, name):
if path.exists(archive):
members = get_shared(get_ld_headers(archive))
member = get_member(re.escape(name), members)
- if member is not None:
+ if member is not None:
return (base, member)
else:
return (None, None)
@@ -307,7 +307,7 @@ def find_library(name):
libpaths = get_libpaths()
(base, member) = find_shared(libpaths, name)
- if base is not None:
+ if base is not None:
return f"{base}({member})"
# To get here, a member in an archive has not been found
diff --git a/contrib/tools/python3/src/Lib/ctypes/macholib/dyld.py b/contrib/tools/python3/src/Lib/ctypes/macholib/dyld.py
index 1c3f8fd38b..bf279ad087 100644
--- a/contrib/tools/python3/src/Lib/ctypes/macholib/dyld.py
+++ b/contrib/tools/python3/src/Lib/ctypes/macholib/dyld.py
@@ -6,11 +6,11 @@ import os
from ctypes.macholib.framework import framework_info
from ctypes.macholib.dylib import dylib_info
from itertools import *
-try:
- from _ctypes import _dyld_shared_cache_contains_path
-except ImportError:
- def _dyld_shared_cache_contains_path(*args):
- raise NotImplementedError
+try:
+ from _ctypes import _dyld_shared_cache_contains_path
+except ImportError:
+ def _dyld_shared_cache_contains_path(*args):
+ raise NotImplementedError
__all__ = [
'dyld_find', 'framework_find',
@@ -127,15 +127,15 @@ def dyld_find(name, executable_path=None, env=None):
dyld_executable_path_search(name, executable_path),
dyld_default_search(name, env),
), env):
-
+
if os.path.isfile(path):
return path
- try:
- if _dyld_shared_cache_contains_path(path):
- return path
- except NotImplementedError:
- pass
-
+ try:
+ if _dyld_shared_cache_contains_path(path):
+ return path
+ except NotImplementedError:
+ pass
+
raise ValueError("dylib %s could not be found" % (name,))
def framework_find(fn, executable_path=None, env=None):
@@ -161,8 +161,8 @@ def framework_find(fn, executable_path=None, env=None):
return dyld_find(fn, executable_path=executable_path, env=env)
except ValueError:
raise error
- finally:
- error = None
+ finally:
+ error = None
def test_dyld_find():
env = {}
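
The dyld hunks show the optional-accelerator pattern: _dyld_shared_cache_contains_path comes from the _ctypes extension when available, a pure-Python stub raises NotImplementedError otherwise, and callers swallow that exception. The same pattern with hypothetical names (_fastlib and probe are illustrative, not real modules):

    import os

    try:
        from _fastlib import probe        # optional C accelerator (hypothetical)
    except ImportError:
        def probe(*args):
            raise NotImplementedError

    def lookup(path):
        if os.path.isfile(path):
            return path
        try:
            if probe(path):               # e.g. a shared-cache membership test
                return path
        except NotImplementedError:
            pass
        return None
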
diff --git a/contrib/tools/python3/src/Lib/ctypes/util.py b/contrib/tools/python3/src/Lib/ctypes/util.py
index a1f045e62c..55b871c575 100644
--- a/contrib/tools/python3/src/Lib/ctypes/util.py
+++ b/contrib/tools/python3/src/Lib/ctypes/util.py
@@ -93,12 +93,12 @@ elif os.name == "posix":
# Andreas Degert's find functions, using gcc, /sbin/ldconfig, objdump
import re, tempfile
- def _is_elf(filename):
- "Return True if the given file is an ELF file"
- elf_header = b'\x7fELF'
- with open(filename, 'br') as thefile:
- return thefile.read(4) == elf_header
-
+ def _is_elf(filename):
+ "Return True if the given file is an ELF file"
+ elf_header = b'\x7fELF'
+ with open(filename, 'br') as thefile:
+ return thefile.read(4) == elf_header
+
def _findLib_gcc(name):
# Run GCC's linker with the -t (aka --trace) option and examine the
# library name it prints out. The GCC command will fail because we
@@ -136,19 +136,19 @@ elif os.name == "posix":
# Raised if the file was already removed, which is the normal
# behaviour of GCC if linking fails
pass
- res = re.findall(expr, trace)
+ res = re.findall(expr, trace)
if not res:
return None
- for file in res:
- # Check if the given file is an elf file: gcc can report
- # some files that are linker scripts and not actual
- # shared objects. See bpo-41976 for more details
- if not _is_elf(file):
- continue
- return os.fsdecode(file)
-
+ for file in res:
+ # Check if the given file is an elf file: gcc can report
+ # some files that are linker scripts and not actual
+ # shared objects. See bpo-41976 for more details
+ if not _is_elf(file):
+ continue
+ return os.fsdecode(file)
+
if sys.platform == "sunos5":
# use /usr/ccs/bin/dump on solaris
def _get_soname(f):
@@ -312,22 +312,22 @@ elif os.name == "posix":
stderr=subprocess.PIPE,
universal_newlines=True)
out, _ = p.communicate()
- res = re.findall(expr, os.fsdecode(out))
- for file in res:
- # Check if the given file is an elf file: gcc can report
- # some files that are linker scripts and not actual
- # shared objects. See bpo-41976 for more details
- if not _is_elf(file):
- continue
- return os.fsdecode(file)
- except Exception:
+ res = re.findall(expr, os.fsdecode(out))
+ for file in res:
+ # Check if the given file is an elf file: gcc can report
+ # some files that are linker scripts and not actual
+ # shared objects. See bpo-41976 for more details
+ if not _is_elf(file):
+ continue
+ return os.fsdecode(file)
+ except Exception:
pass # result will be None
return result
def find_library(name):
# See issue #9998
return _findSoname_ldconfig(name) or \
- _get_soname(_findLib_gcc(name)) or _get_soname(_findLib_ld(name))
+ _get_soname(_findLib_gcc(name)) or _get_soname(_findLib_ld(name))
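
_is_elf() exists because gcc's trace output can name GNU ld linker scripts (plain-text files such as the libc.so stub on glibc) rather than real shared objects; bpo-41976 filters them by magic number. Stand-alone:

    def is_elf(filename):
        """True if the file starts with the 4-byte ELF magic."""
        with open(filename, 'rb') as f:
            return f.read(4) == b'\x7fELF'

    # A linker script like glibc's /usr/lib/libc.so is ASCII text and fails
    # this check; the actual DSO (e.g. libc.so.6) passes.
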
try:
from library.python.symbols.module import find_library as _find_library
diff --git a/contrib/tools/python3/src/Lib/curses/__init__.py b/contrib/tools/python3/src/Lib/curses/__init__.py
index 69270bfcd2..11e21daeaa 100644
--- a/contrib/tools/python3/src/Lib/curses/__init__.py
+++ b/contrib/tools/python3/src/Lib/curses/__init__.py
@@ -60,7 +60,7 @@ except NameError:
# raises an exception, wrapper() will restore the terminal to a sane state so
# you can read the resulting traceback.
-def wrapper(func, /, *args, **kwds):
+def wrapper(func, /, *args, **kwds):
"""Wrapper function that initializes curses and calls another function,
restoring normal keyboard/screen behavior on error.
The callable object 'func' is then passed the main window 'stdscr'
diff --git a/contrib/tools/python3/src/Lib/dataclasses.py b/contrib/tools/python3/src/Lib/dataclasses.py
index 5ff67ad2ea..09982bcc3e 100644
--- a/contrib/tools/python3/src/Lib/dataclasses.py
+++ b/contrib/tools/python3/src/Lib/dataclasses.py
@@ -7,7 +7,7 @@ import keyword
import builtins
import functools
import _thread
-from types import GenericAlias
+from types import GenericAlias
__all__ = ['dataclass',
@@ -200,24 +200,24 @@ _POST_INIT_NAME = '__post_init__'
# https://bugs.python.org/issue33453 for details.
_MODULE_IDENTIFIER_RE = re.compile(r'^(?:\s*(\w+)\s*\.)?\s*(\w+)')
-class InitVar:
- __slots__ = ('type', )
-
- def __init__(self, type):
- self.type = type
-
- def __repr__(self):
- if isinstance(self.type, type) and not isinstance(self.type, GenericAlias):
- type_name = self.type.__name__
- else:
- # typing objects, e.g. List[int]
- type_name = repr(self.type)
- return f'dataclasses.InitVar[{type_name}]'
-
- def __class_getitem__(cls, type):
- return InitVar(type)
-
-
+class InitVar:
+ __slots__ = ('type', )
+
+ def __init__(self, type):
+ self.type = type
+
+ def __repr__(self):
+ if isinstance(self.type, type) and not isinstance(self.type, GenericAlias):
+ type_name = self.type.__name__
+ else:
+ # typing objects, e.g. List[int]
+ type_name = repr(self.type)
+ return f'dataclasses.InitVar[{type_name}]'
+
+ def __class_getitem__(cls, type):
+ return InitVar(type)
+
+
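
The restored InitVar class is what makes init-only pseudo-fields work: __class_getitem__ returns an InitVar instance carrying the subscripted type, and such fields become __init__ parameters that are forwarded to __post_init__ instead of being stored. For example:

    from dataclasses import dataclass, InitVar

    @dataclass
    class C:
        x: int
        scale: InitVar[int] = 1       # __init__ parameter, not a stored field

        def __post_init__(self, scale):
            self.x *= scale

    print(C(2, scale=10).x)           # 20
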
# Instances of Field are only ever created from within this module,
# and only from the field() function, although Field instances are
# exposed externally as (conceptually) read-only objects.
@@ -285,9 +285,9 @@ class Field:
# it.
func(self.default, owner, name)
- __class_getitem__ = classmethod(GenericAlias)
-
+ __class_getitem__ = classmethod(GenericAlias)
+
class _DataclassParams:
__slots__ = ('init',
'repr',
@@ -381,26 +381,26 @@ def _create_fn(name, args, body, *, globals=None, locals=None,
# worries about external callers.
if locals is None:
locals = {}
- if 'BUILTINS' not in locals:
- locals['BUILTINS'] = builtins
+ if 'BUILTINS' not in locals:
+ locals['BUILTINS'] = builtins
return_annotation = ''
if return_type is not MISSING:
locals['_return_type'] = return_type
return_annotation = '->_return_type'
args = ','.join(args)
- body = '\n'.join(f' {b}' for b in body)
+ body = '\n'.join(f' {b}' for b in body)
# Compute the text of the entire function.
- txt = f' def {name}({args}){return_annotation}:\n{body}'
-
- local_vars = ', '.join(locals.keys())
- txt = f"def __create_fn__({local_vars}):\n{txt}\n return {name}"
+ txt = f' def {name}({args}){return_annotation}:\n{body}'
- ns = {}
- exec(txt, globals, ns)
- return ns['__create_fn__'](**locals)
+ local_vars = ', '.join(locals.keys())
+ txt = f"def __create_fn__({local_vars}):\n{txt}\n return {name}"
+ ns = {}
+ exec(txt, globals, ns)
+ return ns['__create_fn__'](**locals)
+
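
_create_fn() is the factory behind every generated dunder: it assembles the method's source text, nests it inside a synthetic __create_fn__ whose parameters are the would-be locals, exec()s the text, and then calls __create_fn__ so those values are bound as closure cells rather than looked up globally. The trick in miniature (names are illustrative):

    def make_adder(n):
        txt = " def add(x):\n  return x + N"
        txt = f"def __create_fn__(N):\n{txt}\n return add"
        ns = {}
        exec(txt, {}, ns)
        return ns['__create_fn__'](n)     # N becomes a closure cell of add

    print(make_adder(3)(4))               # 7
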
def _field_assign(frozen, name, value, self_name):
# If we're a frozen class, then assign to our fields in __init__
# via object.__setattr__. Otherwise, just use a simple
@@ -409,7 +409,7 @@ def _field_assign(frozen, name, value, self_name):
# self_name is what "self" is called in this function: don't
# hard-code "self", since that might be a field name.
if frozen:
- return f'BUILTINS.object.__setattr__({self_name},{name!r},{value})'
+ return f'BUILTINS.object.__setattr__({self_name},{name!r},{value})'
return f'{self_name}.{name}={value}'
@@ -486,7 +486,7 @@ def _init_param(f):
return f'{f.name}:_type_{f.name}{default}'
-def _init_fn(fields, frozen, has_post_init, self_name, globals):
+def _init_fn(fields, frozen, has_post_init, self_name, globals):
# fields contains both real fields and InitVar pseudo-fields.
# Make sure we don't have fields without defaults following fields
@@ -504,15 +504,15 @@ def _init_fn(fields, frozen, has_post_init, self_name, globals):
raise TypeError(f'non-default argument {f.name!r} '
'follows default argument')
- locals = {f'_type_{f.name}': f.type for f in fields}
- locals.update({
- 'MISSING': MISSING,
- '_HAS_DEFAULT_FACTORY': _HAS_DEFAULT_FACTORY,
- })
+ locals = {f'_type_{f.name}': f.type for f in fields}
+ locals.update({
+ 'MISSING': MISSING,
+ '_HAS_DEFAULT_FACTORY': _HAS_DEFAULT_FACTORY,
+ })
body_lines = []
for f in fields:
- line = _field_init(f, frozen, locals, self_name)
+ line = _field_init(f, frozen, locals, self_name)
# line is None means that this field doesn't require
# initialization (it's a pseudo-field). Just skip it.
if line:
@@ -536,19 +536,19 @@ def _init_fn(fields, frozen, has_post_init, self_name, globals):
return_type=None)
-def _repr_fn(fields, globals):
+def _repr_fn(fields, globals):
fn = _create_fn('__repr__',
('self',),
['return self.__class__.__qualname__ + f"(' +
', '.join([f"{f.name}={{self.{f.name}!r}}"
for f in fields]) +
- ')"'],
- globals=globals)
+ ')"'],
+ globals=globals)
return _recursive_repr(fn)
-def _frozen_get_del_attr(cls, fields, globals):
- locals = {'cls': cls,
+def _frozen_get_del_attr(cls, fields, globals):
+ locals = {'cls': cls,
'FrozenInstanceError': FrozenInstanceError}
if fields:
fields_str = '(' + ','.join(repr(f.name) for f in fields) + ',)'
@@ -560,19 +560,19 @@ def _frozen_get_del_attr(cls, fields, globals):
(f'if type(self) is cls or name in {fields_str}:',
' raise FrozenInstanceError(f"cannot assign to field {name!r}")',
f'super(cls, self).__setattr__(name, value)'),
- locals=locals,
+ locals=locals,
globals=globals),
_create_fn('__delattr__',
('self', 'name'),
(f'if type(self) is cls or name in {fields_str}:',
' raise FrozenInstanceError(f"cannot delete field {name!r}")',
f'super(cls, self).__delattr__(name)'),
- locals=locals,
+ locals=locals,
globals=globals),
)
-def _cmp_fn(name, op, self_tuple, other_tuple, globals):
+def _cmp_fn(name, op, self_tuple, other_tuple, globals):
# Create a comparison function. If the fields in the object are
# named 'x' and 'y', then self_tuple is the string
# '(self.x,self.y)' and other_tuple is the string
@@ -582,16 +582,16 @@ def _cmp_fn(name, op, self_tuple, other_tuple, globals):
('self', 'other'),
[ 'if other.__class__ is self.__class__:',
f' return {self_tuple}{op}{other_tuple}',
- 'return NotImplemented'],
- globals=globals)
+ 'return NotImplemented'],
+ globals=globals)
-def _hash_fn(fields, globals):
+def _hash_fn(fields, globals):
self_tuple = _tuple_str('self', fields)
return _create_fn('__hash__',
('self',),
- [f'return hash({self_tuple})'],
- globals=globals)
+ [f'return hash({self_tuple})'],
+ globals=globals)
def _is_classvar(a_type, typing):
@@ -605,8 +605,8 @@ def _is_classvar(a_type, typing):
def _is_initvar(a_type, dataclasses):
# The module we're checking against is the module we're
# currently in (dataclasses.py).
- return (a_type is dataclasses.InitVar
- or type(a_type) is dataclasses.InitVar)
+ return (a_type is dataclasses.InitVar
+ or type(a_type) is dataclasses.InitVar)
def _is_type(annotation, cls, a_module, a_type, is_type_predicate):
@@ -696,7 +696,7 @@ def _get_field(cls, a_name, a_type):
# In addition to checking for actual types here, also check for
# string annotations. get_type_hints() won't always work for us
# (see https://github.com/python/typing/issues/508 for example),
- # plus it's expensive and would require an eval for every string
+ # plus it's expensive and would require an eval for every string
# annotation. So, make a best effort to see if this is a ClassVar
# or InitVar using regex's and checking that the thing referenced
# is actually of the correct type.
@@ -764,14 +764,14 @@ def _set_new_attribute(cls, name, value):
# take. The common case is to do nothing, so instead of providing a
# function that is a no-op, use None to signify that.
-def _hash_set_none(cls, fields, globals):
+def _hash_set_none(cls, fields, globals):
return None
-def _hash_add(cls, fields, globals):
+def _hash_add(cls, fields, globals):
flds = [f for f in fields if (f.compare if f.hash is None else f.hash)]
- return _hash_fn(flds, globals)
+ return _hash_fn(flds, globals)
-def _hash_exception(cls, fields, globals):
+def _hash_exception(cls, fields, globals):
# Raise an exception.
raise TypeError(f'Cannot overwrite attribute __hash__ '
f'in class {cls.__name__}')
@@ -813,16 +813,16 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
# is defined by the base class, which is found first.
fields = {}
- if cls.__module__ in sys.modules:
- globals = sys.modules[cls.__module__].__dict__
- else:
- # Theoretically this can happen if someone writes
- # a custom string to cls.__module__. In which case
- # such dataclass won't be fully introspectable
- # (w.r.t. typing.get_type_hints) but will still function
- # correctly.
- globals = {}
-
+ if cls.__module__ in sys.modules:
+ globals = sys.modules[cls.__module__].__dict__
+ else:
+ # Theoretically this can happen if someone writes
+ # a custom string to cls.__module__. In which case
+ # such dataclass won't be fully introspectable
+ # (w.r.t. typing.get_type_hints) but will still function
+ # correctly.
+ globals = {}
+
setattr(cls, _PARAMS, _DataclassParams(init, repr, eq, order,
unsafe_hash, frozen))
@@ -836,7 +836,7 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
# Only process classes that have been processed by our
# decorator. That is, they have a _FIELDS attribute.
base_fields = getattr(b, _FIELDS, None)
- if base_fields is not None:
+ if base_fields is not None:
has_dataclass_bases = True
for f in base_fields.values():
fields[f.name] = f
@@ -932,7 +932,7 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
# if possible.
'__dataclass_self__' if 'self' in fields
else 'self',
- globals,
+ globals,
))
# Get the fields as a list, and include only real fields. This is
@@ -941,18 +941,18 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
if repr:
flds = [f for f in field_list if f.repr]
- _set_new_attribute(cls, '__repr__', _repr_fn(flds, globals))
+ _set_new_attribute(cls, '__repr__', _repr_fn(flds, globals))
if eq:
- # Create __eq__ method. There's no need for a __ne__ method,
+ # Create __eq__ method. There's no need for a __ne__ method,
# since python will call __eq__ and negate it.
flds = [f for f in field_list if f.compare]
self_tuple = _tuple_str('self', flds)
other_tuple = _tuple_str('other', flds)
_set_new_attribute(cls, '__eq__',
_cmp_fn('__eq__', '==',
- self_tuple, other_tuple,
- globals=globals))
+ self_tuple, other_tuple,
+ globals=globals))
if order:
# Create and set the ordering methods.
@@ -965,14 +965,14 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
('__ge__', '>='),
]:
if _set_new_attribute(cls, name,
- _cmp_fn(name, op, self_tuple, other_tuple,
- globals=globals)):
+ _cmp_fn(name, op, self_tuple, other_tuple,
+ globals=globals)):
raise TypeError(f'Cannot overwrite attribute {name} '
f'in class {cls.__name__}. Consider using '
'functools.total_ordering')
if frozen:
- for fn in _frozen_get_del_attr(cls, field_list, globals):
+ for fn in _frozen_get_del_attr(cls, field_list, globals):
if _set_new_attribute(cls, fn.__name__, fn):
raise TypeError(f'Cannot overwrite attribute {fn.__name__} '
f'in class {cls.__name__}')
@@ -985,7 +985,7 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
if hash_action:
# No need to call _set_new_attribute here, since by the time
# we're here the overwriting is unconditional.
- cls.__hash__ = hash_action(cls, field_list, globals)
+ cls.__hash__ = hash_action(cls, field_list, globals)
if not getattr(cls, '__doc__'):
# Create a class doc-string.
@@ -995,7 +995,7 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
return cls
-def dataclass(cls=None, /, *, init=True, repr=True, eq=True, order=False,
+def dataclass(cls=None, /, *, init=True, repr=True, eq=True, order=False,
unsafe_hash=False, frozen=False):
"""Returns the same class as was passed in, with dunder methods
added based on the fields defined in the class.
@@ -1013,12 +1013,12 @@ def dataclass(cls=None, /, *, init=True, repr=True, eq=True, order=False,
return _process_class(cls, init, repr, eq, order, unsafe_hash, frozen)
# See if we're being called as @dataclass or @dataclass().
- if cls is None:
+ if cls is None:
# We're called with parens.
return wrap
# We're called as @dataclass without parens.
- return wrap(cls)
+ return wrap(cls)
def fields(class_or_instance):
@@ -1041,14 +1041,14 @@ def fields(class_or_instance):
def _is_dataclass_instance(obj):
"""Returns True if obj is an instance of a dataclass."""
- return hasattr(type(obj), _FIELDS)
+ return hasattr(type(obj), _FIELDS)
def is_dataclass(obj):
"""Returns True if obj is a dataclass or an instance of a
dataclass."""
- cls = obj if isinstance(obj, type) and not isinstance(obj, GenericAlias) else type(obj)
- return hasattr(cls, _FIELDS)
+ cls = obj if isinstance(obj, type) and not isinstance(obj, GenericAlias) else type(obj)
+ return hasattr(cls, _FIELDS)
def asdict(obj, *, dict_factory=dict):
@@ -1094,7 +1094,7 @@ def _asdict_inner(obj, dict_factory):
# method, because:
# - it does not recurse in to the namedtuple fields and
# convert them to dicts (using dict_factory).
- # - I don't actually want to return a dict here. The main
+ # - I don't actually want to return a dict here. The main
# use case here is json.dumps, and it handles converting
# namedtuples to lists. Admittedly we're losing some
# information here when we produce a json list instead of a
@@ -1216,7 +1216,7 @@ def make_dataclass(cls_name, fields, *, bases=(), namespace=None, init=True,
raise TypeError(f'Invalid field: {item!r}')
if not isinstance(name, str) or not name.isidentifier():
- raise TypeError(f'Field names must be valid identifiers: {name!r}')
+ raise TypeError(f'Field names must be valid identifiers: {name!r}')
if keyword.iskeyword(name):
raise TypeError(f'Field names must not be keywords: {name!r}')
if name in seen:
@@ -1233,7 +1233,7 @@ def make_dataclass(cls_name, fields, *, bases=(), namespace=None, init=True,
unsafe_hash=unsafe_hash, frozen=frozen)
-def replace(obj, /, **changes):
+def replace(obj, /, **changes):
"""Return a new object replacing specified fields with new values.
This is especially useful for frozen classes. Example usage:
@@ -1271,7 +1271,7 @@ def replace(obj, /, **changes):
continue
if f.name not in changes:
- if f._field_type is _FIELD_INITVAR and f.default is MISSING:
+ if f._field_type is _FIELD_INITVAR and f.default is MISSING:
raise ValueError(f"InitVar {f.name!r} "
'must be specified with replace()')
changes[f.name] = getattr(obj, f.name)
diff --git a/contrib/tools/python3/src/Lib/datetime.py b/contrib/tools/python3/src/Lib/datetime.py
index 23d2bf0918..cdc71a5577 100644
--- a/contrib/tools/python3/src/Lib/datetime.py
+++ b/contrib/tools/python3/src/Lib/datetime.py
@@ -4,10 +4,10 @@ See http://www.iana.org/time-zones/repository/tz-link.html for
time zone and DST data sources.
"""
-__all__ = ("date", "datetime", "time", "timedelta", "timezone", "tzinfo",
- "MINYEAR", "MAXYEAR")
-
-
+__all__ = ("date", "datetime", "time", "timedelta", "timezone", "tzinfo",
+ "MINYEAR", "MAXYEAR")
+
+
import time as _time
import math as _math
import sys
@@ -383,34 +383,34 @@ def _check_utc_offset(name, offset):
def _check_int_field(value):
if isinstance(value, int):
return value
- if isinstance(value, float):
- raise TypeError('integer argument expected, got float')
- try:
- value = value.__index__()
- except AttributeError:
- pass
- else:
- if not isinstance(value, int):
- raise TypeError('__index__ returned non-int (type %s)' %
- type(value).__name__)
- return value
- orig = value
- try:
- value = value.__int__()
- except AttributeError:
- pass
- else:
- if not isinstance(value, int):
+ if isinstance(value, float):
+ raise TypeError('integer argument expected, got float')
+ try:
+ value = value.__index__()
+ except AttributeError:
+ pass
+ else:
+ if not isinstance(value, int):
+ raise TypeError('__index__ returned non-int (type %s)' %
+ type(value).__name__)
+ return value
+ orig = value
+ try:
+ value = value.__int__()
+ except AttributeError:
+ pass
+ else:
+ if not isinstance(value, int):
raise TypeError('__int__ returned non-int (type %s)' %
type(value).__name__)
- import warnings
- warnings.warn("an integer is required (got type %s)" %
- type(orig).__name__,
- DeprecationWarning,
- stacklevel=2)
- return value
- raise TypeError('an integer is required (got type %s)' %
- type(value).__name__)
+ import warnings
+ warnings.warn("an integer is required (got type %s)" %
+ type(orig).__name__,
+ DeprecationWarning,
+ stacklevel=2)
+ return value
+ raise TypeError('an integer is required (got type %s)' %
+ type(value).__name__)
def _check_date_fields(year, month, day):
year = _check_int_field(year)
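
The restored _check_int_field() encodes the coercion contract for date/time components: real ints pass through, floats are rejected outright, objects with __index__ are converted losslessly, and legacy __int__-only objects still work but draw a DeprecationWarning. Roughly:

    import operator

    class Day:
        def __init__(self, n): self._n = n
        def __index__(self): return self._n

    print(operator.index(Day(7)))     # 7 -- the lossless protocol preferred here
    # datetime.date(2021, 1, 1.0) raises TypeError('integer argument expected,
    # got float'); only the __int__ fallback emits the DeprecationWarning.
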
@@ -737,31 +737,31 @@ class timedelta:
if isinstance(other, timedelta):
return self._cmp(other) == 0
else:
- return NotImplemented
+ return NotImplemented
def __le__(self, other):
if isinstance(other, timedelta):
return self._cmp(other) <= 0
else:
- return NotImplemented
+ return NotImplemented
def __lt__(self, other):
if isinstance(other, timedelta):
return self._cmp(other) < 0
else:
- return NotImplemented
+ return NotImplemented
def __ge__(self, other):
if isinstance(other, timedelta):
return self._cmp(other) >= 0
else:
- return NotImplemented
+ return NotImplemented
def __gt__(self, other):
if isinstance(other, timedelta):
return self._cmp(other) > 0
else:
- return NotImplemented
+ return NotImplemented
def _cmp(self, other):
assert isinstance(other, timedelta)
@@ -888,41 +888,41 @@ class date:
except Exception:
raise ValueError(f'Invalid isoformat string: {date_string!r}')
- @classmethod
- def fromisocalendar(cls, year, week, day):
- """Construct a date from the ISO year, week number and weekday.
-
- This is the inverse of the date.isocalendar() function"""
- # Year is bounded this way because 9999-12-31 is (9999, 52, 5)
- if not MINYEAR <= year <= MAXYEAR:
- raise ValueError(f"Year is out of range: {year}")
-
- if not 0 < week < 53:
- out_of_range = True
-
- if week == 53:
- # ISO years have 53 weeks in them on years starting with a
- # Thursday and leap years starting on a Wednesday
- first_weekday = _ymd2ord(year, 1, 1) % 7
- if (first_weekday == 4 or (first_weekday == 3 and
- _is_leap(year))):
- out_of_range = False
-
- if out_of_range:
- raise ValueError(f"Invalid week: {week}")
-
- if not 0 < day < 8:
- raise ValueError(f"Invalid weekday: {day} (range is [1, 7])")
-
- # Now compute the offset from (Y, 1, 1) in days:
- day_offset = (week - 1) * 7 + (day - 1)
-
- # Calculate the ordinal day for monday, week 1
- day_1 = _isoweek1monday(year)
- ord_day = day_1 + day_offset
-
- return cls(*_ord2ymd(ord_day))
-
+ @classmethod
+ def fromisocalendar(cls, year, week, day):
+ """Construct a date from the ISO year, week number and weekday.
+
+ This is the inverse of the date.isocalendar() function"""
+ # Year is bounded this way because 9999-12-31 is (9999, 52, 5)
+ if not MINYEAR <= year <= MAXYEAR:
+ raise ValueError(f"Year is out of range: {year}")
+
+ if not 0 < week < 53:
+ out_of_range = True
+
+ if week == 53:
+ # ISO years have 53 weeks in them on years starting with a
+ # Thursday and leap years starting on a Wednesday
+ first_weekday = _ymd2ord(year, 1, 1) % 7
+ if (first_weekday == 4 or (first_weekday == 3 and
+ _is_leap(year))):
+ out_of_range = False
+
+ if out_of_range:
+ raise ValueError(f"Invalid week: {week}")
+
+ if not 0 < day < 8:
+ raise ValueError(f"Invalid weekday: {day} (range is [1, 7])")
+
+ # Now compute the offset from (Y, 1, 1) in days:
+ day_offset = (week - 1) * 7 + (day - 1)
+
+ # Calculate the ordinal day for monday, week 1
+ day_1 = _isoweek1monday(year)
+ ord_day = day_1 + day_offset
+
+ return cls(*_ord2ymd(ord_day))
+
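
fromisocalendar() (new in 3.8) is the exact inverse of isocalendar(), and week 53 is only admitted for ISO years that actually have one: years starting on a Thursday, or leap years starting on a Wednesday. A round trip:

    from datetime import date

    d = date(2004, 1, 4)                      # a Sunday, ISO week 1 of 2004
    y, w, wd = d.isocalendar()
    print((y, w, wd))                         # (2004, 1, 7)
    print(date.fromisocalendar(y, w, wd))     # 2004-01-04

    # 2004 began on a Thursday, so ISO year 2004 really has 53 weeks:
    print(date.fromisocalendar(2004, 53, 1))  # 2004-12-27
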
# Conversions to string
def __repr__(self):
@@ -1067,7 +1067,7 @@ class date:
if isinstance(other, timedelta):
o = self.toordinal() + other.days
if 0 < o <= _MAXORDINAL:
- return type(self).fromordinal(o)
+ return type(self).fromordinal(o)
raise OverflowError("result out of range")
return NotImplemented
@@ -1095,7 +1095,7 @@ class date:
return self.toordinal() % 7 or 7
def isocalendar(self):
- """Return a named tuple containing ISO year, week number, and weekday.
+ """Return a named tuple containing ISO year, week number, and weekday.
The first ISO week of the year is the (Mon-Sun) week
containing the year's first Thursday; everything else derives
@@ -1120,7 +1120,7 @@ class date:
if today >= _isoweek1monday(year+1):
year += 1
week = 0
- return _IsoCalendarDate(year, week+1, day+1)
+ return _IsoCalendarDate(year, week+1, day+1)
# Pickle support.
@@ -1210,36 +1210,36 @@ class tzinfo:
else:
return (self.__class__, args, state)
-
-class IsoCalendarDate(tuple):
-
- def __new__(cls, year, week, weekday, /):
- return super().__new__(cls, (year, week, weekday))
-
- @property
- def year(self):
- return self[0]
-
- @property
- def week(self):
- return self[1]
-
- @property
- def weekday(self):
- return self[2]
-
- def __reduce__(self):
- # This code is intended to pickle the object without making the
- # class public. See https://bugs.python.org/msg352381
- return (tuple, (tuple(self),))
-
- def __repr__(self):
- return (f'{self.__class__.__name__}'
- f'(year={self[0]}, week={self[1]}, weekday={self[2]})')
-
-
-_IsoCalendarDate = IsoCalendarDate
-del IsoCalendarDate
+
+class IsoCalendarDate(tuple):
+
+ def __new__(cls, year, week, weekday, /):
+ return super().__new__(cls, (year, week, weekday))
+
+ @property
+ def year(self):
+ return self[0]
+
+ @property
+ def week(self):
+ return self[1]
+
+ @property
+ def weekday(self):
+ return self[2]
+
+ def __reduce__(self):
+ # This code is intended to pickle the object without making the
+ # class public. See https://bugs.python.org/msg352381
+ return (tuple, (tuple(self),))
+
+ def __repr__(self):
+ return (f'{self.__class__.__name__}'
+ f'(year={self[0]}, week={self[1]}, weekday={self[2]})')
+
+
+_IsoCalendarDate = IsoCalendarDate
+del IsoCalendarDate
_tzinfo_class = tzinfo
class time:
@@ -1344,31 +1344,31 @@ class time:
if isinstance(other, time):
return self._cmp(other, allow_mixed=True) == 0
else:
- return NotImplemented
+ return NotImplemented
def __le__(self, other):
if isinstance(other, time):
return self._cmp(other) <= 0
else:
- return NotImplemented
+ return NotImplemented
def __lt__(self, other):
if isinstance(other, time):
return self._cmp(other) < 0
else:
- return NotImplemented
+ return NotImplemented
def __ge__(self, other):
if isinstance(other, time):
return self._cmp(other) >= 0
else:
- return NotImplemented
+ return NotImplemented
def __gt__(self, other):
if isinstance(other, time):
return self._cmp(other) > 0
else:
- return NotImplemented
+ return NotImplemented
def _cmp(self, other, allow_mixed=False):
assert isinstance(other, time)
@@ -1452,8 +1452,8 @@ class time:
part is omitted if self.microsecond == 0.
The optional argument timespec specifies the number of additional
- terms of the time to include. Valid options are 'auto', 'hours',
- 'minutes', 'seconds', 'milliseconds' and 'microseconds'.
+ terms of the time to include. Valid options are 'auto', 'hours',
+ 'minutes', 'seconds', 'milliseconds' and 'microseconds'.
"""
s = _format_time(self._hour, self._minute, self._second,
self._microsecond, timespec)
@@ -1579,7 +1579,7 @@ class time:
self._tzinfo = tzinfo
def __reduce_ex__(self, protocol):
- return (self.__class__, self._getstate(protocol))
+ return (self.__class__, self._getstate(protocol))
def __reduce__(self):
return self.__reduce_ex__(2)
@@ -1590,7 +1590,7 @@ time.min = time(0, 0, 0)
time.max = time(23, 59, 59, 999999)
time.resolution = timedelta(microseconds=1)
-
+
class datetime(date):
"""datetime(year, month, day[, hour[, minute[, second[, microsecond[,tzinfo]]]]])
@@ -1883,10 +1883,10 @@ class datetime(date):
ts = (self - _EPOCH) // timedelta(seconds=1)
localtm = _time.localtime(ts)
local = datetime(*localtm[:6])
- # Extract TZ data
- gmtoff = localtm.tm_gmtoff
- zone = localtm.tm_zone
- return timezone(timedelta(seconds=gmtoff), zone)
+ # Extract TZ data
+ gmtoff = localtm.tm_gmtoff
+ zone = localtm.tm_zone
+ return timezone(timedelta(seconds=gmtoff), zone)
def astimezone(self, tz=None):
if tz is None:
@@ -1938,8 +1938,8 @@ class datetime(date):
time, default 'T'.
The optional argument timespec specifies the number of additional
- terms of the time to include. Valid options are 'auto', 'hours',
- 'minutes', 'seconds', 'milliseconds' and 'microseconds'.
+ terms of the time to include. Valid options are 'auto', 'hours',
+ 'minutes', 'seconds', 'milliseconds' and 'microseconds'.
"""
s = ("%04d-%02d-%02d%c" % (self._year, self._month, self._day, sep) +
_format_time(self._hour, self._minute, self._second,
@@ -2110,10 +2110,10 @@ class datetime(date):
hour, rem = divmod(delta.seconds, 3600)
minute, second = divmod(rem, 60)
if 0 < delta.days <= _MAXORDINAL:
- return type(self).combine(date.fromordinal(delta.days),
- time(hour, minute, second,
- delta.microseconds,
- tzinfo=self._tzinfo))
+ return type(self).combine(date.fromordinal(delta.days),
+ time(hour, minute, second,
+ delta.microseconds,
+ tzinfo=self._tzinfo))
raise OverflowError("result out of range")
__radd__ = __add__
@@ -2212,7 +2212,7 @@ def _isoweek1monday(year):
week1monday += 7
return week1monday
-
+
class timezone(tzinfo):
__slots__ = '_offset', '_name'
@@ -2247,9 +2247,9 @@ class timezone(tzinfo):
return (self._offset, self._name)
def __eq__(self, other):
- if isinstance(other, timezone):
- return self._offset == other._offset
- return NotImplemented
+ if isinstance(other, timezone):
+ return self._offset == other._offset
+ return NotImplemented
def __hash__(self):
return hash(self._offset)
@@ -2306,7 +2306,7 @@ class timezone(tzinfo):
raise TypeError("fromutc() argument must be a datetime instance"
" or None")
- _maxoffset = timedelta(hours=24, microseconds=-1)
+ _maxoffset = timedelta(hours=24, microseconds=-1)
_minoffset = -_maxoffset
@staticmethod
@@ -2330,11 +2330,11 @@ class timezone(tzinfo):
return f'UTC{sign}{hours:02d}:{minutes:02d}'
timezone.utc = timezone._create(timedelta(0))
-# bpo-37642: These attributes are rounded to the nearest minute for backwards
-# compatibility, even though the constructor will accept a wider range of
-# values. This may change in the future.
-timezone.min = timezone._create(-timedelta(hours=23, minutes=59))
-timezone.max = timezone._create(timedelta(hours=23, minutes=59))
+# bpo-37642: These attributes are rounded to the nearest minute for backwards
+# compatibility, even though the constructor will accept a wider range of
+# values. This may change in the future.
+timezone.min = timezone._create(-timedelta(hours=23, minutes=59))
+timezone.max = timezone._create(timedelta(hours=23, minutes=59))
_EPOCH = datetime(1970, 1, 1, tzinfo=timezone.utc)
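
The restored bpo-37642 comment is worth keeping in mind: timezone.min/timezone.max stay at +/-23:59 for backward compatibility, while the constructor accepts any offset strictly between -24h and +24h, sub-minute resolution included (since 3.7). For instance:

    from datetime import timezone, timedelta

    print(timezone.min.utcoffset(None))   # -1 day, 0:01:00  (i.e. -23:59)
    print(timezone.max.utcoffset(None))   # 23:59:00
    tz = timezone(timedelta(hours=23, minutes=59, seconds=59))  # also legal
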
# Some time zone algebra. For a datetime x, let
@@ -2358,7 +2358,7 @@ _EPOCH = datetime(1970, 1, 1, tzinfo=timezone.utc)
# This is again a requirement for a sane tzinfo class.
#
# 4. (x+k).s = x.s
-# This follows from #2, and that datetime.timetz+timedelta preserves tzinfo.
+# This follows from #2, and that datetime.timetz+timedelta preserves tzinfo.
#
# 5. (x+k).n = x.n + k
# Again follows from how arithmetic is defined.
@@ -2547,7 +2547,7 @@ else:
_format_time, _format_offset, _is_leap, _isoweek1monday, _math,
_ord2ymd, _time, _time_class, _tzinfo_class, _wrap_strftime, _ymd2ord,
_divide_and_round, _parse_isoformat_date, _parse_isoformat_time,
- _parse_hh_mm_ss_ff, _IsoCalendarDate)
+ _parse_hh_mm_ss_ff, _IsoCalendarDate)
# XXX Since import * above excludes names that start with _,
# docstring does not get overwritten. In the future, it may be
# appropriate to maintain a single module level docstring and
diff --git a/contrib/tools/python3/src/Lib/dbm/__init__.py b/contrib/tools/python3/src/Lib/dbm/__init__.py
index f65da521af..fef8065a2b 100644
--- a/contrib/tools/python3/src/Lib/dbm/__init__.py
+++ b/contrib/tools/python3/src/Lib/dbm/__init__.py
@@ -82,8 +82,8 @@ def open(file, flag='r', mode=0o666):
# file doesn't exist and the new flag was used so use default type
mod = _defaultmod
else:
- raise error[0]("db file doesn't exist; "
- "use 'c' or 'n' flag to create a new db")
+ raise error[0]("db file doesn't exist; "
+ "use 'c' or 'n' flag to create a new db")
elif result == "":
# db type cannot be determined
raise error[0]("db type could not be determined")
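
The dbm.open() hunk is the error path hit when the database file is absent and the flag lacks 'c'/'n'; with 'c' the default backend simply creates it. For example:

    import dbm, os, tempfile

    path = os.path.join(tempfile.mkdtemp(), 'cache')
    with dbm.open(path, 'c') as db:       # 'c': create if missing
        db['key'] = 'value'
    with dbm.open(path) as db:            # default flag 'r' now succeeds
        print(db['key'])                  # b'value'
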
diff --git a/contrib/tools/python3/src/Lib/dbm/dumb.py b/contrib/tools/python3/src/Lib/dbm/dumb.py
index 864ad371ec..82b29dd6be 100644
--- a/contrib/tools/python3/src/Lib/dbm/dumb.py
+++ b/contrib/tools/python3/src/Lib/dbm/dumb.py
@@ -82,7 +82,7 @@ class _Database(collections.abc.MutableMapping):
f = _io.open(self._datfile, 'r', encoding="Latin-1")
except OSError:
if flag not in ('c', 'n'):
- raise
+ raise
with _io.open(self._datfile, 'w', encoding="Latin-1") as f:
self._chmod(self._datfile)
else:
@@ -90,14 +90,14 @@ class _Database(collections.abc.MutableMapping):
# Read directory file into the in-memory index dict.
def _update(self, flag):
- self._modified = False
+ self._modified = False
self._index = {}
try:
f = _io.open(self._dirfile, 'r', encoding="Latin-1")
except OSError:
if flag not in ('c', 'n'):
- raise
- self._modified = True
+ raise
+ self._modified = True
else:
with f:
for line in f:
@@ -185,7 +185,7 @@ class _Database(collections.abc.MutableMapping):
def __setitem__(self, key, val):
if self._readonly:
- raise error('The database is opened for reading only')
+ raise error('The database is opened for reading only')
if isinstance(key, str):
key = key.encode('utf-8')
elif not isinstance(key, (bytes, bytearray)):
@@ -222,7 +222,7 @@ class _Database(collections.abc.MutableMapping):
def __delitem__(self, key):
if self._readonly:
- raise error('The database is opened for reading only')
+ raise error('The database is opened for reading only')
if isinstance(key, str):
key = key.encode('utf-8')
self._verify_open()
@@ -278,7 +278,7 @@ class _Database(collections.abc.MutableMapping):
__del__ = close
def _chmod(self, file):
- self._os.chmod(file, self._mode)
+ self._os.chmod(file, self._mode)
def __enter__(self):
return self
@@ -312,5 +312,5 @@ def open(file, flag='c', mode=0o666):
# Turn off any bits that are set in the umask
mode = mode & (~um)
if flag not in ('r', 'w', 'c', 'n'):
- raise ValueError("Flag must be one of 'r', 'w', 'c', or 'n'")
+ raise ValueError("Flag must be one of 'r', 'w', 'c', or 'n'")
return _Database(file, mode, flag=flag)
diff --git a/contrib/tools/python3/src/Lib/difflib.py b/contrib/tools/python3/src/Lib/difflib.py
index afd8a0c7c5..0e4afb59ce 100644
--- a/contrib/tools/python3/src/Lib/difflib.py
+++ b/contrib/tools/python3/src/Lib/difflib.py
@@ -32,7 +32,7 @@ __all__ = ['get_close_matches', 'ndiff', 'restore', 'SequenceMatcher',
from heapq import nlargest as _nlargest
from collections import namedtuple as _namedtuple
-from types import GenericAlias
+from types import GenericAlias
Match = _namedtuple('Match', 'a b size')
@@ -62,7 +62,7 @@ class SequenceMatcher:
notion, pairing up elements that appear uniquely in each sequence.
That, and the method here, appear to yield more intuitive difference
reports than does diff. This method appears to be the least vulnerable
- to syncing up on blocks of "junk lines", though (like blank lines in
+ to syncing up on blocks of "junk lines", though (like blank lines in
ordinary text files, or maybe "<P>" lines in HTML files). That may be
because this is the only method of the 3 that has a *concept* of
"junk" <wink>.
@@ -302,11 +302,11 @@ class SequenceMatcher:
for elt in popular: # ditto; as fast for 1% deletion
del b2j[elt]
- def find_longest_match(self, alo=0, ahi=None, blo=0, bhi=None):
+ def find_longest_match(self, alo=0, ahi=None, blo=0, bhi=None):
"""Find longest matching block in a[alo:ahi] and b[blo:bhi].
- By default it will find the longest match in the entirety of a and b.
-
+ By default it will find the longest match in the entirety of a and b.
+
If isjunk is not defined:
Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where
@@ -361,10 +361,10 @@ class SequenceMatcher:
# the unique 'b's and then matching the first two 'a's.
a, b, b2j, isbjunk = self.a, self.b, self.b2j, self.bjunk.__contains__
- if ahi is None:
- ahi = len(a)
- if bhi is None:
- bhi = len(b)
+ if ahi is None:
+ ahi = len(a)
+ if bhi is None:
+ bhi = len(b)
besti, bestj, bestsize = alo, blo, 0
# find longest junk-free match
# during an iteration of the loop, j2len[j] = length of longest
@@ -660,9 +660,9 @@ class SequenceMatcher:
# shorter sequence
return _calculate_ratio(min(la, lb), la + lb)
- __class_getitem__ = classmethod(GenericAlias)
-
-
+ __class_getitem__ = classmethod(GenericAlias)
+
+
def get_close_matches(word, possibilities, n=3, cutoff=0.6):
"""Use SequenceMatcher to return list of the best "good enough" matches.
@@ -712,12 +712,12 @@ def get_close_matches(word, possibilities, n=3, cutoff=0.6):
return [x for score, x in result]
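
get_close_matches() is the high-level face of the SequenceMatcher machinery above: it keeps the n best candidates whose ratio() clears the cutoff, best first. The docstring's own example:

    import difflib

    print(difflib.get_close_matches('appel', ['ape', 'apple', 'peach', 'puppy']))
    # ['apple', 'ape']   -- up to n=3 matches with ratio >= 0.6, best first
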
-def _keep_original_ws(s, tag_s):
- """Replace whitespace with the original whitespace characters in `s`"""
- return ''.join(
- c if tag_c == " " and c.isspace() else tag_c
- for c, tag_c in zip(s, tag_s)
- )
+def _keep_original_ws(s, tag_s):
+ """Replace whitespace with the original whitespace characters in `s`"""
+ return ''.join(
+ c if tag_c == " " and c.isspace() else tag_c
+ for c, tag_c in zip(s, tag_s)
+ )
@@ -998,7 +998,7 @@ class Differ:
def _qformat(self, aline, bline, atags, btags):
r"""
- Format "?" output and deal with tabs.
+ Format "?" output and deal with tabs.
Example:
@@ -1012,16 +1012,16 @@ class Differ:
'+ \tabcdefGhijkl\n'
'? \t ^ ^ ^\n'
"""
- atags = _keep_original_ws(aline, atags).rstrip()
- btags = _keep_original_ws(bline, btags).rstrip()
+ atags = _keep_original_ws(aline, atags).rstrip()
+ btags = _keep_original_ws(bline, btags).rstrip()
yield "- " + aline
if atags:
- yield f"? {atags}\n"
+ yield f"? {atags}\n"
yield "+ " + bline
if btags:
- yield f"? {btags}\n"
+ yield f"? {btags}\n"
# With respect to junk, an earlier version of ndiff simply refused to
# *start* a match with a junk element. The result was cases like this:
@@ -1044,7 +1044,7 @@ import re
def IS_LINE_JUNK(line, pat=re.compile(r"\s*(?:#\s*)?$").match):
r"""
- Return True for ignorable line: iff `line` is blank or contains a single '#'.
+ Return True for ignorable line: iff `line` is blank or contains a single '#'.
Examples:
@@ -1060,7 +1060,7 @@ def IS_LINE_JUNK(line, pat=re.compile(r"\s*(?:#\s*)?$").match):
def IS_CHARACTER_JUNK(ch, ws=" \t"):
r"""
- Return True for ignorable character: iff `ch` is a space or tab.
+ Return True for ignorable character: iff `ch` is a space or tab.
Examples:
diff --git a/contrib/tools/python3/src/Lib/dis.py b/contrib/tools/python3/src/Lib/dis.py
index e289e176c7..dbd0661464 100644
--- a/contrib/tools/python3/src/Lib/dis.py
+++ b/contrib/tools/python3/src/Lib/dis.py
@@ -17,16 +17,16 @@ _have_code = (types.MethodType, types.FunctionType, types.CodeType,
classmethod, staticmethod, type)
FORMAT_VALUE = opmap['FORMAT_VALUE']
-FORMAT_VALUE_CONVERTERS = (
- (None, ''),
- (str, 'str'),
- (repr, 'repr'),
- (ascii, 'ascii'),
-)
-MAKE_FUNCTION = opmap['MAKE_FUNCTION']
-MAKE_FUNCTION_FLAGS = ('defaults', 'kwdefaults', 'annotations', 'closure')
-
-
+FORMAT_VALUE_CONVERTERS = (
+ (None, ''),
+ (str, 'str'),
+ (repr, 'repr'),
+ (ascii, 'ascii'),
+)
+MAKE_FUNCTION = opmap['MAKE_FUNCTION']
+MAKE_FUNCTION_FLAGS = ('defaults', 'kwdefaults', 'annotations', 'closure')
+
+
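
FORMAT_VALUE_CONVERTERS and MAKE_FUNCTION_FLAGS drive how dis renders f-string and closure bytecode on the interpreters this tree targets (FORMAT_VALUE exists on 3.6-3.11; later releases changed the f-string opcodes). The low two bits of the FORMAT_VALUE argument pick the conversion and bit 0x4 flags a format spec, so roughly:

    import dis

    dis.dis("f'{x!r:>10}'")
    # ... LOAD_NAME x / LOAD_CONST '>10' / FORMAT_VALUE 6 (repr, with format)
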
def _try_compile(source, name):
"""Attempts to compile the given source, first as an expression and
then as a statement if the first approach fails.
@@ -157,7 +157,7 @@ def _format_code_info(co):
lines.append("Name: %s" % co.co_name)
lines.append("Filename: %s" % co.co_filename)
lines.append("Argument count: %s" % co.co_argcount)
- lines.append("Positional-only arguments: %s" % co.co_posonlyargcount)
+ lines.append("Positional-only arguments: %s" % co.co_posonlyargcount)
lines.append("Kw-only arguments: %s" % co.co_kwonlyargcount)
lines.append("Number of locals: %s" % co.co_nlocals)
lines.append("Stack size: %s" % co.co_stacksize)
@@ -349,15 +349,15 @@ def _get_instructions_bytes(code, varnames=None, names=None, constants=None,
elif op in hasfree:
argval, argrepr = _get_name_info(arg, cells)
elif op == FORMAT_VALUE:
- argval, argrepr = FORMAT_VALUE_CONVERTERS[arg & 0x3]
- argval = (argval, bool(arg & 0x4))
+ argval, argrepr = FORMAT_VALUE_CONVERTERS[arg & 0x3]
+ argval = (argval, bool(arg & 0x4))
if argval[1]:
if argrepr:
argrepr += ', '
argrepr += 'with format'
- elif op == MAKE_FUNCTION:
- argrepr = ', '.join(s for i, s in enumerate(MAKE_FUNCTION_FLAGS)
- if arg & (1<<i))
+ elif op == MAKE_FUNCTION:
+ argrepr = ', '.join(s for i, s in enumerate(MAKE_FUNCTION_FLAGS)
+ if arg & (1<<i))
yield Instruction(opname[op], op,
arg, argval, argrepr,
offset, starts_line, is_jump_target)
@@ -454,7 +454,7 @@ def findlinestarts(code):
"""
byte_increments = code.co_lnotab[0::2]
line_increments = code.co_lnotab[1::2]
- bytecode_len = len(code.co_code)
+ bytecode_len = len(code.co_code)
lastlineno = None
lineno = code.co_firstlineno
@@ -465,10 +465,10 @@ def findlinestarts(code):
yield (addr, lineno)
lastlineno = lineno
addr += byte_incr
- if addr >= bytecode_len:
- # The rest of the lnotab byte offsets are past the end of
- # the bytecode, so the lines were optimized away.
- return
+ if addr >= bytecode_len:
+ # The rest of the lnotab byte offsets are past the end of
+ # the bytecode, so the lines were optimized away.
+ return
if line_incr >= 0x80:
# line_increments is an array of 8-bit signed integers
line_incr -= 0x100
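
findlinestarts() walks the (byte_increment, line_increment) pairs packed into co_lnotab; the restored lines stop the scan once offsets run past the bytecode (lines optimized away), and increments >= 0x80 are reinterpreted as negative. Typical use:

    import dis

    def f():
        a = 1
        b = 2
        return a + b

    for offset, lineno in dis.findlinestarts(f.__code__):
        print(offset, lineno)     # first bytecode offset of each source line
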
@@ -542,7 +542,7 @@ def _test():
import argparse
parser = argparse.ArgumentParser()
- parser.add_argument('infile', type=argparse.FileType('rb'), nargs='?', default='-')
+ parser.add_argument('infile', type=argparse.FileType('rb'), nargs='?', default='-')
args = parser.parse_args()
with args.infile as infile:
source = infile.read()
diff --git a/contrib/tools/python3/src/Lib/distutils/_msvccompiler.py b/contrib/tools/python3/src/Lib/distutils/_msvccompiler.py
index af8099a407..24bc487321 100644
--- a/contrib/tools/python3/src/Lib/distutils/_msvccompiler.py
+++ b/contrib/tools/python3/src/Lib/distutils/_msvccompiler.py
@@ -85,15 +85,15 @@ def _find_vc2017():
return None, None
-PLAT_SPEC_TO_RUNTIME = {
- 'x86' : 'x86',
- 'x86_amd64' : 'x64',
- 'x86_arm' : 'arm',
- 'x86_arm64' : 'arm64'
-}
-
+PLAT_SPEC_TO_RUNTIME = {
+ 'x86' : 'x86',
+ 'x86_amd64' : 'x64',
+ 'x86_arm' : 'arm',
+ 'x86_arm64' : 'arm64'
+}
+
def _find_vcvarsall(plat_spec):
- # bpo-38597: Removed vcruntime return value
+ # bpo-38597: Removed vcruntime return value
_, best_dir = _find_vc2017()
if not best_dir:
@@ -108,7 +108,7 @@ def _find_vcvarsall(plat_spec):
log.debug("%s cannot be found", vcvarsall)
return None, None
- return vcvarsall, None
+ return vcvarsall, None
def _get_vc_env(plat_spec):
if os.getenv("DISTUTILS_USE_SDK"):
@@ -117,7 +117,7 @@ def _get_vc_env(plat_spec):
for key, value in os.environ.items()
}
- vcvarsall, _ = _find_vcvarsall(plat_spec)
+ vcvarsall, _ = _find_vcvarsall(plat_spec)
if not vcvarsall:
raise DistutilsPlatformError("Unable to find vcvarsall.bat")
@@ -163,8 +163,8 @@ def _find_exe(exe, paths=None):
PLAT_TO_VCVARS = {
'win32' : 'x86',
'win-amd64' : 'x86_amd64',
- 'win-arm32' : 'x86_arm',
- 'win-arm64' : 'x86_arm64'
+ 'win-arm32' : 'x86_arm',
+ 'win-arm64' : 'x86_arm64'
}
class MSVCCompiler(CCompiler) :
@@ -240,11 +240,11 @@ class MSVCCompiler(CCompiler) :
self.add_library_dir(dir.rstrip(os.sep))
self.preprocess_options = None
- # bpo-38597: Always compile with dynamic linking
- # Future releases of Python 3.x will include all past
- # versions of vcruntime*.dll for compatibility.
+ # bpo-38597: Always compile with dynamic linking
+ # Future releases of Python 3.x will include all past
+ # versions of vcruntime*.dll for compatibility.
self.compile_options = [
- '/nologo', '/Ox', '/W3', '/GL', '/DNDEBUG', '/MD'
+ '/nologo', '/Ox', '/W3', '/GL', '/DNDEBUG', '/MD'
]
self.compile_options_debug = [
diff --git a/contrib/tools/python3/src/Lib/distutils/archive_util.py b/contrib/tools/python3/src/Lib/distutils/archive_util.py
index 565a3117b4..3c711a085d 100644
--- a/contrib/tools/python3/src/Lib/distutils/archive_util.py
+++ b/contrib/tools/python3/src/Lib/distutils/archive_util.py
@@ -166,21 +166,21 @@ def make_zipfile(base_name, base_dir, verbose=0, dry_run=0):
zip = zipfile.ZipFile(zip_filename, "w",
compression=zipfile.ZIP_STORED)
- with zip:
- if base_dir != os.curdir:
- path = os.path.normpath(os.path.join(base_dir, ''))
+ with zip:
+ if base_dir != os.curdir:
+ path = os.path.normpath(os.path.join(base_dir, ''))
zip.write(path, path)
log.info("adding '%s'", path)
- for dirpath, dirnames, filenames in os.walk(base_dir):
- for name in dirnames:
- path = os.path.normpath(os.path.join(dirpath, name, ''))
+ for dirpath, dirnames, filenames in os.walk(base_dir):
+ for name in dirnames:
+ path = os.path.normpath(os.path.join(dirpath, name, ''))
zip.write(path, path)
log.info("adding '%s'", path)
- for name in filenames:
- path = os.path.normpath(os.path.join(dirpath, name))
- if os.path.isfile(path):
- zip.write(path, path)
- log.info("adding '%s'", path)
+ for name in filenames:
+ path = os.path.normpath(os.path.join(dirpath, name))
+ if os.path.isfile(path):
+ zip.write(path, path)
+ log.info("adding '%s'", path)
return zip_filename
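
The re-indented block is the whole of make_zipfile's pure-Python path: walk base_dir, record directory entries (note the trailing-component os.path.join trick), then the regular files. Reduced to a sketch:

    import os, zipfile

    def zip_tree(zip_name, base_dir):
        with zipfile.ZipFile(zip_name, 'w', zipfile.ZIP_STORED) as zf:
            for dirpath, dirnames, filenames in os.walk(base_dir):
                for name in dirnames:
                    # zipfile detects directories and stores them as "dir/"
                    zf.write(os.path.normpath(os.path.join(dirpath, name)))
                for name in filenames:
                    path = os.path.normpath(os.path.join(dirpath, name))
                    if os.path.isfile(path):
                        zf.write(path)
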
diff --git a/contrib/tools/python3/src/Lib/distutils/bcppcompiler.py b/contrib/tools/python3/src/Lib/distutils/bcppcompiler.py
index 071fea5d03..d0603271ff 100644
--- a/contrib/tools/python3/src/Lib/distutils/bcppcompiler.py
+++ b/contrib/tools/python3/src/Lib/distutils/bcppcompiler.py
@@ -14,10 +14,10 @@ for the Borland C++ compiler.
import os
from distutils.errors import \
- DistutilsExecError, \
+ DistutilsExecError, \
CompileError, LibError, LinkError, UnknownFileError
from distutils.ccompiler import \
- CCompiler, gen_preprocess_options
+ CCompiler, gen_preprocess_options
from distutils.file_util import write_file
from distutils.dep_util import newer
from distutils import log
diff --git a/contrib/tools/python3/src/Lib/distutils/ccompiler.py b/contrib/tools/python3/src/Lib/distutils/ccompiler.py
index 4c47f2ed24..3c8c35f0a8 100644
--- a/contrib/tools/python3/src/Lib/distutils/ccompiler.py
+++ b/contrib/tools/python3/src/Lib/distutils/ccompiler.py
@@ -8,7 +8,7 @@ from distutils.errors import *
from distutils.spawn import spawn
from distutils.file_util import move_file
from distutils.dir_util import mkpath
-from distutils.dep_util import newer_group
+from distutils.dep_util import newer_group
from distutils.util import split_quoted, execute
from distutils import log
@@ -392,7 +392,7 @@ class CCompiler:
return output_dir, macros, include_dirs
def _prep_compile(self, sources, output_dir, depends=None):
- """Decide which source files must be recompiled.
+ """Decide which source files must be recompiled.
Determine the list of object files corresponding to 'sources',
and figure out which ones really need to be recompiled.
@@ -545,7 +545,7 @@ class CCompiler:
'extra_preargs' and 'extra_postargs' are implementation- dependent.
On platforms that have the notion of a command-line (e.g. Unix,
DOS/Windows), they are most likely lists of strings: extra
- command-line arguments to prepend/append to the compiler command
+ command-line arguments to prepend/append to the compiler command
line. On other platforms, consult the implementation class
documentation. In any event, they are intended as an escape hatch
for those occasions when the abstract compiler framework doesn't
@@ -781,9 +781,9 @@ class CCompiler:
for incl in includes:
f.write("""#include "%s"\n""" % incl)
f.write("""\
-int main (int argc, char **argv) {
+int main (int argc, char **argv) {
%s();
- return 0;
+ return 0;
}
""" % funcname)
finally:
diff --git a/contrib/tools/python3/src/Lib/distutils/command/bdist_msi.py b/contrib/tools/python3/src/Lib/distutils/command/bdist_msi.py
index 0863a1883e..8ab133e18e 100644
--- a/contrib/tools/python3/src/Lib/distutils/command/bdist_msi.py
+++ b/contrib/tools/python3/src/Lib/distutils/command/bdist_msi.py
@@ -6,9 +6,9 @@
Implements the bdist_msi command.
"""
-import os
-import sys
-import warnings
+import os
+import sys
+import warnings
from distutils.core import Command
from distutils.dir_util import remove_tree
from distutils.sysconfig import get_python_version
@@ -124,12 +124,12 @@ class bdist_msi(Command):
'3.5', '3.6', '3.7', '3.8', '3.9']
other_version = 'X'
- def __init__(self, *args, **kw):
- super().__init__(*args, **kw)
- warnings.warn("bdist_msi command is deprecated since Python 3.9, "
- "use bdist_wheel (wheel packages) instead",
- DeprecationWarning, 2)
-
+ def __init__(self, *args, **kw):
+ super().__init__(*args, **kw)
+ warnings.warn("bdist_msi command is deprecated since Python 3.9, "
+ "use bdist_wheel (wheel packages) instead",
+ DeprecationWarning, 2)
+
def initialize_options(self):
self.bdist_dir = None
self.plat_name = None
@@ -398,18 +398,18 @@ class bdist_msi(Command):
# entries for each version as the above code does
if self.pre_install_script:
scriptfn = os.path.join(self.bdist_dir, "preinstall.bat")
- with open(scriptfn, "w") as f:
- # The batch file will be executed with [PYTHON], so that %1
- # is the path to the Python interpreter; %0 will be the path
- # of the batch file.
- # rem ="""
- # %1 %0
- # exit
- # """
- # <actual script>
- f.write('rem ="""\n%1 %0\nexit\n"""\n')
- with open(self.pre_install_script) as fin:
- f.write(fin.read())
+ with open(scriptfn, "w") as f:
+ # The batch file will be executed with [PYTHON], so that %1
+ # is the path to the Python interpreter; %0 will be the path
+ # of the batch file.
+ # rem ="""
+ # %1 %0
+ # exit
+ # """
+ # <actual script>
+ f.write('rem ="""\n%1 %0\nexit\n"""\n')
+ with open(self.pre_install_script) as fin:
+ f.write(fin.read())
add_data(self.db, "Binary",
[("PreInstall", msilib.Binary(scriptfn))
])
diff --git a/contrib/tools/python3/src/Lib/distutils/command/bdist_rpm.py b/contrib/tools/python3/src/Lib/distutils/command/bdist_rpm.py
index 550cbfa1e2..da98ce6a47 100644
--- a/contrib/tools/python3/src/Lib/distutils/command/bdist_rpm.py
+++ b/contrib/tools/python3/src/Lib/distutils/command/bdist_rpm.py
@@ -308,7 +308,7 @@ class bdist_rpm(Command):
# build package
log.info("building RPMs")
- rpm_cmd = ['rpmbuild']
+ rpm_cmd = ['rpmbuild']
if self.source_only: # what kind of RPMs?
rpm_cmd.append('-bs')
@@ -533,8 +533,8 @@ class bdist_rpm(Command):
'',
'%' + rpm_opt,])
if val:
- with open(val) as f:
- spec_file.extend(f.read().split('\n'))
+ with open(val) as f:
+ spec_file.extend(f.read().split('\n'))
else:
spec_file.append(default)
diff --git a/contrib/tools/python3/src/Lib/distutils/command/bdist_wininst.py b/contrib/tools/python3/src/Lib/distutils/command/bdist_wininst.py
index 0e9ddaa214..86602327cf 100644
--- a/contrib/tools/python3/src/Lib/distutils/command/bdist_wininst.py
+++ b/contrib/tools/python3/src/Lib/distutils/command/bdist_wininst.py
@@ -3,12 +3,12 @@
Implements the Distutils 'bdist_wininst' command: create a windows installer
exe-program."""
-import os
-import sys
-import warnings
+import os
+import sys
+import warnings
from distutils.core import Command
from distutils.util import get_platform
-from distutils.dir_util import remove_tree
+from distutils.dir_util import remove_tree
from distutils.errors import *
from distutils.sysconfig import get_python_version
from distutils import log
@@ -57,15 +57,15 @@ class bdist_wininst(Command):
boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
'skip-build']
- # bpo-10945: bdist_wininst requires mbcs encoding only available on Windows
- _unsupported = (sys.platform != "win32")
-
- def __init__(self, *args, **kw):
- super().__init__(*args, **kw)
- warnings.warn("bdist_wininst command is deprecated since Python 3.8, "
- "use bdist_wheel (wheel packages) instead",
- DeprecationWarning, 2)
-
+ # bpo-10945: bdist_wininst requires mbcs encoding only available on Windows
+ _unsupported = (sys.platform != "win32")
+
+ def __init__(self, *args, **kw):
+ super().__init__(*args, **kw)
+ warnings.warn("bdist_wininst command is deprecated since Python 3.8, "
+ "use bdist_wheel (wheel packages) instead",
+ DeprecationWarning, 2)
+
def initialize_options(self):
self.bdist_dir = None
self.plat_name = None
@@ -258,49 +258,49 @@ class bdist_wininst(Command):
self.announce("creating %s" % installer_name)
if bitmap:
- with open(bitmap, "rb") as f:
- bitmapdata = f.read()
+ with open(bitmap, "rb") as f:
+ bitmapdata = f.read()
bitmaplen = len(bitmapdata)
else:
bitmaplen = 0
- with open(installer_name, "wb") as file:
- file.write(self.get_exe_bytes())
- if bitmap:
- file.write(bitmapdata)
+ with open(installer_name, "wb") as file:
+ file.write(self.get_exe_bytes())
+ if bitmap:
+ file.write(bitmapdata)
- # Convert cfgdata from unicode to ascii, mbcs encoded
- if isinstance(cfgdata, str):
- cfgdata = cfgdata.encode("mbcs")
+ # Convert cfgdata from unicode to ascii, mbcs encoded
+ if isinstance(cfgdata, str):
+ cfgdata = cfgdata.encode("mbcs")
- # Append the pre-install script
+ # Append the pre-install script
cfgdata = cfgdata + b"\0"
- if self.pre_install_script:
- # We need to normalize newlines, so we open in text mode and
- # convert back to bytes. "latin-1" simply avoids any possible
- # failures.
- with open(self.pre_install_script, "r",
- encoding="latin-1") as script:
- script_data = script.read().encode("latin-1")
- cfgdata = cfgdata + script_data + b"\n\0"
- else:
- # empty pre-install script
- cfgdata = cfgdata + b"\0"
- file.write(cfgdata)
-
- # The 'magic number' 0x1234567B is used to make sure that the
- # binary layout of 'cfgdata' is what the wininst.exe binary
- # expects. If the layout changes, increment that number, make
- # the corresponding changes to the wininst.exe sources, and
- # recompile them.
- header = struct.pack("<iii",
- 0x1234567B, # tag
- len(cfgdata), # length
- bitmaplen, # number of bytes in bitmap
- )
- file.write(header)
- with open(arcname, "rb") as f:
- file.write(f.read())
+ if self.pre_install_script:
+ # We need to normalize newlines, so we open in text mode and
+ # convert back to bytes. "latin-1" simply avoids any possible
+ # failures.
+ with open(self.pre_install_script, "r",
+ encoding="latin-1") as script:
+ script_data = script.read().encode("latin-1")
+ cfgdata = cfgdata + script_data + b"\n\0"
+ else:
+ # empty pre-install script
+ cfgdata = cfgdata + b"\0"
+ file.write(cfgdata)
+
+ # The 'magic number' 0x1234567B is used to make sure that the
+ # binary layout of 'cfgdata' is what the wininst.exe binary
+ # expects. If the layout changes, increment that number, make
+ # the corresponding changes to the wininst.exe sources, and
+ # recompile them.
+ header = struct.pack("<iii",
+ 0x1234567B, # tag
+ len(cfgdata), # length
+ bitmaplen, # number of bytes in bitmap
+ )
+ file.write(header)
+ with open(arcname, "rb") as f:
+ file.write(f.read())
def get_installer_filename(self, fullname):
# Factored out to allow overriding in subclasses
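For orientation, the header layout the comment above describes can be checked with a short sketch (the helper names here are illustrative, not part of the patch):

    import struct

    MAGIC = 0x1234567B  # tag the wininst.exe stub expects

    def pack_header(cfgdata: bytes, bitmaplen: int) -> bytes:
        # Three little-endian 32-bit ints: tag, cfgdata length, bitmap length.
        return struct.pack("<iii", MAGIC, len(cfgdata), bitmaplen)

    def read_header(header: bytes):
        tag, cfglen, bmplen = struct.unpack("<iii", header)
        assert tag == MAGIC, "layout changed: bump the magic number"
        return cfglen, bmplen

    assert read_header(pack_header(b"cfg\0", 0)) == (4, 0)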
diff --git a/contrib/tools/python3/src/Lib/distutils/command/build.py b/contrib/tools/python3/src/Lib/distutils/command/build.py
index a86df0bc7f..fcdb21192e 100644
--- a/contrib/tools/python3/src/Lib/distutils/command/build.py
+++ b/contrib/tools/python3/src/Lib/distutils/command/build.py
@@ -116,7 +116,7 @@ class build(Command):
self.build_scripts = os.path.join(self.build_base,
'scripts-%d.%d' % sys.version_info[:2])
- if self.executable is None and sys.executable:
+ if self.executable is None and sys.executable:
self.executable = os.path.normpath(sys.executable)
if isinstance(self.parallel, str):
diff --git a/contrib/tools/python3/src/Lib/distutils/command/build_ext.py b/contrib/tools/python3/src/Lib/distutils/command/build_ext.py
index 1a9bd1200f..0785ec2bcb 100644
--- a/contrib/tools/python3/src/Lib/distutils/command/build_ext.py
+++ b/contrib/tools/python3/src/Lib/distutils/command/build_ext.py
@@ -490,8 +490,8 @@ class build_ext(Command):
"in 'ext_modules' option (extension '%s'), "
"'sources' must be present and must be "
"a list of source filenames" % ext.name)
- # sort to make the resulting .so file build reproducible
- sources = sorted(sources)
+ # sort to make the resulting .so file build reproducible
+ sources = sorted(sources)
ext_path = self.get_ext_fullpath(ext.name)
depends = sources + ext.depends
@@ -689,15 +689,15 @@ class build_ext(Command):
provided, "PyInit_" + module_name. Only relevant on Windows, where
the .pyd file (DLL) must export the module "PyInit_" function.
"""
- suffix = '_' + ext.name.split('.')[-1]
- try:
- # Unicode module name support as defined in PEP-489
- # https://www.python.org/dev/peps/pep-0489/#export-hook-name
- suffix.encode('ascii')
- except UnicodeEncodeError:
- suffix = 'U' + suffix.encode('punycode').replace(b'-', b'_').decode('ascii')
-
- initfunc_name = "PyInit" + suffix
+ suffix = '_' + ext.name.split('.')[-1]
+ try:
+ # Unicode module name support as defined in PEP-489
+ # https://www.python.org/dev/peps/pep-0489/#export-hook-name
+ suffix.encode('ascii')
+ except UnicodeEncodeError:
+ suffix = 'U' + suffix.encode('punycode').replace(b'-', b'_').decode('ascii')
+
+ initfunc_name = "PyInit" + suffix
if initfunc_name not in ext.export_symbols:
ext.export_symbols.append(initfunc_name)
return ext.export_symbols
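A standalone sketch of the PEP 489 naming rule restored above (the helper name is an assumption):

    def pyinit_name(module_name):
        suffix = '_' + module_name.split('.')[-1]
        try:
            suffix.encode('ascii')
        except UnicodeEncodeError:
            # Non-ASCII module names get a punycode-escaped 'U' form.
            suffix = 'U' + suffix.encode('punycode').replace(b'-', b'_').decode('ascii')
        return 'PyInit' + suffix

    print(pyinit_name('spam'))       # PyInit_spam
    print(pyinit_name('pkg.みどり'))  # PyInitU_... (punycode-escaped)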
@@ -724,31 +724,31 @@ class build_ext(Command):
# extensions, it is a reference to the original list
return ext.libraries + [pythonlib]
else:
- # On Android only the main executable and LD_PRELOADs are considered
- # to be RTLD_GLOBAL, all the dependencies of the main executable
- # remain RTLD_LOCAL and so the shared libraries must be linked with
- # libpython when python is built with a shared python library (issue
- # bpo-21536).
- # On Cygwin (and if required, other POSIX-like platforms based on
- # Windows like MinGW) it is simply necessary that all symbols in
- # shared libraries are resolved at link time.
- from distutils.sysconfig import get_config_var
- link_libpython = False
- if get_config_var('Py_ENABLE_SHARED'):
- # A native build on an Android device or on Cygwin
- if hasattr(sys, 'getandroidapilevel'):
- link_libpython = True
- elif sys.platform == 'cygwin':
- link_libpython = True
- elif '_PYTHON_HOST_PLATFORM' in os.environ:
- # We are cross-compiling for one of the relevant platforms
- if get_config_var('ANDROID_API_LEVEL') != 0:
- link_libpython = True
- elif get_config_var('MACHDEP') == 'cygwin':
- link_libpython = True
-
- if link_libpython:
- ldversion = get_config_var('LDVERSION')
- return ext.libraries + ['python' + ldversion]
-
- return ext.libraries
+ # On Android only the main executable and LD_PRELOADs are considered
+ # to be RTLD_GLOBAL, all the dependencies of the main executable
+ # remain RTLD_LOCAL and so the shared libraries must be linked with
+ # libpython when python is built with a shared python library (issue
+ # bpo-21536).
+ # On Cygwin (and if required, other POSIX-like platforms based on
+ # Windows like MinGW) it is simply necessary that all symbols in
+ # shared libraries are resolved at link time.
+ from distutils.sysconfig import get_config_var
+ link_libpython = False
+ if get_config_var('Py_ENABLE_SHARED'):
+ # A native build on an Android device or on Cygwin
+ if hasattr(sys, 'getandroidapilevel'):
+ link_libpython = True
+ elif sys.platform == 'cygwin':
+ link_libpython = True
+ elif '_PYTHON_HOST_PLATFORM' in os.environ:
+ # We are cross-compiling for one of the relevant platforms
+ if get_config_var('ANDROID_API_LEVEL') != 0:
+ link_libpython = True
+ elif get_config_var('MACHDEP') == 'cygwin':
+ link_libpython = True
+
+ if link_libpython:
+ ldversion = get_config_var('LDVERSION')
+ return ext.libraries + ['python' + ldversion]
+
+ return ext.libraries
diff --git a/contrib/tools/python3/src/Lib/distutils/command/build_py.py b/contrib/tools/python3/src/Lib/distutils/command/build_py.py
index edc2171cd1..6a830d9e41 100644
--- a/contrib/tools/python3/src/Lib/distutils/command/build_py.py
+++ b/contrib/tools/python3/src/Lib/distutils/command/build_py.py
@@ -5,7 +5,7 @@ Implements the Distutils 'build_py' command."""
import os
import importlib.util
import sys
-import glob
+import glob
from distutils.core import Command
from distutils.errors import *
@@ -125,7 +125,7 @@ class build_py (Command):
files = []
for pattern in globs:
# Each pattern has to be converted to a platform-specific path
- filelist = glob.glob(os.path.join(glob.escape(src_dir), convert_path(pattern)))
+ filelist = glob.glob(os.path.join(glob.escape(src_dir), convert_path(pattern)))
# Files that match more than one pattern are only added once
files.extend([fn for fn in filelist if fn not in files
and os.path.isfile(fn)])
@@ -216,7 +216,7 @@ class build_py (Command):
def find_package_modules(self, package, package_dir):
self.check_package(package, package_dir)
- module_files = glob.glob(os.path.join(glob.escape(package_dir), "*.py"))
+ module_files = glob.glob(os.path.join(glob.escape(package_dir), "*.py"))
modules = []
setup_script = os.path.abspath(self.distribution.script_name)
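glob.escape matters when the directory part itself contains glob metacharacters; a tiny illustration:

    import glob, os

    pkg_dir = 'build/[abi3]/pkg'   # '[' would otherwise start a character class
    pattern = os.path.join(glob.escape(pkg_dir), '*.py')
    print(pattern)                 # build/[[]abi3]/pkg/*.py
    files = glob.glob(pattern)     # matches only the literal directory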
diff --git a/contrib/tools/python3/src/Lib/distutils/command/check.py b/contrib/tools/python3/src/Lib/distutils/command/check.py
index 73a30f3afd..ee734cb1bc 100644
--- a/contrib/tools/python3/src/Lib/distutils/command/check.py
+++ b/contrib/tools/python3/src/Lib/distutils/command/check.py
@@ -79,12 +79,12 @@ class check(Command):
def check_metadata(self):
"""Ensures that all required elements of meta-data are supplied.
- Required fields:
- name, version, URL
-
- Recommended fields:
- (author and author_email) or (maintainer and maintainer_email)
+ Required fields:
+ name, version, URL
+ Recommended fields:
+ (author and author_email) or (maintainer and maintainer_email)
+
Warns if any are missing.
"""
metadata = self.distribution.metadata
@@ -99,15 +99,15 @@ class check(Command):
if metadata.author:
if not metadata.author_email:
self.warn("missing meta-data: if 'author' supplied, " +
- "'author_email' should be supplied too")
+ "'author_email' should be supplied too")
elif metadata.maintainer:
if not metadata.maintainer_email:
self.warn("missing meta-data: if 'maintainer' supplied, " +
- "'maintainer_email' should be supplied too")
+ "'maintainer_email' should be supplied too")
else:
self.warn("missing meta-data: either (author and author_email) " +
"or (maintainer and maintainer_email) " +
- "should be supplied")
+ "should be supplied")
def check_restructuredtext(self):
"""Checks if the long string fields are reST-compliant."""
@@ -122,8 +122,8 @@ class check(Command):
def _check_rst_data(self, data):
"""Returns warnings when the provided data doesn't compile."""
- # the include and csv_table directives need this to be a path
- source_path = self.distribution.script_name or 'setup.py'
+ # the include and csv_table directives need this to be a path
+ source_path = self.distribution.script_name or 'setup.py'
parser = Parser()
settings = frontend.OptionParser(components=(Parser,)).get_default_values()
settings.tab_width = 4
diff --git a/contrib/tools/python3/src/Lib/distutils/command/config.py b/contrib/tools/python3/src/Lib/distutils/command/config.py
index aeda408e73..316c2886c6 100644
--- a/contrib/tools/python3/src/Lib/distutils/command/config.py
+++ b/contrib/tools/python3/src/Lib/distutils/command/config.py
@@ -106,14 +106,14 @@ class config(Command):
def _gen_temp_sourcefile(self, body, headers, lang):
filename = "_configtest" + LANG_EXT[lang]
- with open(filename, "w") as file:
- if headers:
- for header in headers:
- file.write("#include <%s>\n" % header)
- file.write("\n")
- file.write(body)
- if body[-1] != "\n":
- file.write("\n")
+ with open(filename, "w") as file:
+ if headers:
+ for header in headers:
+ file.write("#include <%s>\n" % header)
+ file.write("\n")
+ file.write(body)
+ if body[-1] != "\n":
+ file.write("\n")
return filename
def _preprocess(self, body, headers, include_dirs, lang):
@@ -202,15 +202,15 @@ class config(Command):
if isinstance(pattern, str):
pattern = re.compile(pattern)
- with open(out) as file:
- match = False
- while True:
- line = file.readline()
- if line == '':
- break
- if pattern.search(line):
- match = True
- break
+ with open(out) as file:
+ match = False
+ while True:
+ line = file.readline()
+ if line == '':
+ break
+ if pattern.search(line):
+ match = True
+ break
self._clean()
return match
diff --git a/contrib/tools/python3/src/Lib/distutils/command/install.py b/contrib/tools/python3/src/Lib/distutils/command/install.py
index aaa300efa9..3f38e34363 100644
--- a/contrib/tools/python3/src/Lib/distutils/command/install.py
+++ b/contrib/tools/python3/src/Lib/distutils/command/install.py
@@ -30,14 +30,14 @@ WINDOWS_SCHEME = {
INSTALL_SCHEMES = {
'unix_prefix': {
'purelib': '$base/lib/python$py_version_short/site-packages',
- 'platlib': '$platbase/$platlibdir/python$py_version_short/site-packages',
+ 'platlib': '$platbase/$platlibdir/python$py_version_short/site-packages',
'headers': '$base/include/python$py_version_short$abiflags/$dist_name',
'scripts': '$base/bin',
'data' : '$base',
},
'unix_home': {
'purelib': '$base/lib/python',
- 'platlib': '$base/$platlibdir/python',
+ 'platlib': '$base/$platlibdir/python',
'headers': '$base/include/python/$dist_name',
'scripts': '$base/bin',
'data' : '$base',
@@ -223,7 +223,7 @@ class install(Command):
def finalize_options(self):
"""Finalizes options."""
- # This method (and its helpers, like 'finalize_unix()',
+ # This method (and its helpers, like 'finalize_unix()',
# 'finalize_other()', and 'select_scheme()') is where the default
# installation directories for modules, extension modules, and
# anything else we care to install from a Python module
@@ -298,7 +298,7 @@ class install(Command):
'sys_exec_prefix': exec_prefix,
'exec_prefix': exec_prefix,
'abiflags': abiflags,
- 'platlibdir': sys.platlibdir,
+ 'platlibdir': sys.platlibdir,
}
if HAS_USER_SITE:
diff --git a/contrib/tools/python3/src/Lib/distutils/command/sdist.py b/contrib/tools/python3/src/Lib/distutils/command/sdist.py
index b4996fcb1d..eae03d8cce 100644
--- a/contrib/tools/python3/src/Lib/distutils/command/sdist.py
+++ b/contrib/tools/python3/src/Lib/distutils/command/sdist.py
@@ -407,13 +407,13 @@ class sdist(Command):
distribution.
"""
log.info("reading manifest file '%s'", self.manifest)
- with open(self.manifest) as manifest:
- for line in manifest:
- # ignore comments and blank lines
- line = line.strip()
- if line.startswith('#') or not line:
- continue
- self.filelist.append(line)
+ with open(self.manifest) as manifest:
+ for line in manifest:
+ # ignore comments and blank lines
+ line = line.strip()
+ if line.startswith('#') or not line:
+ continue
+ self.filelist.append(line)
def make_release_tree(self, base_dir, files):
"""Create the directory tree that will become the source
diff --git a/contrib/tools/python3/src/Lib/distutils/command/upload.py b/contrib/tools/python3/src/Lib/distutils/command/upload.py
index e0ecb655b9..d23a29c8ab 100644
--- a/contrib/tools/python3/src/Lib/distutils/command/upload.py
+++ b/contrib/tools/python3/src/Lib/distutils/command/upload.py
@@ -9,24 +9,24 @@ import os
import io
import hashlib
from base64 import standard_b64encode
-from urllib.error import HTTPError
-from urllib.request import urlopen, Request
+from urllib.error import HTTPError
+from urllib.request import urlopen, Request
from urllib.parse import urlparse
from distutils.errors import DistutilsError, DistutilsOptionError
from distutils.core import PyPIRCCommand
from distutils.spawn import spawn
from distutils import log
-
-# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256)
-# https://bugs.python.org/issue40698
-_FILE_CONTENT_DIGESTS = {
- "md5_digest": getattr(hashlib, "md5", None),
- "sha256_digest": getattr(hashlib, "sha256", None),
- "blake2_256_digest": getattr(hashlib, "blake2b", None),
-}
-
-
+
+# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256)
+# https://bugs.python.org/issue40698
+_FILE_CONTENT_DIGESTS = {
+ "md5_digest": getattr(hashlib, "md5", None),
+ "sha256_digest": getattr(hashlib, "sha256", None),
+ "blake2_256_digest": getattr(hashlib, "blake2b", None),
+}
+
+
class upload(PyPIRCCommand):
description = "upload binary package to PyPI"
@@ -98,7 +98,7 @@ class upload(PyPIRCCommand):
content = f.read()
finally:
f.close()
-
+
meta = self.distribution.metadata
data = {
# action
@@ -132,22 +132,22 @@ class upload(PyPIRCCommand):
'obsoletes': meta.get_obsoletes(),
}
- data['comment'] = ''
-
- # file content digests
- for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items():
- if digest_cons is None:
- continue
- try:
- data[digest_name] = digest_cons(content).hexdigest()
- except ValueError:
- # hash digest not available or blocked by security policy
- pass
-
+ data['comment'] = ''
+
+ # file content digests
+ for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items():
+ if digest_cons is None:
+ continue
+ try:
+ data[digest_name] = digest_cons(content).hexdigest()
+ except ValueError:
+ # hash digest not available or blocked by security policy
+ pass
+
if self.sign:
- with open(filename + ".asc", "rb") as f:
- data['gpg_signature'] = (os.path.basename(filename) + ".asc",
- f.read())
+ with open(filename + ".asc", "rb") as f:
+ data['gpg_signature'] = (os.path.basename(filename) + ".asc",
+ f.read())
# set up the authentication
user_pass = (self.username + ":" + self.password).encode('ascii')
diff --git a/contrib/tools/python3/src/Lib/distutils/msvc9compiler.py b/contrib/tools/python3/src/Lib/distutils/msvc9compiler.py
index a7976fbe3e..b4f36f44ad 100644
--- a/contrib/tools/python3/src/Lib/distutils/msvc9compiler.py
+++ b/contrib/tools/python3/src/Lib/distutils/msvc9compiler.py
@@ -19,7 +19,7 @@ import re
from distutils.errors import DistutilsExecError, DistutilsPlatformError, \
CompileError, LibError, LinkError
-from distutils.ccompiler import CCompiler, gen_lib_options
+from distutils.ccompiler import CCompiler, gen_lib_options
from distutils import log
from distutils.util import get_platform
@@ -673,7 +673,7 @@ class MSVCCompiler(CCompiler) :
# If a manifest should be embedded, return a tuple of
# (manifest_filename, resource_id). Returns None if no manifest
# should be embedded. See http://bugs.python.org/issue7833 for why
- # we want to avoid any manifest for extension modules if we can.
+ # we want to avoid any manifest for extension modules if we can.
for arg in ld_args:
if arg.startswith("/MANIFESTFILE:"):
temp_manifest = arg.split(":", 1)[1]
diff --git a/contrib/tools/python3/src/Lib/distutils/msvccompiler.py b/contrib/tools/python3/src/Lib/distutils/msvccompiler.py
index d5857cb1ff..cc2c16ae1b 100644
--- a/contrib/tools/python3/src/Lib/distutils/msvccompiler.py
+++ b/contrib/tools/python3/src/Lib/distutils/msvccompiler.py
@@ -13,7 +13,7 @@ from distutils.errors import \
DistutilsExecError, DistutilsPlatformError, \
CompileError, LibError, LinkError
from distutils.ccompiler import \
- CCompiler, gen_lib_options
+ CCompiler, gen_lib_options
from distutils import log
_can_read_reg = False
diff --git a/contrib/tools/python3/src/Lib/distutils/spawn.py b/contrib/tools/python3/src/Lib/distutils/spawn.py
index 31df3f7fac..5ea8f7801d 100644
--- a/contrib/tools/python3/src/Lib/distutils/spawn.py
+++ b/contrib/tools/python3/src/Lib/distutils/spawn.py
@@ -8,18 +8,18 @@ executable name.
import sys
import os
-import subprocess
+import subprocess
from distutils.errors import DistutilsPlatformError, DistutilsExecError
from distutils.debug import DEBUG
from distutils import log
-
-if sys.platform == 'darwin':
- _cfg_target = None
- _cfg_target_split = None
-
-
+
+if sys.platform == 'darwin':
+ _cfg_target = None
+ _cfg_target_split = None
+
+
def spawn(cmd, search_path=1, verbose=0, dry_run=0):
"""Run another program, specified as a command list 'cmd', in a new process.
@@ -40,90 +40,90 @@ def spawn(cmd, search_path=1, verbose=0, dry_run=0):
# in, protect our %-formatting code against horrible death
cmd = list(cmd)
- log.info(' '.join(cmd))
- if dry_run:
- return
+ log.info(' '.join(cmd))
+ if dry_run:
+ return
if search_path:
- executable = find_executable(cmd[0])
- if executable is not None:
- cmd[0] = executable
+ executable = find_executable(cmd[0])
+ if executable is not None:
+ cmd[0] = executable
env = None
if sys.platform == 'darwin':
global _cfg_target, _cfg_target_split
if _cfg_target is None:
- from distutils import sysconfig
- _cfg_target = sysconfig.get_config_var(
- 'MACOSX_DEPLOYMENT_TARGET') or ''
+ from distutils import sysconfig
+ _cfg_target = sysconfig.get_config_var(
+ 'MACOSX_DEPLOYMENT_TARGET') or ''
if _cfg_target:
_cfg_target_split = [int(x) for x in _cfg_target.split('.')]
if _cfg_target:
- # Ensure that the deployment target of the build process is not
- # less than 10.3 if the interpreter was built for 10.3 or later.
- # This ensures extension modules are built with correct
- # compatibility values, specifically LDSHARED which can use
- # '-undefined dynamic_lookup' which only works on >= 10.3.
+ # Ensure that the deployment target of the build process is not
+ # less than 10.3 if the interpreter was built for 10.3 or later.
+ # This ensures extension modules are built with correct
+ # compatibility values, specifically LDSHARED which can use
+ # '-undefined dynamic_lookup' which only works on >= 10.3.
cur_target = os.environ.get('MACOSX_DEPLOYMENT_TARGET', _cfg_target)
- cur_target_split = [int(x) for x in cur_target.split('.')]
- if _cfg_target_split[:2] >= [10, 3] and cur_target_split[:2] < [10, 3]:
+ cur_target_split = [int(x) for x in cur_target.split('.')]
+ if _cfg_target_split[:2] >= [10, 3] and cur_target_split[:2] < [10, 3]:
my_msg = ('$MACOSX_DEPLOYMENT_TARGET mismatch: '
- 'now "%s" but "%s" during configure;'
- 'must use 10.3 or later'
+ 'now "%s" but "%s" during configure;'
+ 'must use 10.3 or later'
% (cur_target, _cfg_target))
raise DistutilsPlatformError(my_msg)
env = dict(os.environ,
MACOSX_DEPLOYMENT_TARGET=cur_target)
- try:
- proc = subprocess.Popen(cmd, env=env)
- proc.wait()
- exitcode = proc.returncode
- except OSError as exc:
- if not DEBUG:
- cmd = cmd[0]
- raise DistutilsExecError(
- "command %r failed: %s" % (cmd, exc.args[-1])) from exc
-
- if exitcode:
+ try:
+ proc = subprocess.Popen(cmd, env=env)
+ proc.wait()
+ exitcode = proc.returncode
+ except OSError as exc:
if not DEBUG:
- cmd = cmd[0]
- raise DistutilsExecError(
- "command %r failed with exit code %s" % (cmd, exitcode))
-
-
+ cmd = cmd[0]
+ raise DistutilsExecError(
+ "command %r failed: %s" % (cmd, exc.args[-1])) from exc
+
+ if exitcode:
+ if not DEBUG:
+ cmd = cmd[0]
+ raise DistutilsExecError(
+ "command %r failed with exit code %s" % (cmd, exitcode))
+
+
def find_executable(executable, path=None):
"""Tries to find 'executable' in the directories listed in 'path'.
A string listing directories separated by 'os.pathsep'; defaults to
os.environ['PATH']. Returns the complete filename or None if not found.
"""
- _, ext = os.path.splitext(executable)
+ _, ext = os.path.splitext(executable)
if (sys.platform == 'win32') and (ext != '.exe'):
executable = executable + '.exe'
- if os.path.isfile(executable):
- return executable
-
- if path is None:
- path = os.environ.get('PATH', None)
- if path is None:
- try:
- path = os.confstr("CS_PATH")
- except (AttributeError, ValueError):
- # os.confstr() or CS_PATH is not available
- path = os.defpath
- # bpo-35755: Don't use os.defpath if the PATH environment variable is
- # set to an empty string
-
- # PATH='' doesn't match, whereas PATH=':' looks in the current directory
- if not path:
+ if os.path.isfile(executable):
+ return executable
+
+ if path is None:
+ path = os.environ.get('PATH', None)
+ if path is None:
+ try:
+ path = os.confstr("CS_PATH")
+ except (AttributeError, ValueError):
+ # os.confstr() or CS_PATH is not available
+ path = os.defpath
+ # bpo-35755: Don't use os.defpath if the PATH environment variable is
+ # set to an empty string
+
+ # PATH='' doesn't match, whereas PATH=':' looks in the current directory
+ if not path:
return None
-
- paths = path.split(os.pathsep)
- for p in paths:
- f = os.path.join(p, executable)
- if os.path.isfile(f):
- # the file exists, we have a shot at spawn working
- return f
- return None
+
+ paths = path.split(os.pathsep)
+ for p in paths:
+ f = os.path.join(p, executable)
+ if os.path.isfile(f):
+ # the file exists, we have a shot at spawn working
+ return f
+ return None
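Usage of the helper is straightforward; results naturally vary by machine:

    from distutils.spawn import find_executable

    print(find_executable('python3'))           # e.g. /usr/bin/python3, or None
    print(find_executable('python3', path=''))  # None: per bpo-35755, PATH='' matches nothing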
diff --git a/contrib/tools/python3/src/Lib/distutils/sysconfig.py b/contrib/tools/python3/src/Lib/distutils/sysconfig.py
index 4e5464705b..93e347c05b 100644
--- a/contrib/tools/python3/src/Lib/distutils/sysconfig.py
+++ b/contrib/tools/python3/src/Lib/distutils/sysconfig.py
@@ -28,19 +28,19 @@ BASE_EXEC_PREFIX = os.path.normpath(sys.base_exec_prefix)
if "_PYTHON_PROJECT_BASE" in os.environ:
project_base = os.path.abspath(os.environ["_PYTHON_PROJECT_BASE"])
else:
- if sys.executable:
- project_base = os.path.dirname(os.path.abspath(sys.executable))
- else:
- # sys.executable can be empty if argv[0] has been changed and Python is
- # unable to retrieve the real program name
- project_base = os.getcwd()
+ if sys.executable:
+ project_base = os.path.dirname(os.path.abspath(sys.executable))
+ else:
+ # sys.executable can be empty if argv[0] has been changed and Python is
+ # unable to retrieve the real program name
+ project_base = os.getcwd()
# python_build: (Boolean) if true, we're either building Python or
# building an extension with an un-installed Python, so we use
# different (hard-wired) directories.
def _is_python_source_dir(d):
- for fn in ("Setup", "Setup.local"):
+ for fn in ("Setup", "Setup.local"):
if os.path.isfile(os.path.join(d, "Modules", fn)):
return True
return False
@@ -145,15 +145,15 @@ def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
prefix = plat_specific and EXEC_PREFIX or PREFIX
if os.name == "posix":
- if plat_specific or standard_lib:
- # Platform-specific modules (any module from a non-pure-Python
- # module distribution) or standard Python library modules.
- libdir = sys.platlibdir
- else:
- # Pure Python
- libdir = "lib"
- libpython = os.path.join(prefix, libdir,
- "python" + get_python_version())
+ if plat_specific or standard_lib:
+ # Platform-specific modules (any module from a non-pure-Python
+ # module distribution) or standard Python library modules.
+ libdir = sys.platlibdir
+ else:
+ # Pure Python
+ libdir = "lib"
+ libpython = os.path.join(prefix, libdir,
+ "python" + get_python_version())
if standard_lib:
return libpython
else:
@@ -193,8 +193,8 @@ def customize_compiler(compiler):
_osx_support.customize_compiler(_config_vars)
_config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True'
- (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \
- get_config_vars('CC', 'CXX', 'CFLAGS',
+ (cc, cxx, cflags, ccshared, ldshared, shlib_suffix, ar, ar_flags) = \
+ get_config_vars('CC', 'CXX', 'CFLAGS',
'CCSHARED', 'LDSHARED', 'SHLIB_SUFFIX', 'AR', 'ARFLAGS')
if 'CC' in os.environ:
@@ -217,7 +217,7 @@ def customize_compiler(compiler):
if 'LDFLAGS' in os.environ:
ldshared = ldshared + ' ' + os.environ['LDFLAGS']
if 'CFLAGS' in os.environ:
- cflags = cflags + ' ' + os.environ['CFLAGS']
+ cflags = cflags + ' ' + os.environ['CFLAGS']
ldshared = ldshared + ' ' + os.environ['CFLAGS']
if 'CPPFLAGS' in os.environ:
cpp = cpp + ' ' + os.environ['CPPFLAGS']
diff --git a/contrib/tools/python3/src/Lib/distutils/unixccompiler.py b/contrib/tools/python3/src/Lib/distutils/unixccompiler.py
index d00c48981e..38e4546b6f 100644
--- a/contrib/tools/python3/src/Lib/distutils/unixccompiler.py
+++ b/contrib/tools/python3/src/Lib/distutils/unixccompiler.py
@@ -215,8 +215,8 @@ class UnixCCompiler(CCompiler):
return "-L" + dir
def _is_gcc(self, compiler_name):
- # clang uses same syntax for rpath as gcc
- return any(name in compiler_name for name in ("gcc", "g++", "clang"))
+ # clang uses same syntax for rpath as gcc
+ return any(name in compiler_name for name in ("gcc", "g++", "clang"))
def runtime_library_dir_option(self, dir):
# XXX Hackish, at the very least. See Python bug #445902:
@@ -289,9 +289,9 @@ class UnixCCompiler(CCompiler):
# vs
# /usr/lib/libedit.dylib
cflags = sysconfig.get_config_var('CFLAGS')
- m = re.search(r'-isysroot\s*(\S+)', cflags)
+ m = re.search(r'-isysroot\s*(\S+)', cflags)
if m is None:
- sysroot = _osx_support._default_sysroot(sysconfig.get_config_var('CC'))
+ sysroot = _osx_support._default_sysroot(sysconfig.get_config_var('CC'))
else:
sysroot = m.group(1)
diff --git a/contrib/tools/python3/src/Lib/distutils/util.py b/contrib/tools/python3/src/Lib/distutils/util.py
index 4b002ecef1..a55af04a8d 100644
--- a/contrib/tools/python3/src/Lib/distutils/util.py
+++ b/contrib/tools/python3/src/Lib/distutils/util.py
@@ -15,7 +15,7 @@ from distutils.spawn import spawn
from distutils import log
from distutils.errors import DistutilsByteCompileError
-def get_host_platform():
+def get_host_platform():
"""Return a string that identifies the current platform. This is used mainly to
distinguish platform-specific build directories and platform-specific built
distributions. Typically includes the OS name and version and the
@@ -38,10 +38,10 @@ def get_host_platform():
if os.name == 'nt':
if 'amd64' in sys.version.lower():
return 'win-amd64'
- if '(arm)' in sys.version.lower():
- return 'win-arm32'
- if '(arm64)' in sys.version.lower():
- return 'win-arm64'
+ if '(arm)' in sys.version.lower():
+ return 'win-arm32'
+ if '(arm64)' in sys.version.lower():
+ return 'win-arm64'
return sys.platform
# Set for cross builds explicitly
@@ -79,8 +79,8 @@ def get_host_platform():
machine += ".%s" % bitness[sys.maxsize]
# fall through to standard osname-release-machine representation
elif osname[:3] == "aix":
- from _aix_support import aix_platform
- return aix_platform()
+ from _aix_support import aix_platform
+ return aix_platform()
elif osname[:6] == "cygwin":
osname = "cygwin"
rel_re = re.compile (r'[\d.]+', re.ASCII)
@@ -95,16 +95,16 @@ def get_host_platform():
return "%s-%s-%s" % (osname, release, machine)
-def get_platform():
- if os.name == 'nt':
- TARGET_TO_PLAT = {
- 'x86' : 'win32',
- 'x64' : 'win-amd64',
- 'arm' : 'win-arm32',
- }
- return TARGET_TO_PLAT.get(os.environ.get('VSCMD_ARG_TGT_ARCH')) or get_host_platform()
- else:
- return get_host_platform()
+def get_platform():
+ if os.name == 'nt':
+ TARGET_TO_PLAT = {
+ 'x86' : 'win32',
+ 'x64' : 'win-amd64',
+ 'arm' : 'win-arm32',
+ }
+ return TARGET_TO_PLAT.get(os.environ.get('VSCMD_ARG_TGT_ARCH')) or get_host_platform()
+ else:
+ return get_host_platform()
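A quick sketch of the override path (setting the variable by hand stands in for a Visual Studio cross-build shell):

    import os
    from distutils.util import get_platform, get_host_platform

    os.environ['VSCMD_ARG_TGT_ARCH'] = 'x64'  # hypothetical session
    print(get_platform())       # 'win-amd64' on Windows; host platform elsewhere
    print(get_host_platform())  # always describes the running interpreter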
def convert_path (pathname):
"""Return 'pathname' as a name that will work on the native filesystem,
@@ -391,28 +391,28 @@ def byte_compile (py_files,
else:
script = open(script_name, "w")
- with script:
- script.write("""\
+ with script:
+ script.write("""\
from distutils.util import byte_compile
files = [
""")
- # XXX would be nice to write absolute filenames, just for
- # safety's sake (script should be more robust in the face of
- # chdir'ing before running it). But this requires abspath'ing
- # 'prefix' as well, and that breaks the hack in build_lib's
- # 'byte_compile()' method that carefully tacks on a trailing
- # slash (os.sep really) to make sure the prefix here is "just
- # right". This whole prefix business is rather delicate -- the
- # problem is that it's really a directory, but I'm treating it
- # as a dumb string, so trailing slashes and so forth matter.
-
- #py_files = map(os.path.abspath, py_files)
- #if prefix:
- # prefix = os.path.abspath(prefix)
-
- script.write(",\n".join(map(repr, py_files)) + "]\n")
- script.write("""
+ # XXX would be nice to write absolute filenames, just for
+ # safety's sake (script should be more robust in the face of
+ # chdir'ing before running it). But this requires abspath'ing
+ # 'prefix' as well, and that breaks the hack in build_lib's
+ # 'byte_compile()' method that carefully tacks on a trailing
+ # slash (os.sep really) to make sure the prefix here is "just
+ # right". This whole prefix business is rather delicate -- the
+ # problem is that it's really a directory, but I'm treating it
+ # as a dumb string, so trailing slashes and so forth matter.
+
+ #py_files = map(os.path.abspath, py_files)
+ #if prefix:
+ # prefix = os.path.abspath(prefix)
+
+ script.write(",\n".join(map(repr, py_files)) + "]\n")
+ script.write("""
byte_compile(files, optimize=%r, force=%r,
prefix=%r, base_dir=%r,
verbose=%r, dry_run=0,
diff --git a/contrib/tools/python3/src/Lib/distutils/version.py b/contrib/tools/python3/src/Lib/distutils/version.py
index c33bebaed2..4ce1e764da 100644
--- a/contrib/tools/python3/src/Lib/distutils/version.py
+++ b/contrib/tools/python3/src/Lib/distutils/version.py
@@ -166,8 +166,8 @@ class StrictVersion (Version):
def _cmp (self, other):
if isinstance(other, str):
other = StrictVersion(other)
- elif not isinstance(other, StrictVersion):
- return NotImplemented
+ elif not isinstance(other, StrictVersion):
+ return NotImplemented
if self.version != other.version:
# numeric versions don't match
@@ -333,8 +333,8 @@ class LooseVersion (Version):
def _cmp (self, other):
if isinstance(other, str):
other = LooseVersion(other)
- elif not isinstance(other, LooseVersion):
- return NotImplemented
+ elif not isinstance(other, LooseVersion):
+ return NotImplemented
if self.version == other.version:
return 0
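Returning NotImplemented (rather than raising) lets Python fall back to the other operand's reflected comparison; a small illustration:

    from distutils.version import LooseVersion, StrictVersion

    print(LooseVersion('1.5.1') < '1.5.2b2')               # True: strings are coerced first
    print(StrictVersion('1.2') == StrictVersion('1.2.0'))  # True

    class Tagged:
        def __eq__(self, other):
            # Reflected side that runs once _cmp returns NotImplemented.
            return isinstance(other, LooseVersion)

    print(LooseVersion('1.0') == Tagged())  # True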
diff --git a/contrib/tools/python3/src/Lib/doctest.py b/contrib/tools/python3/src/Lib/doctest.py
index 2910bebd8d..6397090717 100644
--- a/contrib/tools/python3/src/Lib/doctest.py
+++ b/contrib/tools/python3/src/Lib/doctest.py
@@ -211,13 +211,13 @@ def _normalize_module(module, depth=2):
else:
raise TypeError("Expected a module, string, or None")
-def _newline_convert(data):
- # We have two cases to cover and we need to make sure we do
- # them in the right order
- for newline in ('\r\n', '\r'):
- data = data.replace(newline, '\n')
- return data
-
+def _newline_convert(data):
+ # We have two cases to cover and we need to make sure we do
+ # them in the right order
+ for newline in ('\r\n', '\r'):
+ data = data.replace(newline, '\n')
+ return data
+
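The loop order is load-bearing: replacing '\r\n' before bare '\r' keeps one Windows newline from becoming two. A quick check:

    def _newline_convert(data):
        for newline in ('\r\n', '\r'):
            data = data.replace(newline, '\n')
        return data

    assert _newline_convert('a\r\nb\rc') == 'a\nb\nc'
    # Reversed order would first turn '\r\n' into '\n\n' via the bare-'\r' pass.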
def _load_testfile(filename, package, module_relative, encoding):
if module_relative:
package = _normalize_module(package, 3)
@@ -228,7 +228,7 @@ def _load_testfile(filename, package, module_relative, encoding):
file_contents = file_contents.decode(encoding)
# get_data() opens files as 'rb', so one must do the equivalent
# conversion as universal newlines would do.
- return _newline_convert(file_contents), filename
+ return _newline_convert(file_contents), filename
with open(filename, encoding=encoding) as f:
return f.read(), filename
@@ -1025,8 +1025,8 @@ class DocTestFinder:
if inspect.isclass(obj) and self._recurse:
for valname, val in obj.__dict__.items():
# Special handling for staticmethod/classmethod.
- if isinstance(val, (staticmethod, classmethod)):
- val = val.__func__
+ if isinstance(val, (staticmethod, classmethod)):
+ val = val.__func__
# Recurse to methods, properties, and nested classes.
if ((inspect.isroutine(val) or inspect.isclass(val) or
@@ -1067,8 +1067,8 @@ class DocTestFinder:
if module is None:
filename = None
else:
- # __file__ can be None for namespace packages.
- filename = getattr(module, '__file__', None) or module.__name__
+ # __file__ can be None for namespace packages.
+ filename = getattr(module, '__file__', None) or module.__name__
if filename[-4:] == ".pyc":
filename = filename[:-1]
return self._parser.get_doctest(docstring, globs, name,
@@ -1335,7 +1335,7 @@ class DocTestRunner:
try:
# Don't blink! This is where the user's code gets run.
exec(compile(example.source, filename, "single",
- compileflags, True), test.globs)
+ compileflags, True), test.globs)
self.debugger.set_continue() # ==== Example Finished ====
exception = None
except KeyboardInterrupt:
@@ -2309,7 +2309,7 @@ class DocTestCase(unittest.TestCase):
name = self._dt_test.name.split('.')
return "%s (%s)" % (name[-1], '.'.join(name[:-1]))
- __str__ = object.__str__
+ __str__ = object.__str__
def shortDescription(self):
return "Doctest: " + self._dt_test.name
diff --git a/contrib/tools/python3/src/Lib/email/_header_value_parser.py b/contrib/tools/python3/src/Lib/email/_header_value_parser.py
index 51d355fbb0..a9dbc7f335 100644
--- a/contrib/tools/python3/src/Lib/email/_header_value_parser.py
+++ b/contrib/tools/python3/src/Lib/email/_header_value_parser.py
@@ -68,7 +68,7 @@ XXX: provide complete list of token types.
"""
import re
-import sys
+import sys
import urllib # For urllib.parse.unquote
from string import hexdigits
from operator import itemgetter
@@ -96,18 +96,18 @@ EXTENDED_ATTRIBUTE_ENDS = ATTRIBUTE_ENDS - set('%')
def quote_string(value):
return '"'+str(value).replace('\\', '\\\\').replace('"', r'\"')+'"'
-# Match a RFC 2047 word, looks like =?utf-8?q?someword?=
-rfc2047_matcher = re.compile(r'''
- =\? # literal =?
- [^?]* # charset
- \? # literal ?
- [qQbB] # literal 'q' or 'b', case insensitive
- \? # literal ?
- .*? # encoded word
- \?= # literal ?=
-''', re.VERBOSE | re.MULTILINE)
-
-
+# Match an RFC 2047 word, which looks like =?utf-8?q?someword?=
+rfc2047_matcher = re.compile(r'''
+ =\? # literal =?
+ [^?]* # charset
+ \? # literal ?
+ [qQbB] # literal 'q' or 'b', case insensitive
+ \? # literal ?
+ .*? # encoded word
+ \?= # literal ?=
+''', re.VERBOSE | re.MULTILINE)
+
+
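The matcher can be exercised directly; a minimal check against the pattern added above:

    import re

    rfc2047_matcher = re.compile(r'''
        =\?      # literal =?
        [^?]*    # charset
        \?       # literal ?
        [qQbB]   # 'q' or 'b' encoding, case insensitive
        \?       # literal ?
        .*?      # encoded word
        \?=      # literal ?=
    ''', re.VERBOSE | re.MULTILINE)

    assert rfc2047_matcher.search('hello =?utf-8?q?world?=')
    assert not rfc2047_matcher.search('plain text')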
#
# TokenList and its subclasses
#
@@ -509,11 +509,11 @@ class DotAtomText(TokenList):
as_ew_allowed = True
-class NoFoldLiteral(TokenList):
- token_type = 'no-fold-literal'
- as_ew_allowed = False
-
-
+class NoFoldLiteral(TokenList):
+ token_type = 'no-fold-literal'
+ as_ew_allowed = False
+
+
class AddrSpec(TokenList):
token_type = 'addr-spec'
@@ -561,8 +561,8 @@ class DisplayName(Phrase):
@property
def display_name(self):
res = TokenList(self)
- if len(res) == 0:
- return res.value
+ if len(res) == 0:
+ return res.value
if res[0].token_type == 'cfws':
res.pop(0)
else:
@@ -584,7 +584,7 @@ class DisplayName(Phrase):
for x in self:
if x.token_type == 'quoted-string':
quote = True
- if len(self) != 0 and quote:
+ if len(self) != 0 and quote:
pre = post = ''
if self[0].token_type=='cfws' or self[0][0].token_type=='cfws':
pre = ' '
@@ -730,7 +730,7 @@ class MimeParameters(TokenList):
# to assume the RFC 2231 pieces can come in any order. However, we
# output them in the order that we first see a given name, which gives
# us a stable __str__.
- params = {} # Using order preserving dict from Python 3.7+
+ params = {} # Using order preserving dict from Python 3.7+
for token in self:
if not token.token_type.endswith('parameter'):
continue
@@ -842,23 +842,23 @@ class HeaderLabel(TokenList):
as_ew_allowed = False
-class MsgID(TokenList):
- token_type = 'msg-id'
- as_ew_allowed = False
-
- def fold(self, policy):
- # message-id tokens may not be folded.
- return str(self) + policy.linesep
-
-
-class MessageID(MsgID):
- token_type = 'message-id'
-
-
-class InvalidMessageID(MessageID):
- token_type = 'invalid-message-id'
-
-
+class MsgID(TokenList):
+ token_type = 'msg-id'
+ as_ew_allowed = False
+
+ def fold(self, policy):
+ # message-id tokens may not be folded.
+ return str(self) + policy.linesep
+
+
+class MessageID(MsgID):
+ token_type = 'message-id'
+
+
+class InvalidMessageID(MessageID):
+ token_type = 'invalid-message-id'
+
+
class Header(TokenList):
token_type = 'header'
@@ -940,10 +940,10 @@ class EWWhiteSpaceTerminal(WhiteSpaceTerminal):
return ''
-class _InvalidEwError(errors.HeaderParseError):
- """Invalid encoded word found while parsing headers."""
-
-
+class _InvalidEwError(errors.HeaderParseError):
+ """Invalid encoded word found while parsing headers."""
+
+
# XXX these need to become classes and used as instances so
# that a program can't change them in a parse tree and screw
# up other parse trees. Maybe should have tests for that, too.
@@ -1048,10 +1048,10 @@ def get_encoded_word(value):
raise errors.HeaderParseError(
"expected encoded word but found {}".format(value))
remstr = ''.join(remainder)
- if (len(remstr) > 1 and
- remstr[0] in hexdigits and
- remstr[1] in hexdigits and
- tok.count('?') < 2):
+ if (len(remstr) > 1 and
+ remstr[0] in hexdigits and
+ remstr[1] in hexdigits and
+ tok.count('?') < 2):
# The ? after the CTE was followed by an encoded word escape (=XX).
rest, *remainder = remstr.split('?=', 1)
tok = tok + '?=' + rest
@@ -1062,8 +1062,8 @@ def get_encoded_word(value):
value = ''.join(remainder)
try:
text, charset, lang, defects = _ew.decode('=?' + tok + '?=')
- except (ValueError, KeyError):
- raise _InvalidEwError(
+ except (ValueError, KeyError):
+ raise _InvalidEwError(
"encoded word format invalid: '{}'".format(ew.cte))
ew.charset = charset
ew.lang = lang
@@ -1078,10 +1078,10 @@ def get_encoded_word(value):
_validate_xtext(vtext)
ew.append(vtext)
text = ''.join(remainder)
- # Encoded words should be followed by a WS
- if value and value[0] not in WSP:
- ew.defects.append(errors.InvalidHeaderDefect(
- "missing trailing whitespace after encoded-word"))
+ # Encoded words should be followed by a WS
+ if value and value[0] not in WSP:
+ ew.defects.append(errors.InvalidHeaderDefect(
+ "missing trailing whitespace after encoded-word"))
return ew, value
def get_unstructured(value):
@@ -1113,12 +1113,12 @@ def get_unstructured(value):
token, value = get_fws(value)
unstructured.append(token)
continue
- valid_ew = True
+ valid_ew = True
if value.startswith('=?'):
try:
token, value = get_encoded_word(value)
- except _InvalidEwError:
- valid_ew = False
+ except _InvalidEwError:
+ valid_ew = False
except errors.HeaderParseError:
# XXX: Need to figure out how to register defects when
# appropriate here.
@@ -1137,14 +1137,14 @@ def get_unstructured(value):
unstructured.append(token)
continue
tok, *remainder = _wsp_splitter(value, 1)
- # Split in the middle of an atom if there is a rfc2047 encoded word
- # which does not have WSP on both sides. The defect will be registered
- # the next time through the loop.
- # This needs to only be performed when the encoded word is valid;
- # otherwise, performing it on an invalid encoded word can cause
- # the parser to go in an infinite loop.
- if valid_ew and rfc2047_matcher.search(tok):
- tok, *remainder = value.partition('=?')
+ # Split in the middle of an atom if there is a rfc2047 encoded word
+ # which does not have WSP on both sides. The defect will be registered
+ # the next time through the loop.
+ # This needs to only be performed when the encoded word is valid;
+ # otherwise, performing it on an invalid encoded word can cause
+ # the parser to go in an infinite loop.
+ if valid_ew and rfc2047_matcher.search(tok):
+ tok, *remainder = value.partition('=?')
vtext = ValueTerminal(tok, 'vtext')
_validate_xtext(vtext)
unstructured.append(vtext)
@@ -1211,28 +1211,28 @@ def get_bare_quoted_string(value):
"expected '\"' but found '{}'".format(value))
bare_quoted_string = BareQuotedString()
value = value[1:]
- if value and value[0] == '"':
+ if value and value[0] == '"':
token, value = get_qcontent(value)
bare_quoted_string.append(token)
while value and value[0] != '"':
if value[0] in WSP:
token, value = get_fws(value)
elif value[:2] == '=?':
- valid_ew = False
+ valid_ew = False
try:
token, value = get_encoded_word(value)
bare_quoted_string.defects.append(errors.InvalidHeaderDefect(
"encoded word inside quoted string"))
- valid_ew = True
+ valid_ew = True
except errors.HeaderParseError:
token, value = get_qcontent(value)
- # Collapse the whitespace between two encoded words that occur in a
- # bare-quoted-string.
- if valid_ew and len(bare_quoted_string) > 1:
- if (bare_quoted_string[-1].token_type == 'fws' and
- bare_quoted_string[-2].token_type == 'encoded-word'):
- bare_quoted_string[-1] = EWWhiteSpaceTerminal(
- bare_quoted_string[-1], 'fws')
+ # Collapse the whitespace between two encoded words that occur in a
+ # bare-quoted-string.
+ if valid_ew and len(bare_quoted_string) > 1:
+ if (bare_quoted_string[-1].token_type == 'fws' and
+ bare_quoted_string[-2].token_type == 'encoded-word'):
+ bare_quoted_string[-1] = EWWhiteSpaceTerminal(
+ bare_quoted_string[-1], 'fws')
else:
token, value = get_qcontent(value)
bare_quoted_string.append(token)
@@ -1389,9 +1389,9 @@ def get_word(value):
leader, value = get_cfws(value)
else:
leader = None
- if not value:
- raise errors.HeaderParseError(
- "Expected 'atom' or 'quoted-string' but found nothing.")
+ if not value:
+ raise errors.HeaderParseError(
+ "Expected 'atom' or 'quoted-string' but found nothing.")
if value[0]=='"':
token, value = get_quoted_string(value)
elif value[0] in SPECIALS:
@@ -1616,8 +1616,8 @@ def get_domain(value):
token, value = get_dot_atom(value)
except errors.HeaderParseError:
token, value = get_atom(value)
- if value and value[0] == '@':
- raise errors.HeaderParseError('Invalid Domain')
+ if value and value[0] == '@':
+ raise errors.HeaderParseError('Invalid Domain')
if leader is not None:
token[:0] = [leader]
domain.append(token)
@@ -1641,7 +1641,7 @@ def get_addr_spec(value):
addr_spec.append(token)
if not value or value[0] != '@':
addr_spec.defects.append(errors.InvalidHeaderDefect(
- "addr-spec local part with no domain"))
+ "addr-spec local part with no domain"))
return addr_spec, value
addr_spec.append(ValueTerminal('@', 'address-at-symbol'))
token, value = get_domain(value[1:])
@@ -2026,118 +2026,118 @@ def get_address_list(value):
value = value[1:]
return address_list, value
-
-def get_no_fold_literal(value):
- """ no-fold-literal = "[" *dtext "]"
- """
- no_fold_literal = NoFoldLiteral()
- if not value:
- raise errors.HeaderParseError(
- "expected no-fold-literal but found '{}'".format(value))
- if value[0] != '[':
- raise errors.HeaderParseError(
- "expected '[' at the start of no-fold-literal "
- "but found '{}'".format(value))
- no_fold_literal.append(ValueTerminal('[', 'no-fold-literal-start'))
- value = value[1:]
- token, value = get_dtext(value)
- no_fold_literal.append(token)
- if not value or value[0] != ']':
- raise errors.HeaderParseError(
- "expected ']' at the end of no-fold-literal "
- "but found '{}'".format(value))
- no_fold_literal.append(ValueTerminal(']', 'no-fold-literal-end'))
- return no_fold_literal, value[1:]
-
-def get_msg_id(value):
- """msg-id = [CFWS] "<" id-left '@' id-right ">" [CFWS]
- id-left = dot-atom-text / obs-id-left
- id-right = dot-atom-text / no-fold-literal / obs-id-right
- no-fold-literal = "[" *dtext "]"
- """
- msg_id = MsgID()
- if value and value[0] in CFWS_LEADER:
- token, value = get_cfws(value)
- msg_id.append(token)
- if not value or value[0] != '<':
- raise errors.HeaderParseError(
- "expected msg-id but found '{}'".format(value))
- msg_id.append(ValueTerminal('<', 'msg-id-start'))
- value = value[1:]
- # Parse id-left.
- try:
- token, value = get_dot_atom_text(value)
- except errors.HeaderParseError:
- try:
- # obs-id-left is same as local-part of add-spec.
- token, value = get_obs_local_part(value)
- msg_id.defects.append(errors.ObsoleteHeaderDefect(
- "obsolete id-left in msg-id"))
- except errors.HeaderParseError:
- raise errors.HeaderParseError(
- "expected dot-atom-text or obs-id-left"
- " but found '{}'".format(value))
- msg_id.append(token)
- if not value or value[0] != '@':
- msg_id.defects.append(errors.InvalidHeaderDefect(
- "msg-id with no id-right"))
- # Even though there is no id-right, if the local part
- # ends with `>` let's just parse it too and return
- # along with the defect.
- if value and value[0] == '>':
- msg_id.append(ValueTerminal('>', 'msg-id-end'))
- value = value[1:]
- return msg_id, value
- msg_id.append(ValueTerminal('@', 'address-at-symbol'))
- value = value[1:]
- # Parse id-right.
- try:
- token, value = get_dot_atom_text(value)
- except errors.HeaderParseError:
- try:
- token, value = get_no_fold_literal(value)
- except errors.HeaderParseError as e:
- try:
- token, value = get_domain(value)
- msg_id.defects.append(errors.ObsoleteHeaderDefect(
- "obsolete id-right in msg-id"))
- except errors.HeaderParseError:
- raise errors.HeaderParseError(
- "expected dot-atom-text, no-fold-literal or obs-id-right"
- " but found '{}'".format(value))
- msg_id.append(token)
- if value and value[0] == '>':
- value = value[1:]
- else:
- msg_id.defects.append(errors.InvalidHeaderDefect(
- "missing trailing '>' on msg-id"))
- msg_id.append(ValueTerminal('>', 'msg-id-end'))
- if value and value[0] in CFWS_LEADER:
- token, value = get_cfws(value)
- msg_id.append(token)
- return msg_id, value
-
-
-def parse_message_id(value):
- """message-id = "Message-ID:" msg-id CRLF
- """
- message_id = MessageID()
- try:
- token, value = get_msg_id(value)
- message_id.append(token)
- except errors.HeaderParseError as ex:
- token = get_unstructured(value)
- message_id = InvalidMessageID(token)
- message_id.defects.append(
- errors.InvalidHeaderDefect("Invalid msg-id: {!r}".format(ex)))
- else:
- # Value after parsing a valid msg_id should be None.
- if value:
- message_id.defects.append(errors.InvalidHeaderDefect(
- "Unexpected {!r}".format(value)))
-
- return message_id
-
+
+def get_no_fold_literal(value):
+ """ no-fold-literal = "[" *dtext "]"
+ """
+ no_fold_literal = NoFoldLiteral()
+ if not value:
+ raise errors.HeaderParseError(
+ "expected no-fold-literal but found '{}'".format(value))
+ if value[0] != '[':
+ raise errors.HeaderParseError(
+ "expected '[' at the start of no-fold-literal "
+ "but found '{}'".format(value))
+ no_fold_literal.append(ValueTerminal('[', 'no-fold-literal-start'))
+ value = value[1:]
+ token, value = get_dtext(value)
+ no_fold_literal.append(token)
+ if not value or value[0] != ']':
+ raise errors.HeaderParseError(
+ "expected ']' at the end of no-fold-literal "
+ "but found '{}'".format(value))
+ no_fold_literal.append(ValueTerminal(']', 'no-fold-literal-end'))
+ return no_fold_literal, value[1:]
+
+def get_msg_id(value):
+ """msg-id = [CFWS] "<" id-left '@' id-right ">" [CFWS]
+ id-left = dot-atom-text / obs-id-left
+ id-right = dot-atom-text / no-fold-literal / obs-id-right
+ no-fold-literal = "[" *dtext "]"
+ """
+ msg_id = MsgID()
+ if value and value[0] in CFWS_LEADER:
+ token, value = get_cfws(value)
+ msg_id.append(token)
+ if not value or value[0] != '<':
+ raise errors.HeaderParseError(
+ "expected msg-id but found '{}'".format(value))
+ msg_id.append(ValueTerminal('<', 'msg-id-start'))
+ value = value[1:]
+ # Parse id-left.
+ try:
+ token, value = get_dot_atom_text(value)
+ except errors.HeaderParseError:
+ try:
+ # obs-id-left is the same as the local-part of addr-spec.
+ token, value = get_obs_local_part(value)
+ msg_id.defects.append(errors.ObsoleteHeaderDefect(
+ "obsolete id-left in msg-id"))
+ except errors.HeaderParseError:
+ raise errors.HeaderParseError(
+ "expected dot-atom-text or obs-id-left"
+ " but found '{}'".format(value))
+ msg_id.append(token)
+ if not value or value[0] != '@':
+ msg_id.defects.append(errors.InvalidHeaderDefect(
+ "msg-id with no id-right"))
+ # Even though there is no id-right, if the local part
+ # ends with `>` let's just parse it too and return
+ # along with the defect.
+ if value and value[0] == '>':
+ msg_id.append(ValueTerminal('>', 'msg-id-end'))
+ value = value[1:]
+ return msg_id, value
+ msg_id.append(ValueTerminal('@', 'address-at-symbol'))
+ value = value[1:]
+ # Parse id-right.
+ try:
+ token, value = get_dot_atom_text(value)
+ except errors.HeaderParseError:
+ try:
+ token, value = get_no_fold_literal(value)
+ except errors.HeaderParseError as e:
+ try:
+ token, value = get_domain(value)
+ msg_id.defects.append(errors.ObsoleteHeaderDefect(
+ "obsolete id-right in msg-id"))
+ except errors.HeaderParseError:
+ raise errors.HeaderParseError(
+ "expected dot-atom-text, no-fold-literal or obs-id-right"
+ " but found '{}'".format(value))
+ msg_id.append(token)
+ if value and value[0] == '>':
+ value = value[1:]
+ else:
+ msg_id.defects.append(errors.InvalidHeaderDefect(
+ "missing trailing '>' on msg-id"))
+ msg_id.append(ValueTerminal('>', 'msg-id-end'))
+ if value and value[0] in CFWS_LEADER:
+ token, value = get_cfws(value)
+ msg_id.append(token)
+ return msg_id, value
+
+
+def parse_message_id(value):
+ """message-id = "Message-ID:" msg-id CRLF
+ """
+ message_id = MessageID()
+ try:
+ token, value = get_msg_id(value)
+ message_id.append(token)
+ except errors.HeaderParseError as ex:
+ token = get_unstructured(value)
+ message_id = InvalidMessageID(token)
+ message_id.defects.append(
+ errors.InvalidHeaderDefect("Invalid msg-id: {!r}".format(ex)))
+ else:
+ # Value after parsing a valid msg_id should be None.
+ if value:
+ message_id.defects.append(errors.InvalidHeaderDefect(
+ "Unexpected {!r}".format(value)))
+
+ return message_id
+
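These parsers sit behind the header registry, so the msg-id grammar is reachable from the public API (values illustrative):

    from email import message_from_string
    from email.policy import default

    raw = 'Message-ID: <20220210.1234@example.com>\n\nbody\n'
    msg = message_from_string(raw, policy=default)
    mid = msg['Message-ID']
    print(mid)          # <20220210.1234@example.com>
    print(mid.defects)  # empty for a well-formed msg-id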
#
# XXX: As I begin to add additional header parsers, I'm realizing we probably
# have two level of parser routines: the get_XXX methods that get a token in
@@ -2535,9 +2535,9 @@ def get_parameter(value):
while value:
if value[0] in WSP:
token, value = get_fws(value)
- elif value[0] == '"':
- token = ValueTerminal('"', 'DQUOTE')
- value = value[1:]
+ elif value[0] == '"':
+ token = ValueTerminal('"', 'DQUOTE')
+ value = value[1:]
else:
token, value = get_qcontent(value)
v.append(token)
@@ -2558,7 +2558,7 @@ def parse_mime_parameters(value):
the formal RFC grammar, but it is more convenient for us for the set of
parameters to be treated as its own TokenList.
- This is 'parse' routine because it consumes the remaining value, but it
+ This is a 'parse' routine because it consumes the remaining value, but it
would never be called to parse a full header. Instead it is called to
parse everything after the non-parameter value of a specific MIME header.
@@ -2764,7 +2764,7 @@ def _refold_parse_tree(parse_tree, *, policy):
"""
# max_line_length 0/None means no limit, ie: infinitely long.
- maxlen = policy.max_line_length or sys.maxsize
+ maxlen = policy.max_line_length or sys.maxsize
encoding = 'utf-8' if policy.utf8 else 'us-ascii'
lines = ['']
last_ew = None
@@ -2778,9 +2778,9 @@ def _refold_parse_tree(parse_tree, *, policy):
wrap_as_ew_blocked -= 1
continue
tstr = str(part)
- if part.token_type == 'ptext' and set(tstr) & SPECIALS:
- # Encode if tstr contains special characters.
- want_encoding = True
+ if part.token_type == 'ptext' and set(tstr) & SPECIALS:
+ # Encode if tstr contains special characters.
+ want_encoding = True
try:
tstr.encode(encoding)
charset = encoding
@@ -2802,7 +2802,7 @@ def _refold_parse_tree(parse_tree, *, policy):
want_encoding = False
last_ew = None
if part.syntactic_break:
- encoded_part = part.fold(policy=policy)[:-len(policy.linesep)]
+ encoded_part = part.fold(policy=policy)[:-len(policy.linesep)]
if policy.linesep not in encoded_part:
# It fits on a single line
if len(encoded_part) > maxlen - len(lines[-1]):
@@ -2837,7 +2837,7 @@ def _refold_parse_tree(parse_tree, *, policy):
newline = _steal_trailing_WSP_if_exists(lines)
if newline or part.startswith_fws():
lines.append(newline + tstr)
- last_ew = None
+ last_ew = None
continue
if not hasattr(part, 'encode'):
# It's not a terminal, try folding the subparts.
@@ -2891,36 +2891,36 @@ def _fold_as_ew(to_encode, lines, maxlen, last_ew, ew_combine_allowed, charset):
trailing_wsp = to_encode[-1]
to_encode = to_encode[:-1]
new_last_ew = len(lines[-1]) if last_ew is None else last_ew
-
- encode_as = 'utf-8' if charset == 'us-ascii' else charset
-
- # The RFC2047 chrome takes up 7 characters plus the length
- # of the charset name.
- chrome_len = len(encode_as) + 7
-
- if (chrome_len + 1) >= maxlen:
- raise errors.HeaderParseError(
- "max_line_length is too small to fit an encoded word")
-
+
+ encode_as = 'utf-8' if charset == 'us-ascii' else charset
+
+ # The RFC2047 chrome takes up 7 characters plus the length
+ # of the charset name.
+ chrome_len = len(encode_as) + 7
+
+ if (chrome_len + 1) >= maxlen:
+ raise errors.HeaderParseError(
+ "max_line_length is too small to fit an encoded word")
+
while to_encode:
remaining_space = maxlen - len(lines[-1])
- text_space = remaining_space - chrome_len
+ text_space = remaining_space - chrome_len
if text_space <= 0:
lines.append(' ')
continue
-
- to_encode_word = to_encode[:text_space]
- encoded_word = _ew.encode(to_encode_word, charset=encode_as)
- excess = len(encoded_word) - remaining_space
- while excess > 0:
- # Since the chunk to encode is guaranteed to fit into less than 100 characters,
- # shrinking it by one at a time shouldn't take long.
- to_encode_word = to_encode_word[:-1]
- encoded_word = _ew.encode(to_encode_word, charset=encode_as)
- excess = len(encoded_word) - remaining_space
- lines[-1] += encoded_word
- to_encode = to_encode[len(to_encode_word):]
-
+
+ to_encode_word = to_encode[:text_space]
+ encoded_word = _ew.encode(to_encode_word, charset=encode_as)
+ excess = len(encoded_word) - remaining_space
+ while excess > 0:
+ # Since the chunk to encode is guaranteed to fit into less than 100 characters,
+ # shrinking it by one at a time shouldn't take long.
+ to_encode_word = to_encode_word[:-1]
+ encoded_word = _ew.encode(to_encode_word, charset=encode_as)
+ excess = len(encoded_word) - remaining_space
+ lines[-1] += encoded_word
+ to_encode = to_encode[len(to_encode_word):]
+
if to_encode:
lines.append(' ')
new_last_ew = len(lines[-1])
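The 7-character "chrome" is the fixed =??q??= scaffolding around the charset name and payload; the arithmetic checks out on a concrete word:

    charset = 'utf-8'
    payload = 'hi'
    encoded = '=?utf-8?q?hi?='
    chrome_len = len(charset) + 7  # '=?' + '?q?' + '?=' contribute 7 characters
    assert len(encoded) == chrome_len + len(payload)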
diff --git a/contrib/tools/python3/src/Lib/email/_parseaddr.py b/contrib/tools/python3/src/Lib/email/_parseaddr.py
index c5a7b23193..bb53fcc278 100644
--- a/contrib/tools/python3/src/Lib/email/_parseaddr.py
+++ b/contrib/tools/python3/src/Lib/email/_parseaddr.py
@@ -67,8 +67,8 @@ def _parsedate_tz(data):
if not data:
return
data = data.split()
- if not data: # This happens for whitespace-only input.
- return None
+ if not data: # This happens for whitespace-only input.
+ return None
# The FWS after the comma after the day-of-week is optional, so search and
# adjust for this.
if data[0].endswith(',') or data[0].lower() in _daynames:
@@ -128,8 +128,8 @@ def _parsedate_tz(data):
tss = 0
elif len(tm) == 3:
[thh, tmm, tss] = tm
- else:
- return None
+ else:
+ return None
else:
return None
try:
@@ -383,12 +383,12 @@ class AddrlistClass:
aslist.append('@')
self.pos += 1
self.gotonext()
- domain = self.getdomain()
- if not domain:
- # Invalid domain, return an empty address instead of returning a
- # local part to denote failed parsing.
- return EMPTYSTRING
- return EMPTYSTRING.join(aslist) + domain
+ domain = self.getdomain()
+ if not domain:
+ # Invalid domain, return an empty address instead of returning a
+ # local part to denote failed parsing.
+ return EMPTYSTRING
+ return EMPTYSTRING.join(aslist) + domain
def getdomain(self):
"""Get the complete domain name from an address."""
@@ -403,10 +403,10 @@ class AddrlistClass:
elif self.field[self.pos] == '.':
self.pos += 1
sdlist.append('.')
- elif self.field[self.pos] == '@':
- # bpo-34155: Don't parse domains with two `@` like
- # `a@malicious.org@important.com`.
- return EMPTYSTRING
+ elif self.field[self.pos] == '@':
+ # bpo-34155: Don't parse domains with two `@` like
+ # `a@malicious.org@important.com`.
+ return EMPTYSTRING
elif self.field[self.pos] in self.atomends:
break
else:
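The bpo-34155 guard above makes the legacy parser bail out with an empty string rather than mis-split an address that smuggles in a second `@`. Assuming a build that includes the fix, the effect is visible through the public API:

from email.utils import parseaddr

# The second '@' aborts domain parsing, so the whole address collapses
# to ('', '') instead of a partial, misleading parse.
print(parseaddr('a@malicious.org@important.com'))  # ('', '')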
diff --git a/contrib/tools/python3/src/Lib/email/charset.py b/contrib/tools/python3/src/Lib/email/charset.py
index d3d759ad91..2573893187 100644
--- a/contrib/tools/python3/src/Lib/email/charset.py
+++ b/contrib/tools/python3/src/Lib/email/charset.py
@@ -241,7 +241,7 @@ class Charset:
self.output_codec = CODEC_MAP.get(self.output_charset,
self.output_charset)
- def __repr__(self):
+ def __repr__(self):
return self.input_charset.lower()
def __eq__(self, other):
diff --git a/contrib/tools/python3/src/Lib/email/contentmanager.py b/contrib/tools/python3/src/Lib/email/contentmanager.py
index fcf278dbcc..01cbb79260 100644
--- a/contrib/tools/python3/src/Lib/email/contentmanager.py
+++ b/contrib/tools/python3/src/Lib/email/contentmanager.py
@@ -144,15 +144,15 @@ def _encode_text(string, charset, cte, policy):
linesep = policy.linesep.encode('ascii')
def embedded_body(lines): return linesep.join(lines) + linesep
def normal_body(lines): return b'\n'.join(lines) + b'\n'
- if cte is None:
+ if cte is None:
# Use heuristics to decide on the "best" encoding.
- if max((len(x) for x in lines), default=0) <= policy.max_line_length:
- try:
- return '7bit', normal_body(lines).decode('ascii')
- except UnicodeDecodeError:
- pass
- if policy.cte_type == '8bit':
- return '8bit', normal_body(lines).decode('ascii', 'surrogateescape')
+ if max((len(x) for x in lines), default=0) <= policy.max_line_length:
+ try:
+ return '7bit', normal_body(lines).decode('ascii')
+ except UnicodeDecodeError:
+ pass
+ if policy.cte_type == '8bit':
+ return '8bit', normal_body(lines).decode('ascii', 'surrogateescape')
sniff = embedded_body(lines[:10])
sniff_qp = quoprimime.body_encode(sniff.decode('latin-1'),
policy.max_line_length)
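When cte is None, _encode_text above works down a ladder: 7bit if every line is short and decodes as ASCII, 8bit if the policy permits it, otherwise it sniffs a sample of the body to choose between quoted-printable and base64. A simplified restatement of that ladder (names are illustrative, not the module's internals):

def pick_cte(lines, max_line_length, allow_8bit):
    # Sketch of the heuristic above, not the stdlib code itself.
    if max((len(x) for x in lines), default=0) <= max_line_length:
        try:
            b'\n'.join(lines).decode('ascii')
            return '7bit'    # short lines, pure ASCII
        except UnicodeDecodeError:
            pass
        if allow_8bit:
            return '8bit'    # short lines, non-ASCII passed through
    return 'qp-or-base64'    # decided by sniffing a sample of the body

print(pick_cte([b'hello'], 78, allow_8bit=False))  # 7bit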
@@ -238,7 +238,7 @@ def set_bytes_content(msg, data, maintype, subtype, cte='base64',
data = binascii.b2a_qp(data, istext=False, header=False, quotetabs=True)
data = data.decode('ascii')
elif cte == '7bit':
- data = data.decode('ascii')
+ data = data.decode('ascii')
elif cte in ('8bit', 'binary'):
data = data.decode('ascii', 'surrogateescape')
msg.set_payload(data)
diff --git a/contrib/tools/python3/src/Lib/email/feedparser.py b/contrib/tools/python3/src/Lib/email/feedparser.py
index 97d3f5144d..36909ab31c 100644
--- a/contrib/tools/python3/src/Lib/email/feedparser.py
+++ b/contrib/tools/python3/src/Lib/email/feedparser.py
@@ -320,7 +320,7 @@ class FeedParser:
self._cur.set_payload(EMPTYSTRING.join(lines))
return
# Make sure a valid content type was specified per RFC 2045:6.4.
- if (str(self._cur.get('content-transfer-encoding', '8bit')).lower()
+ if (str(self._cur.get('content-transfer-encoding', '8bit')).lower()
not in ('7bit', '8bit', 'binary')):
defect = errors.InvalidMultipartContentTransferEncodingDefect()
self.policy.handle_defect(self._cur, defect)
diff --git a/contrib/tools/python3/src/Lib/email/generator.py b/contrib/tools/python3/src/Lib/email/generator.py
index c9b121624e..db007a5652 100644
--- a/contrib/tools/python3/src/Lib/email/generator.py
+++ b/contrib/tools/python3/src/Lib/email/generator.py
@@ -186,11 +186,11 @@ class Generator:
# If we munged the cte, copy the message again and re-fix the CTE.
if munge_cte:
msg = deepcopy(msg)
- # Preserve the header order if the CTE header already exists.
- if msg.get('content-transfer-encoding') is None:
- msg['Content-Transfer-Encoding'] = munge_cte[0]
- else:
- msg.replace_header('content-transfer-encoding', munge_cte[0])
+ # Preserve the header order if the CTE header already exists.
+ if msg.get('content-transfer-encoding') is None:
+ msg['Content-Transfer-Encoding'] = munge_cte[0]
+ else:
+ msg.replace_header('content-transfer-encoding', munge_cte[0])
msg.replace_header('content-type', munge_cte[1])
# Write the headers. First we see if the message object wants to
# handle that itself. If not, we'll do it generically.
diff --git a/contrib/tools/python3/src/Lib/email/header.py b/contrib/tools/python3/src/Lib/email/header.py
index 4ab0032bc6..ce19f67d10 100644
--- a/contrib/tools/python3/src/Lib/email/header.py
+++ b/contrib/tools/python3/src/Lib/email/header.py
@@ -431,7 +431,7 @@ class _ValueFormatter:
if end_of_line != (' ', ''):
self._current_line.push(*end_of_line)
if len(self._current_line) > 0:
- if self._current_line.is_onlyws() and self._lines:
+ if self._current_line.is_onlyws() and self._lines:
self._lines[-1] += str(self._current_line)
else:
self._lines.append(str(self._current_line))
diff --git a/contrib/tools/python3/src/Lib/email/headerregistry.py b/contrib/tools/python3/src/Lib/email/headerregistry.py
index 5d84fc0d82..d8b1f2f073 100644
--- a/contrib/tools/python3/src/Lib/email/headerregistry.py
+++ b/contrib/tools/python3/src/Lib/email/headerregistry.py
@@ -31,11 +31,11 @@ class Address:
without any Content Transfer Encoding.
"""
-
- inputs = ''.join(filter(None, (display_name, username, domain, addr_spec)))
- if '\r' in inputs or '\n' in inputs:
- raise ValueError("invalid arguments; address parts cannot contain CR or LF")
-
+
+ inputs = ''.join(filter(None, (display_name, username, domain, addr_spec)))
+ if '\r' in inputs or '\n' in inputs:
+ raise ValueError("invalid arguments; address parts cannot contain CR or LF")
+
# This clause with its potential 'raise' may only happen when an
# application program creates an Address object using an addr_spec
# keyword. The email library code itself must always supply username
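The CR/LF check restored above closes a header-injection hole: no part of an Address may contain a carriage return or newline, so a crafted display name cannot smuggle extra header lines into a serialized message. Assuming a build with this fix:

from email.headerregistry import Address

try:
    Address(display_name='evil\r\nBcc: victim@example.com',
            username='user', domain='example.com')
except ValueError as exc:
    print(exc)  # invalid arguments; address parts cannot contain CR or LF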
@@ -74,9 +74,9 @@ class Address:
"""The addr_spec (username@domain) portion of the address, quoted
according to RFC 5322 rules, but with no Content Transfer Encoding.
"""
- lp = self.username
- if not parser.DOT_ATOM_ENDS.isdisjoint(lp):
- lp = parser.quote_string(lp)
+ lp = self.username
+ if not parser.DOT_ATOM_ENDS.isdisjoint(lp):
+ lp = parser.quote_string(lp)
if self.domain:
return lp + '@' + self.domain
if not lp:
@@ -89,17 +89,17 @@ class Address:
self.display_name, self.username, self.domain)
def __str__(self):
- disp = self.display_name
- if not parser.SPECIALS.isdisjoint(disp):
- disp = parser.quote_string(disp)
+ disp = self.display_name
+ if not parser.SPECIALS.isdisjoint(disp):
+ disp = parser.quote_string(disp)
if disp:
addr_spec = '' if self.addr_spec=='<>' else self.addr_spec
return "{} <{}>".format(disp, addr_spec)
return self.addr_spec
def __eq__(self, other):
- if not isinstance(other, Address):
- return NotImplemented
+ if not isinstance(other, Address):
+ return NotImplemented
return (self.display_name == other.display_name and
self.username == other.username and
self.domain == other.domain)
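Both addr_spec and __str__ above quote only on demand: the local part is quoted if it contains a DOT_ATOM_ENDS character, the display name if it touches SPECIALS. A plausible session:

from email.headerregistry import Address

plain = Address(display_name='Ana', username='ana', domain='example.com')
tricky = Address(display_name='Ana, QA', username='ana lovelace',
                 domain='example.com')
print(str(plain))   # Ana <ana@example.com>
print(str(tricky))  # "Ana, QA" <"ana lovelace"@example.com>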
@@ -142,15 +142,15 @@ class Group:
if self.display_name is None and len(self.addresses)==1:
return str(self.addresses[0])
disp = self.display_name
- if disp is not None and not parser.SPECIALS.isdisjoint(disp):
- disp = parser.quote_string(disp)
+ if disp is not None and not parser.SPECIALS.isdisjoint(disp):
+ disp = parser.quote_string(disp)
adrstr = ", ".join(str(x) for x in self.addresses)
adrstr = ' ' + adrstr if adrstr else adrstr
return "{}:{};".format(disp, adrstr)
def __eq__(self, other):
- if not isinstance(other, Group):
- return NotImplemented
+ if not isinstance(other, Group):
+ return NotImplemented
return (self.display_name == other.display_name and
self.addresses == other.addresses)
@@ -244,7 +244,7 @@ class BaseHeader(str):
the header name and the ': ' separator.
"""
- # At some point we need to put fws here if it was in the source.
+ # At some point we need to put fws here if it was in the source.
header = parser.Header([
parser.HeaderLabel([
parser.ValueTerminal(self.name, 'header-name'),
@@ -519,18 +519,18 @@ class ContentTransferEncodingHeader:
return self._cte
-class MessageIDHeader:
-
- max_count = 1
- value_parser = staticmethod(parser.parse_message_id)
-
- @classmethod
- def parse(cls, value, kwds):
- kwds['parse_tree'] = parse_tree = cls.value_parser(value)
- kwds['decoded'] = str(parse_tree)
- kwds['defects'].extend(parse_tree.all_defects)
-
-
+class MessageIDHeader:
+
+ max_count = 1
+ value_parser = staticmethod(parser.parse_message_id)
+
+ @classmethod
+ def parse(cls, value, kwds):
+ kwds['parse_tree'] = parse_tree = cls.value_parser(value)
+ kwds['decoded'] = str(parse_tree)
+ kwds['defects'].extend(parse_tree.all_defects)
+
+
# The header factory #
_default_header_map = {
@@ -553,7 +553,7 @@ _default_header_map = {
'content-type': ContentTypeHeader,
'content-disposition': ContentDispositionHeader,
'content-transfer-encoding': ContentTransferEncodingHeader,
- 'message-id': MessageIDHeader,
+ 'message-id': MessageIDHeader,
}
class HeaderRegistry:
diff --git a/contrib/tools/python3/src/Lib/email/message.py b/contrib/tools/python3/src/Lib/email/message.py
index 6752ce0fa1..110342f5f9 100644
--- a/contrib/tools/python3/src/Lib/email/message.py
+++ b/contrib/tools/python3/src/Lib/email/message.py
@@ -141,7 +141,7 @@ class Message:
header. For backward compatibility reasons, if maxheaderlen is
not specified it defaults to 0, so you must override it explicitly
if you want a different maxheaderlen. 'policy' is passed to the
- Generator instance used to serialize the message; if it is not
+ Generator instance used to serialize the message; if it is not
specified the policy associated with the message instance is used.
If the message object contains binary data that is not encoded
@@ -948,7 +948,7 @@ class MIMEPart(Message):
if policy is None:
from email.policy import default
policy = default
- super().__init__(policy)
+ super().__init__(policy)
def as_string(self, unixfrom=False, maxheaderlen=None, policy=None):
@@ -958,14 +958,14 @@ class MIMEPart(Message):
header. maxheaderlen is retained for backward compatibility with the
base Message class, but defaults to None, meaning that the policy value
for max_line_length controls the header maximum length. 'policy' is
- passed to the Generator instance used to serialize the message; if it
+ passed to the Generator instance used to serialize the message; if it
is not specified the policy associated with the message instance is
used.
"""
policy = self.policy if policy is None else policy
if maxheaderlen is None:
maxheaderlen = policy.max_line_length
- return super().as_string(unixfrom, maxheaderlen, policy)
+ return super().as_string(unixfrom, maxheaderlen, policy)
def __str__(self):
return self.as_string(policy=self.policy.clone(utf8=True))
@@ -982,7 +982,7 @@ class MIMEPart(Message):
if subtype in preferencelist:
yield (preferencelist.index(subtype), part)
return
- if maintype != 'multipart' or not self.is_multipart():
+ if maintype != 'multipart' or not self.is_multipart():
return
if subtype != 'related':
for subpart in part.iter_parts():
@@ -1041,16 +1041,16 @@ class MIMEPart(Message):
maintype, subtype = self.get_content_type().split('/')
if maintype != 'multipart' or subtype == 'alternative':
return
- payload = self.get_payload()
- # Certain malformed messages can have content type set to `multipart/*`
- # but still have single part body, in which case payload.copy() can
- # fail with AttributeError.
- try:
- parts = payload.copy()
- except AttributeError:
- # payload is not a list, it is most probably a string.
- return
-
+ payload = self.get_payload()
+ # Certain malformed messages can have the content type set to `multipart/*`
+ # but still have a single-part body, in which case payload.copy() can
+ # fail with AttributeError.
+ try:
+ parts = payload.copy()
+ except AttributeError:
+ # payload is not a list; it is most likely a string.
+ return
+
if maintype == 'multipart' and subtype == 'related':
# For related, we treat everything but the root as an attachment.
# The root may be indicated by 'start'; if there's no start or we
@@ -1087,7 +1087,7 @@ class MIMEPart(Message):
Return an empty iterator for a non-multipart.
"""
- if self.is_multipart():
+ if self.is_multipart():
yield from self.get_payload()
def get_content(self, *args, content_manager=None, **kw):
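The try/except restored in iter_attachments() defends against messages whose Content-Type claims multipart/* while the payload is really a single string; payload.copy() then raises AttributeError and the method quietly yields nothing. One way to hit that path by hand (assuming a build with this fix):

from email.message import EmailMessage

msg = EmailMessage()
msg.set_content('just text')
# Force a multipart content type onto a single-part (string) payload.
msg.replace_header('Content-Type', 'multipart/mixed')
print(list(msg.iter_attachments()))  # []: the guard swallows the mismatch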
diff --git a/contrib/tools/python3/src/Lib/email/policy.py b/contrib/tools/python3/src/Lib/email/policy.py
index 611deb50bb..6028e0ceeb 100644
--- a/contrib/tools/python3/src/Lib/email/policy.py
+++ b/contrib/tools/python3/src/Lib/email/policy.py
@@ -3,7 +3,7 @@ code that adds all the email6 features.
"""
import re
-import sys
+import sys
from email._policybase import Policy, Compat32, compat32, _extend_docstrings
from email.utils import _has_surrogates
from email.headerregistry import HeaderRegistry as HeaderRegistry
@@ -204,7 +204,7 @@ class EmailPolicy(Policy):
def _fold(self, name, value, refold_binary=False):
if hasattr(value, 'name'):
return value.fold(policy=self)
- maxlen = self.max_line_length if self.max_line_length else sys.maxsize
+ maxlen = self.max_line_length if self.max_line_length else sys.maxsize
lines = value.splitlines()
refold = (self.refold_source == 'all' or
self.refold_source == 'long' and
diff --git a/contrib/tools/python3/src/Lib/email/utils.py b/contrib/tools/python3/src/Lib/email/utils.py
index 48d30160aa..c146b8c075 100644
--- a/contrib/tools/python3/src/Lib/email/utils.py
+++ b/contrib/tools/python3/src/Lib/email/utils.py
@@ -81,7 +81,7 @@ def formataddr(pair, charset='utf-8'):
If the first element of pair is false, then the second element is
returned unmodified.
- The optional charset is the character set that is used to encode
+ The optional charset is the character set that is used to encode
realname in case realname is not ASCII safe. Can be an instance of str or
a Charset-like object which has a header_encode method. Default is
'utf-8'.
@@ -109,7 +109,7 @@ def formataddr(pair, charset='utf-8'):
def getaddresses(fieldvalues):
"""Return a list of (REALNAME, EMAIL) for each fieldvalue."""
- all = COMMASPACE.join(str(v) for v in fieldvalues)
+ all = COMMASPACE.join(str(v) for v in fieldvalues)
a = _AddressList(all)
return a.addresslist
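getaddresses() above simply joins the field values and hands them to the legacy _AddressList parser, so multiple addresses in one field come back as separate pairs:

from email.utils import getaddresses

print(getaddresses(['Ana <a@example.com>, Bob <b@example.com>']))
# [('Ana', 'a@example.com'), ('Bob', 'b@example.com')]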
@@ -259,13 +259,13 @@ def decode_params(params):
params is a sequence of 2-tuples containing (param name, string value).
"""
- new_params = [params[0]]
+ new_params = [params[0]]
# Map parameter's name to a list of continuations. The values are a
# 3-tuple of the continuation number, the string value, and a flag
# specifying whether a particular segment is %-encoded.
rfc2231_params = {}
- for name, value in params[1:]:
- encoded = name.endswith('*')
+ for name, value in params[1:]:
+ encoded = name.endswith('*')
value = unquote(value)
mo = rfc2231_continuation.match(name)
if mo:
diff --git a/contrib/tools/python3/src/Lib/encodings/__init__.py b/contrib/tools/python3/src/Lib/encodings/__init__.py
index ddd5afdcf2..987fcfd9ba 100644
--- a/contrib/tools/python3/src/Lib/encodings/__init__.py
+++ b/contrib/tools/python3/src/Lib/encodings/__init__.py
@@ -12,7 +12,7 @@
* getregentry() -> codecs.CodecInfo object
The getregentry() API must return a CodecInfo object with encoder, decoder,
incrementalencoder, incrementaldecoder, streamwriter and streamreader
- attributes which adhere to the Python Codec Interface Standard.
+ attributes which adhere to the Python Codec Interface Standard.
In addition, a module may optionally also define the following
APIs which are then used by the package's codec search function:
@@ -49,7 +49,7 @@ def normalize_encoding(encoding):
collapsed and replaced with a single underscore, e.g. ' -;#'
becomes '_'. Leading and trailing underscores are removed.
- Note that encoding names should be ASCII only.
+ Note that encoding names should be ASCII only.
"""
if isinstance(encoding, bytes):
diff --git a/contrib/tools/python3/src/Lib/encodings/aliases.py b/contrib/tools/python3/src/Lib/encodings/aliases.py
index d85afd6d5c..6bd41ed289 100644
--- a/contrib/tools/python3/src/Lib/encodings/aliases.py
+++ b/contrib/tools/python3/src/Lib/encodings/aliases.py
@@ -266,8 +266,8 @@ aliases = {
'roman8' : 'hp_roman8',
'r8' : 'hp_roman8',
'csHPRoman8' : 'hp_roman8',
- 'cp1051' : 'hp_roman8',
- 'ibm1051' : 'hp_roman8',
+ 'cp1051' : 'hp_roman8',
+ 'ibm1051' : 'hp_roman8',
# hz codec
'hzgb' : 'hz',
@@ -450,7 +450,7 @@ aliases = {
# mac_latin2 codec
'maccentraleurope' : 'mac_latin2',
- 'mac_centeuro' : 'mac_latin2',
+ 'mac_centeuro' : 'mac_latin2',
'maclatin2' : 'mac_latin2',
# mac_roman codec
@@ -534,7 +534,7 @@ aliases = {
'utf8' : 'utf_8',
'utf8_ucs2' : 'utf_8',
'utf8_ucs4' : 'utf_8',
- 'cp65001' : 'utf_8',
+ 'cp65001' : 'utf_8',
# uu_codec codec
'uu' : 'uu_codec',
diff --git a/contrib/tools/python3/src/Lib/encodings/punycode.py b/contrib/tools/python3/src/Lib/encodings/punycode.py
index 1c57264470..414dc76a0a 100644
--- a/contrib/tools/python3/src/Lib/encodings/punycode.py
+++ b/contrib/tools/python3/src/Lib/encodings/punycode.py
@@ -143,7 +143,7 @@ def decode_generalized_number(extended, extpos, bias, errors):
digit = char - 22 # 0x30-26
elif errors == "strict":
raise UnicodeError("Invalid extended code point '%s'"
- % extended[extpos-1])
+ % extended[extpos-1])
else:
return extpos, None
t = T(j, bias)
diff --git a/contrib/tools/python3/src/Lib/encodings/raw_unicode_escape.py b/contrib/tools/python3/src/Lib/encodings/raw_unicode_escape.py
index 46c8e070dd..a38a0de778 100644
--- a/contrib/tools/python3/src/Lib/encodings/raw_unicode_escape.py
+++ b/contrib/tools/python3/src/Lib/encodings/raw_unicode_escape.py
@@ -21,16 +21,16 @@ class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.raw_unicode_escape_encode(input, self.errors)[0]
-class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
- def _buffer_decode(self, input, errors, final):
- return codecs.raw_unicode_escape_decode(input, errors, final)
+class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
+ def _buffer_decode(self, input, errors, final):
+ return codecs.raw_unicode_escape_decode(input, errors, final)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
- def decode(self, input, errors='strict'):
- return codecs.raw_unicode_escape_decode(input, errors, False)
+ def decode(self, input, errors='strict'):
+ return codecs.raw_unicode_escape_decode(input, errors, False)
### encodings module API
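Deriving the decoder from BufferedIncrementalDecoder, as restored above, lets an escape sequence split across feed() calls decode correctly: the undecodable tail is buffered until more input (or final=True) arrives. A plausible session, assuming this codec build:

import codecs

dec = codecs.getincrementaldecoder('raw_unicode_escape')()
# The \u escape arrives in two chunks; the decoder buffers the tail.
print(dec.decode(b'\\u00e', final=False))  # ''
print(dec.decode(b'9', final=True))        # 'é'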
diff --git a/contrib/tools/python3/src/Lib/encodings/unicode_escape.py b/contrib/tools/python3/src/Lib/encodings/unicode_escape.py
index 9b1ce99b33..0d75604ee0 100644
--- a/contrib/tools/python3/src/Lib/encodings/unicode_escape.py
+++ b/contrib/tools/python3/src/Lib/encodings/unicode_escape.py
@@ -21,16 +21,16 @@ class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.unicode_escape_encode(input, self.errors)[0]
-class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
- def _buffer_decode(self, input, errors, final):
- return codecs.unicode_escape_decode(input, errors, final)
+class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
+ def _buffer_decode(self, input, errors, final):
+ return codecs.unicode_escape_decode(input, errors, final)
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
- def decode(self, input, errors='strict'):
- return codecs.unicode_escape_decode(input, errors, False)
+ def decode(self, input, errors='strict'):
+ return codecs.unicode_escape_decode(input, errors, False)
### encodings module API
diff --git a/contrib/tools/python3/src/Lib/encodings/uu_codec.py b/contrib/tools/python3/src/Lib/encodings/uu_codec.py
index 4e58c62fe9..350b5f7a4f 100644
--- a/contrib/tools/python3/src/Lib/encodings/uu_codec.py
+++ b/contrib/tools/python3/src/Lib/encodings/uu_codec.py
@@ -20,10 +20,10 @@ def uu_encode(input, errors='strict', filename='<data>', mode=0o666):
read = infile.read
write = outfile.write
- # Remove newline chars from filename
- filename = filename.replace('\n','\\n')
- filename = filename.replace('\r','\\r')
-
+ # Remove newline chars from filename
+ filename = filename.replace('\n','\\n')
+ filename = filename.replace('\r','\\r')
+
# Encode
write(('begin %o %s\n' % (mode & 0o777, filename)).encode('ascii'))
chunk = read(45)
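Escaping CR and LF in the filename, as restored above, keeps a crafted name from injecting extra lines into the uuencode 'begin' header. Calling the encoder directly shows the effect:

from encodings.uu_codec import uu_encode

out, consumed = uu_encode(b'payload', filename='evil\nx.txt')
print(out.splitlines()[0])  # b'begin 666 evil\\nx.txt': still one header line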
diff --git a/contrib/tools/python3/src/Lib/ensurepip/__init__.py b/contrib/tools/python3/src/Lib/ensurepip/__init__.py
index 2a140a2624..bc7b88ea61 100644
--- a/contrib/tools/python3/src/Lib/ensurepip/__init__.py
+++ b/contrib/tools/python3/src/Lib/ensurepip/__init__.py
@@ -1,37 +1,37 @@
import os
import os.path
import sys
-import runpy
+import runpy
import tempfile
-import subprocess
-from importlib import resources
-
-from . import _bundled
-
+import subprocess
+from importlib import resources
+from . import _bundled
+
+
__all__ = ["version", "bootstrap"]
-_SETUPTOOLS_VERSION = "58.1.0"
-_PIP_VERSION = "21.2.4"
+_SETUPTOOLS_VERSION = "58.1.0"
+_PIP_VERSION = "21.2.4"
_PROJECTS = [
- ("setuptools", _SETUPTOOLS_VERSION, "py3"),
- ("pip", _PIP_VERSION, "py3"),
+ ("setuptools", _SETUPTOOLS_VERSION, "py3"),
+ ("pip", _PIP_VERSION, "py3"),
]
def _run_pip(args, additional_paths=None):
- # Run the bootstraping in a subprocess to avoid leaking any state that happens
- # after pip has executed. Particulary, this avoids the case when pip holds onto
- # the files in *additional_paths*, preventing us to remove them at the end of the
- # invocation.
- code = f"""
-import runpy
-import sys
-sys.path = {additional_paths or []} + sys.path
-sys.argv[1:] = {args}
-runpy.run_module("pip", run_name="__main__", alter_sys=True)
-"""
- return subprocess.run([sys.executable, "-c", code], check=True).returncode
+ # Run the bootstrapping in a subprocess to avoid leaking any state that happens
+ # after pip has executed. In particular, this avoids the case where pip holds
+ # onto the files in *additional_paths*, preventing us from removing them at the
+ # end of the invocation.
+ code = f"""
+import runpy
+import sys
+sys.path = {additional_paths or []} + sys.path
+sys.argv[1:] = {args}
+runpy.run_module("pip", run_name="__main__", alter_sys=True)
+"""
+ return subprocess.run([sys.executable, "-c", code], check=True).returncode
def version():
@@ -79,8 +79,8 @@ def _bootstrap(*, root=None, upgrade=False, user=False,
if altinstall and default_pip:
raise ValueError("Cannot use altinstall and default_pip together")
- sys.audit("ensurepip.bootstrap", root)
-
+ sys.audit("ensurepip.bootstrap", root)
+
_disable_pip_configuration_settings()
# By default, installing pip and setuptools installs all of the
@@ -100,11 +100,11 @@ def _bootstrap(*, root=None, upgrade=False, user=False,
# Put our bundled wheels into a temporary directory and construct the
# additional paths that need to be added to sys.path
additional_paths = []
- for project, version, py_tag in _PROJECTS:
- wheel_name = "{}-{}-{}-none-any.whl".format(project, version, py_tag)
- whl = resources.read_binary(
- _bundled,
- wheel_name,
+ for project, version, py_tag in _PROJECTS:
+ wheel_name = "{}-{}-{}-none-any.whl".format(project, version, py_tag)
+ whl = resources.read_binary(
+ _bundled,
+ wheel_name,
)
with open(os.path.join(tmpdir, wheel_name), "wb") as fp:
fp.write(whl)
@@ -112,7 +112,7 @@ def _bootstrap(*, root=None, upgrade=False, user=False,
additional_paths.append(os.path.join(tmpdir, wheel_name))
# Construct the arguments to be passed to the pip command
- args = ["install", "--no-cache-dir", "--no-index", "--find-links", tmpdir]
+ args = ["install", "--no-cache-dir", "--no-index", "--find-links", tmpdir]
if root:
args += ["--root", root]
if upgrade:
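Running the bundled pip in a subprocess, as _run_pip above now does, guarantees that nothing pip imports can keep the temporary wheel files open when ensurepip later deletes them. The same isolation pattern in miniature (an illustrative helper, closely following the code above):

import subprocess
import sys

def run_isolated(module, argv, extra_paths):
    # One-shot interpreter: it sees extra_paths, runs the module as
    # __main__, then exits; no state leaks back into this process.
    code = f"""
import runpy, sys
sys.path = {extra_paths!r} + sys.path
sys.argv[1:] = {argv!r}
runpy.run_module({module!r}, run_name="__main__", alter_sys=True)
"""
    return subprocess.run([sys.executable, '-c', code], check=True).returncode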
diff --git a/contrib/tools/python3/src/Lib/enum.py b/contrib/tools/python3/src/Lib/enum.py
index ee4c4c04f9..cda835e852 100644
--- a/contrib/tools/python3/src/Lib/enum.py
+++ b/contrib/tools/python3/src/Lib/enum.py
@@ -10,54 +10,54 @@ __all__ = [
def _is_descriptor(obj):
- """
- Returns True if obj is a descriptor, False otherwise.
- """
+ """
+ Returns True if obj is a descriptor, False otherwise.
+ """
return (
hasattr(obj, '__get__') or
hasattr(obj, '__set__') or
- hasattr(obj, '__delete__')
- )
+ hasattr(obj, '__delete__')
+ )
def _is_dunder(name):
- """
- Returns True if a __dunder__ name, False otherwise.
- """
- return (
- len(name) > 4 and
+ """
+ Returns True if a __dunder__ name, False otherwise.
+ """
+ return (
+ len(name) > 4 and
name[:2] == name[-2:] == '__' and
name[2] != '_' and
- name[-3] != '_'
- )
+ name[-3] != '_'
+ )
def _is_sunder(name):
- """
- Returns True if a _sunder_ name, False otherwise.
- """
- return (
- len(name) > 2 and
+ """
+ Returns True if a _sunder_ name, False otherwise.
+ """
+ return (
+ len(name) > 2 and
name[0] == name[-1] == '_' and
name[1:2] != '_' and
- name[-2:-1] != '_'
- )
-
-def _is_private(cls_name, name):
- # do not use `re` as `re` imports `enum`
- pattern = '_%s__' % (cls_name, )
- if (
- len(name) >= 5
- and name.startswith(pattern)
- and name[len(pattern)] != '_'
- and (name[-1] != '_' or name[-2] != '_')
- ):
- return True
- else:
- return False
-
+ name[-2:-1] != '_'
+ )
+
+def _is_private(cls_name, name):
+ # do not use `re` as `re` imports `enum`
+ pattern = '_%s__' % (cls_name, )
+ if (
+ len(name) >= 5
+ and name.startswith(pattern)
+ and name[len(pattern)] != '_'
+ and (name[-1] != '_' or name[-2] != '_')
+ ):
+ return True
+ else:
+ return False
+
def _make_class_unpicklable(cls):
- """
- Make the given class un-picklable.
- """
+ """
+ Make the given class un-picklable.
+ """
def _break_on_call_reduce(self, proto):
raise TypeError('%r cannot be pickled' % self)
cls.__reduce_ex__ = _break_on_call_reduce
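The three predicates above bucket identifier names: dunder (__x__), sunder (_x_), and class-private (_Cls__x); _is_private exists so that names the compiler has already mangled are not misreported as reserved sunder names. Spot checks against the rules as written (these are private, version-specific helpers):

from enum import _is_dunder, _is_sunder, _is_private

assert _is_dunder('__init__') and not _is_dunder('___x___')
assert _is_sunder('_order_') and not _is_sunder('__order__')
assert _is_private('Color', '_Color__secret')        # mangled private name
assert not _is_private('Color', '_Color__dunder__')  # trailing __ excluded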
@@ -72,8 +72,8 @@ class auto:
class _EnumDict(dict):
- """
- Track enum member order and ensure member names are not reused.
+ """
+ Track enum member order and ensure member names are not reused.
EnumMeta will use the names found in self._member_names as the
enumeration member names.
@@ -83,25 +83,25 @@ class _EnumDict(dict):
self._member_names = []
self._last_values = []
self._ignore = []
- self._auto_called = False
+ self._auto_called = False
def __setitem__(self, key, value):
- """
- Changes anything not dundered or not a descriptor.
+ """
+ Changes anything not dundered or not a descriptor.
If an enum member name is used twice, an error is raised; duplicate
values are not checked for.
Single underscore (sunder) names are reserved.
"""
- if _is_private(self._cls_name, key):
- import warnings
- warnings.warn(
- "private variables, such as %r, will be normal attributes in 3.10"
- % (key, ),
- DeprecationWarning,
- stacklevel=2,
- )
+ if _is_private(self._cls_name, key):
+ import warnings
+ warnings.warn(
+ "private variables, such as %r, will be normal attributes in 3.10"
+ % (key, ),
+ DeprecationWarning,
+ stacklevel=2,
+ )
if _is_sunder(key):
if key not in (
'_order_', '_create_pseudo_member_',
@@ -109,9 +109,9 @@ class _EnumDict(dict):
):
raise ValueError('_names_ are reserved for future Enum use')
if key == '_generate_next_value_':
- # check if members already defined as auto()
- if self._auto_called:
- raise TypeError("_generate_next_value_ must be defined before members")
+ # check if members already defined as auto()
+ if self._auto_called:
+ raise TypeError("_generate_next_value_ must be defined before members")
setattr(self, '_generate_next_value', value)
elif key == '_ignore_':
if isinstance(value, str):
@@ -121,10 +121,10 @@ class _EnumDict(dict):
self._ignore = value
already = set(value) & set(self._member_names)
if already:
- raise ValueError(
- '_ignore_ cannot specify already set names: %r'
- % (already, )
- )
+ raise ValueError(
+ '_ignore_ cannot specify already set names: %r'
+ % (already, )
+ )
elif _is_dunder(key):
if key == '__order__':
key = '_order_'
@@ -139,13 +139,13 @@ class _EnumDict(dict):
raise TypeError('%r already defined as: %r' % (key, self[key]))
if isinstance(value, auto):
if value.value == _auto_null:
- value.value = self._generate_next_value(
- key,
- 1,
- len(self._member_names),
- self._last_values[:],
- )
- self._auto_called = True
+ value.value = self._generate_next_value(
+ key,
+ 1,
+ len(self._member_names),
+ self._last_values[:],
+ )
+ self._auto_called = True
value = value.value
self._member_names.append(key)
self._last_values.append(value)
@@ -158,25 +158,25 @@ class _EnumDict(dict):
Enum = None
class EnumMeta(type):
- """
- Metaclass for Enum
- """
+ """
+ Metaclass for Enum
+ """
@classmethod
- def __prepare__(metacls, cls, bases, **kwds):
- # check that previous enum members do not exist
- metacls._check_for_existing_members(cls, bases)
+ def __prepare__(metacls, cls, bases, **kwds):
+ # check that previous enum members do not exist
+ metacls._check_for_existing_members(cls, bases)
# create the namespace dict
enum_dict = _EnumDict()
- enum_dict._cls_name = cls
+ enum_dict._cls_name = cls
# inherit previous flags and _generate_next_value_ function
- member_type, first_enum = metacls._get_mixins_(cls, bases)
+ member_type, first_enum = metacls._get_mixins_(cls, bases)
if first_enum is not None:
- enum_dict['_generate_next_value_'] = getattr(
- first_enum, '_generate_next_value_', None,
- )
+ enum_dict['_generate_next_value_'] = getattr(
+ first_enum, '_generate_next_value_', None,
+ )
return enum_dict
- def __new__(metacls, cls, bases, classdict, **kwds):
+ def __new__(metacls, cls, bases, classdict, **kwds):
# an Enum class is final once enumeration items have been defined; it
# cannot be mixed with other types (int, float, etc.) if it has an
# inherited __new__ unless a new __new__ is defined (or the resulting
@@ -187,10 +187,10 @@ class EnumMeta(type):
ignore = classdict['_ignore_']
for key in ignore:
classdict.pop(key, None)
- member_type, first_enum = metacls._get_mixins_(cls, bases)
- __new__, save_new, use_args = metacls._find_new_(
- classdict, member_type, first_enum,
- )
+ member_type, first_enum = metacls._get_mixins_(cls, bases)
+ __new__, save_new, use_args = metacls._find_new_(
+ classdict, member_type, first_enum,
+ )
# save enum items into separate mapping so they don't get baked into
# the new class
@@ -211,18 +211,18 @@ class EnumMeta(type):
if '__doc__' not in classdict:
classdict['__doc__'] = 'An enumeration.'
- enum_class = super().__new__(metacls, cls, bases, classdict, **kwds)
+ enum_class = super().__new__(metacls, cls, bases, classdict, **kwds)
enum_class._member_names_ = [] # names in definition order
- enum_class._member_map_ = {} # name->value map
+ enum_class._member_map_ = {} # name->value map
enum_class._member_type_ = member_type
# save DynamicClassAttribute attributes from super classes so we know
# if we can take the shortcut of storing members in the class dict
- dynamic_attributes = {
- k for c in enum_class.mro()
- for k, v in c.__dict__.items()
- if isinstance(v, DynamicClassAttribute)
- }
+ dynamic_attributes = {
+ k for c in enum_class.mro()
+ for k, v in c.__dict__.items()
+ if isinstance(v, DynamicClassAttribute)
+ }
# Reverse value->name map for hashable values.
enum_class._value2member_map_ = {}
@@ -242,32 +242,32 @@ class EnumMeta(type):
methods = ('__getnewargs_ex__', '__getnewargs__',
'__reduce_ex__', '__reduce__')
if not any(m in member_type.__dict__ for m in methods):
- if '__new__' in classdict:
- # too late, sabotage
- _make_class_unpicklable(enum_class)
- else:
- # final attempt to verify that pickling would work:
- # travel mro until __new__ is found, checking for
- # __reduce__ and friends along the way -- if any of them
- # are found before/when __new__ is found, pickling should
- # work
- sabotage = None
- for chain in bases:
- for base in chain.__mro__:
- if base is object:
- continue
- elif any(m in base.__dict__ for m in methods):
- # found one, we're good
- sabotage = False
- break
- elif '__new__' in base.__dict__:
- # not good
- sabotage = True
- break
- if sabotage is not None:
- break
- if sabotage:
- _make_class_unpicklable(enum_class)
+ if '__new__' in classdict:
+ # too late, sabotage
+ _make_class_unpicklable(enum_class)
+ else:
+ # final attempt to verify that pickling would work:
+ # travel mro until __new__ is found, checking for
+ # __reduce__ and friends along the way -- if any of them
+ # are found before/when __new__ is found, pickling should
+ # work
+ sabotage = None
+ for chain in bases:
+ for base in chain.__mro__:
+ if base is object:
+ continue
+ elif any(m in base.__dict__ for m in methods):
+ # found one, we're good
+ sabotage = False
+ break
+ elif '__new__' in base.__dict__:
+ # not good
+ sabotage = True
+ break
+ if sabotage is not None:
+ break
+ if sabotage:
+ _make_class_unpicklable(enum_class)
# instantiate them, checking for duplicates as we go
# we instantiate first instead of checking for duplicates first in case
# a custom __new__ is doing something funky with the values -- such as
@@ -320,11 +320,11 @@ class EnumMeta(type):
# double check that repr and friends are not the mixin's or various
# things break (such as pickle)
- # however, if the method is defined in the Enum itself, don't replace
- # it
+ # however, if the method is defined in the Enum itself, don't replace
+ # it
for name in ('__repr__', '__str__', '__format__', '__reduce_ex__'):
- if name in classdict:
- continue
+ if name in classdict:
+ continue
class_method = getattr(enum_class, name)
obj_method = getattr(member_type, name, None)
enum_method = getattr(first_enum, name, None)
@@ -356,8 +356,8 @@ class EnumMeta(type):
return True
def __call__(cls, value, names=None, *, module=None, qualname=None, type=None, start=1):
- """
- Either returns an existing member, or creates a new enum class.
+ """
+ Either returns an existing member, or creates a new enum class.
This method is used both when an enum class is given a value to match
to an enumeration member (i.e. Color(3)) and for the functional API
@@ -383,38 +383,38 @@ class EnumMeta(type):
if names is None: # simple value lookup
return cls.__new__(cls, value)
# otherwise, functional API: we're creating a new Enum type
- return cls._create_(
- value,
- names,
- module=module,
- qualname=qualname,
- type=type,
- start=start,
- )
+ return cls._create_(
+ value,
+ names,
+ module=module,
+ qualname=qualname,
+ type=type,
+ start=start,
+ )
def __contains__(cls, member):
if not isinstance(member, Enum):
- raise TypeError(
- "unsupported operand type(s) for 'in': '%s' and '%s'" % (
- type(member).__qualname__, cls.__class__.__qualname__))
+ raise TypeError(
+ "unsupported operand type(s) for 'in': '%s' and '%s'" % (
+ type(member).__qualname__, cls.__class__.__qualname__))
return isinstance(member, cls) and member._name_ in cls._member_map_
def __delattr__(cls, attr):
# nicer error message when someone tries to delete an attribute
# (see issue19025).
if attr in cls._member_map_:
- raise AttributeError("%s: cannot delete Enum member." % cls.__name__)
+ raise AttributeError("%s: cannot delete Enum member." % cls.__name__)
super().__delattr__(attr)
def __dir__(self):
- return (
- ['__class__', '__doc__', '__members__', '__module__']
- + self._member_names_
- )
+ return (
+ ['__class__', '__doc__', '__members__', '__module__']
+ + self._member_names_
+ )
def __getattr__(cls, name):
- """
- Return the enum member matching `name`
+ """
+ Return the enum member matching `name`
We use __getattr__ instead of descriptors or inserting into the enum
class' __dict__ in order to support `name` and `value` being both
@@ -432,9 +432,9 @@ class EnumMeta(type):
return cls._member_map_[name]
def __iter__(cls):
- """
- Returns members in definition order.
- """
+ """
+ Returns members in definition order.
+ """
return (cls._member_map_[name] for name in cls._member_names_)
def __len__(cls):
@@ -442,8 +442,8 @@ class EnumMeta(type):
@property
def __members__(cls):
- """
- Returns a mapping of member name->value.
+ """
+ Returns a mapping of member name->value.
This mapping lists all enum members, including aliases. Note that this
is a read-only view of the internal mapping.
@@ -454,14 +454,14 @@ class EnumMeta(type):
return "<enum %r>" % cls.__name__
def __reversed__(cls):
- """
- Returns members in reverse definition order.
- """
+ """
+ Returns members in reverse definition order.
+ """
return (cls._member_map_[name] for name in reversed(cls._member_names_))
def __setattr__(cls, name, value):
- """
- Block attempts to reassign Enum members.
+ """
+ Block attempts to reassign Enum members.
A simple assignment to the class namespace only changes one of the
several possible ways to get an Enum member from the Enum class,
@@ -473,8 +473,8 @@ class EnumMeta(type):
super().__setattr__(name, value)
def _create_(cls, class_name, names, *, module=None, qualname=None, type=None, start=1):
- """
- Convenience method to create a new Enum class.
+ """
+ Convenience method to create a new Enum class.
`names` can be:
@@ -486,7 +486,7 @@ class EnumMeta(type):
"""
metacls = cls.__class__
bases = (cls, ) if type is None else (type, cls)
- _, first_enum = cls._get_mixins_(cls, bases)
+ _, first_enum = cls._get_mixins_(cls, bases)
classdict = metacls.__prepare__(class_name, bases)
# special processing needed for names?
@@ -514,7 +514,7 @@ class EnumMeta(type):
if module is None:
try:
module = sys._getframe(2).f_globals['__name__']
- except (AttributeError, ValueError, KeyError):
+ except (AttributeError, ValueError, KeyError):
pass
if module is None:
_make_class_unpicklable(enum_class)
@@ -525,53 +525,53 @@ class EnumMeta(type):
return enum_class
- def _convert_(cls, name, module, filter, source=None):
- """
- Create a new Enum subclass that replaces a collection of global constants
- """
- # convert all constants from source (or module) that pass filter() to
- # a new Enum called name, and export the enum and its members back to
- # module;
- # also, replace the __reduce_ex__ method so unpickling works in
- # previous Python versions
- module_globals = vars(sys.modules[module])
- if source:
- source = vars(source)
- else:
- source = module_globals
- # _value2member_map_ is populated in the same order every time
- # for a consistent reverse mapping of number to name when there
- # are multiple names for the same number.
- members = [
- (name, value)
- for name, value in source.items()
- if filter(name)]
- try:
- # sort by value
- members.sort(key=lambda t: (t[1], t[0]))
- except TypeError:
- # unless some values aren't comparable, in which case sort by name
- members.sort(key=lambda t: t[0])
- cls = cls(name, members, module=module)
- cls.__reduce_ex__ = _reduce_ex_by_name
- module_globals.update(cls.__members__)
- module_globals[name] = cls
- return cls
-
- @staticmethod
- def _check_for_existing_members(class_name, bases):
- for chain in bases:
- for base in chain.__mro__:
- if issubclass(base, Enum) and base._member_names_:
- raise TypeError(
- "%s: cannot extend enumeration %r"
- % (class_name, base.__name__)
- )
-
+ def _convert_(cls, name, module, filter, source=None):
+ """
+ Create a new Enum subclass that replaces a collection of global constants
+ """
+ # convert all constants from source (or module) that pass filter() to
+ # a new Enum called name, and export the enum and its members back to
+ # module;
+ # also, replace the __reduce_ex__ method so unpickling works in
+ # previous Python versions
+ module_globals = vars(sys.modules[module])
+ if source:
+ source = vars(source)
+ else:
+ source = module_globals
+ # _value2member_map_ is populated in the same order every time
+ # for a consistent reverse mapping of number to name when there
+ # are multiple names for the same number.
+ members = [
+ (name, value)
+ for name, value in source.items()
+ if filter(name)]
+ try:
+ # sort by value
+ members.sort(key=lambda t: (t[1], t[0]))
+ except TypeError:
+ # unless some values aren't comparable, in which case sort by name
+ members.sort(key=lambda t: t[0])
+ cls = cls(name, members, module=module)
+ cls.__reduce_ex__ = _reduce_ex_by_name
+ module_globals.update(cls.__members__)
+ module_globals[name] = cls
+ return cls
+
@staticmethod
- def _get_mixins_(class_name, bases):
- """
- Returns the type for creating enum members, and the first inherited
+ def _check_for_existing_members(class_name, bases):
+ for chain in bases:
+ for base in chain.__mro__:
+ if issubclass(base, Enum) and base._member_names_:
+ raise TypeError(
+ "%s: cannot extend enumeration %r"
+ % (class_name, base.__name__)
+ )
+
+ @staticmethod
+ def _get_mixins_(class_name, bases):
+ """
+ Returns the type for creating enum members, and the first inherited
enum class.
bases: the tuple of bases that was given to __new__
@@ -580,29 +580,29 @@ class EnumMeta(type):
return object, Enum
def _find_data_type(bases):
- data_types = set()
+ data_types = set()
for chain in bases:
- candidate = None
+ candidate = None
for base in chain.__mro__:
if base is object:
continue
- elif issubclass(base, Enum):
- if base._member_type_ is not object:
- data_types.add(base._member_type_)
- break
+ elif issubclass(base, Enum):
+ if base._member_type_ is not object:
+ data_types.add(base._member_type_)
+ break
elif '__new__' in base.__dict__:
if issubclass(base, Enum):
continue
- data_types.add(candidate or base)
- break
- else:
- candidate = candidate or base
- if len(data_types) > 1:
- raise TypeError('%r: too many data types: %r' % (class_name, data_types))
- elif data_types:
- return data_types.pop()
- else:
- return None
+ data_types.add(candidate or base)
+ break
+ else:
+ candidate = candidate or base
+ if len(data_types) > 1:
+ raise TypeError('%r: too many data types: %r' % (class_name, data_types))
+ elif data_types:
+ return data_types.pop()
+ else:
+ return None
# ensure final parent class is an Enum derivative, find any concrete
# data type, and check that Enum has no members
@@ -617,8 +617,8 @@ class EnumMeta(type):
@staticmethod
def _find_new_(classdict, member_type, first_enum):
- """
- Returns the __new__ to be used for creating the enum members.
+ """
+ Returns the __new__ to be used for creating the enum members.
classdict: the class dictionary given to __new__
member_type: the data type whose __new__ will be used by default
@@ -662,8 +662,8 @@ class EnumMeta(type):
class Enum(metaclass=EnumMeta):
- """
- Generic enumeration.
+ """
+ Generic enumeration.
Derive from this class to define new enumerations.
"""
@@ -693,34 +693,34 @@ class Enum(metaclass=EnumMeta):
except Exception as e:
exc = e
result = None
- try:
- if isinstance(result, cls):
- return result
- else:
- ve_exc = ValueError("%r is not a valid %s" % (value, cls.__qualname__))
- if result is None and exc is None:
- raise ve_exc
- elif exc is None:
- exc = TypeError(
- 'error in %s._missing_: returned %r instead of None or a valid member'
- % (cls.__name__, result)
- )
- exc.__context__ = ve_exc
- raise exc
- finally:
- # ensure all variables that could hold an exception are destroyed
- exc = None
- ve_exc = None
+ try:
+ if isinstance(result, cls):
+ return result
+ else:
+ ve_exc = ValueError("%r is not a valid %s" % (value, cls.__qualname__))
+ if result is None and exc is None:
+ raise ve_exc
+ elif exc is None:
+ exc = TypeError(
+ 'error in %s._missing_: returned %r instead of None or a valid member'
+ % (cls.__name__, result)
+ )
+ exc.__context__ = ve_exc
+ raise exc
+ finally:
+ # ensure all variables that could hold an exception are destroyed
+ exc = None
+ ve_exc = None
def _generate_next_value_(name, start, count, last_values):
- """
- Generate the next value when not given.
-
- name: the name of the member
- start: the initial start value or None
- count: the number of existing members
- last_value: the last value assigned or None
- """
+ """
+ Generate the next value when not given.
+
+ name: the name of the member
+ start: the initial start value or None
+ count: the number of existing members
+ last_value: the last value assigned or None
+ """
for last_value in reversed(last_values):
try:
return last_value + 1
@@ -731,7 +731,7 @@ class Enum(metaclass=EnumMeta):
@classmethod
def _missing_(cls, value):
- return None
+ return None
def __repr__(self):
return "<%s.%s: %r>" % (
@@ -741,28 +741,28 @@ class Enum(metaclass=EnumMeta):
return "%s.%s" % (self.__class__.__name__, self._name_)
def __dir__(self):
- """
- Returns all members and all public methods
- """
+ """
+ Returns all members and all public methods
+ """
added_behavior = [
m
for cls in self.__class__.mro()
for m in cls.__dict__
if m[0] != '_' and m not in self._member_map_
- ] + [m for m in self.__dict__ if m[0] != '_']
+ ] + [m for m in self.__dict__ if m[0] != '_']
return (['__class__', '__doc__', '__module__'] + added_behavior)
def __format__(self, format_spec):
- """
- Returns format using actual value type unless __str__ has been overridden.
- """
+ """
+ Returns format using actual value type unless __str__ has been overridden.
+ """
# mixed-in Enums should use the mixed-in type's __format__, otherwise
# we can get strange results with the Enum name showing up instead of
# the value
- # pure Enum branch, or branch with __str__ explicitly overridden
- str_overridden = type(self).__str__ not in (Enum.__str__, Flag.__str__)
- if self._member_type_ is object or str_overridden:
+ # pure Enum branch, or branch with __str__ explicitly overridden
+ str_overridden = type(self).__str__ not in (Enum.__str__, Flag.__str__)
+ if self._member_type_ is object or str_overridden:
cls = str
val = str(self)
# mix-in branch
@@ -803,16 +803,16 @@ def _reduce_ex_by_name(self, proto):
return self.name
class Flag(Enum):
- """
- Support for flags
- """
+ """
+ Support for flags
+ """
def _generate_next_value_(name, start, count, last_values):
"""
Generate the next value when not given.
name: the name of the member
- start: the initial start value or None
+ start: the initial start value or None
count: the number of existing members
last_value: the last value assigned or None
"""
@@ -828,9 +828,9 @@ class Flag(Enum):
@classmethod
def _missing_(cls, value):
- """
- Returns member (possibly creating it) if one can be found for value.
- """
+ """
+ Returns member (possibly creating it) if one can be found for value.
+ """
original_value = value
if value < 0:
value = ~value
@@ -849,7 +849,7 @@ class Flag(Enum):
# verify all bits are accounted for
_, extra_flags = _decompose(cls, value)
if extra_flags:
- raise ValueError("%r is not a valid %s" % (value, cls.__qualname__))
+ raise ValueError("%r is not a valid %s" % (value, cls.__qualname__))
# construct a singleton enum pseudo-member
pseudo_member = object.__new__(cls)
pseudo_member._name_ = None
@@ -860,13 +860,13 @@ class Flag(Enum):
return pseudo_member
def __contains__(self, other):
- """
- Returns True if self has at least the same flags set as other.
- """
+ """
+ Returns True if self has at least the same flags set as other.
+ """
if not isinstance(other, self.__class__):
- raise TypeError(
- "unsupported operand type(s) for 'in': '%s' and '%s'" % (
- type(other).__qualname__, self.__class__.__qualname__))
+ raise TypeError(
+ "unsupported operand type(s) for 'in': '%s' and '%s'" % (
+ type(other).__qualname__, self.__class__.__qualname__))
return other._value_ & self._value_ == other._value_
def __repr__(self):
@@ -921,25 +921,25 @@ class Flag(Enum):
class IntFlag(int, Flag):
- """
- Support for integer-based Flags
- """
+ """
+ Support for integer-based Flags
+ """
@classmethod
def _missing_(cls, value):
- """
- Returns member (possibly creating it) if one can be found for value.
- """
+ """
+ Returns member (possibly creating it) if one can be found for value.
+ """
if not isinstance(value, int):
- raise ValueError("%r is not a valid %s" % (value, cls.__qualname__))
+ raise ValueError("%r is not a valid %s" % (value, cls.__qualname__))
new_member = cls._create_pseudo_member_(value)
return new_member
@classmethod
def _create_pseudo_member_(cls, value):
- """
- Create a composite member iff value contains only members.
- """
+ """
+ Create a composite member iff value contains only members.
+ """
pseudo_member = cls._value2member_map_.get(value, None)
if pseudo_member is None:
need_to_create = [value]
@@ -994,15 +994,15 @@ class IntFlag(int, Flag):
def _high_bit(value):
- """
- returns index of highest bit, or -1 if value is zero or negative
- """
+ """
+ returns index of highest bit, or -1 if value is zero or negative
+ """
return value.bit_length() - 1
def unique(enumeration):
- """
- Class decorator for enumerations ensuring unique member values.
- """
+ """
+ Class decorator for enumerations ensuring unique member values.
+ """
duplicates = []
for name, member in enumeration.__members__.items():
if name != member.name:
@@ -1015,26 +1015,26 @@ def unique(enumeration):
return enumeration
def _decompose(flag, value):
- """
- Extract all members from the value.
- """
+ """
+ Extract all members from the value.
+ """
# _decompose is only called if the value is not named
not_covered = value
negative = value < 0
members = []
- for member in flag:
- member_value = member.value
+ for member in flag:
+ member_value = member.value
if member_value and member_value & value == member_value:
members.append(member)
not_covered &= ~member_value
- if not negative:
- tmp = not_covered
- while tmp:
- flag_value = 2 ** _high_bit(tmp)
- if flag_value in flag._value2member_map_:
- members.append(flag._value2member_map_[flag_value])
- not_covered &= ~flag_value
- tmp &= ~flag_value
+ if not negative:
+ tmp = not_covered
+ while tmp:
+ flag_value = 2 ** _high_bit(tmp)
+ if flag_value in flag._value2member_map_:
+ members.append(flag._value2member_map_[flag_value])
+ not_covered &= ~flag_value
+ tmp &= ~flag_value
if not members and value in flag._value2member_map_:
members.append(flag._value2member_map_[value])
members.sort(key=lambda m: m._value_, reverse=True)
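_decompose above recovers member names from a bare integer: it collects every declared member whose bits are covered by the value and, for non-negative values, any leftover single bits that are already mapped. Its visible effect is the repr of combined flags (output shown for this Python version; later releases changed the format):

from enum import IntFlag

class Perm(IntFlag):
    R = 4
    W = 2
    X = 1

print(repr(Perm.R | Perm.W))  # <Perm.R|W: 6>
print(Perm(7))                # Perm.R|W|X, a composite built via _missing_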
diff --git a/contrib/tools/python3/src/Lib/filecmp.py b/contrib/tools/python3/src/Lib/filecmp.py
index 1893f909de..ee286da966 100644
--- a/contrib/tools/python3/src/Lib/filecmp.py
+++ b/contrib/tools/python3/src/Lib/filecmp.py
@@ -13,7 +13,7 @@ Functions:
import os
import stat
from itertools import filterfalse
-from types import GenericAlias
+from types import GenericAlias
__all__ = ['clear_cache', 'cmp', 'dircmp', 'cmpfiles', 'DEFAULT_IGNORES']
@@ -36,9 +36,9 @@ def cmp(f1, f2, shallow=True):
f2 -- Second file name
- shallow -- treat files as identical if their stat signatures (type, size,
- mtime) are identical. Otherwise, files are considered different
- if their sizes or contents differ. [default: True]
+ shallow -- treat files as identical if their stat signatures (type, size,
+ mtime) are identical. Otherwise, files are considered different
+ if their sizes or contents differ. [default: True]
Return value:
@@ -158,12 +158,12 @@ class dircmp:
ok = 1
try:
a_stat = os.stat(a_path)
- except OSError:
+ except OSError:
# print('Can\'t stat', a_path, ':', why.args[1])
ok = 0
try:
b_stat = os.stat(b_path)
- except OSError:
+ except OSError:
# print('Can\'t stat', b_path, ':', why.args[1])
ok = 0
@@ -249,9 +249,9 @@ class dircmp:
self.methodmap[attr](self)
return getattr(self, attr)
- __class_getitem__ = classmethod(GenericAlias)
-
-
+ __class_getitem__ = classmethod(GenericAlias)
+
+
def cmpfiles(a, b, common, shallow=True):
"""Compare common files in two directories.
diff --git a/contrib/tools/python3/src/Lib/fileinput.py b/contrib/tools/python3/src/Lib/fileinput.py
index 0c31f93ed8..3b40153cc2 100644
--- a/contrib/tools/python3/src/Lib/fileinput.py
+++ b/contrib/tools/python3/src/Lib/fileinput.py
@@ -8,9 +8,9 @@ Typical use is:
This iterates over the lines of all files listed in sys.argv[1:],
defaulting to sys.stdin if the list is empty. If a filename is '-' it
-is also replaced by sys.stdin and the optional arguments mode and
-openhook are ignored. To specify an alternative list of filenames,
-pass it as the argument to input(). A single file name is also allowed.
+is also replaced by sys.stdin and the optional arguments mode and
+openhook are ignored. To specify an alternative list of filenames,
+pass it as the argument to input(). A single file name is also allowed.
Functions filename(), lineno() return the filename and cumulative line
number of the line that has just been read; filelineno() returns its
@@ -73,7 +73,7 @@ XXX Possible additions:
"""
import sys, os
-from types import GenericAlias
+from types import GenericAlias
__all__ = ["input", "close", "nextfile", "filename", "lineno", "filelineno",
"fileno", "isfirstline", "isstdin", "FileInput", "hook_compressed",
@@ -81,7 +81,7 @@ __all__ = ["input", "close", "nextfile", "filename", "lineno", "filelineno",
_state = None
-def input(files=None, inplace=False, backup="", *, mode="r", openhook=None):
+def input(files=None, inplace=False, backup="", *, mode="r", openhook=None):
"""Return an instance of the FileInput class, which can be iterated.
The parameters are passed to the constructor of the FileInput class.
@@ -91,7 +91,7 @@ def input(files=None, inplace=False, backup="", *, mode="r", openhook=None):
global _state
if _state and _state._file:
raise RuntimeError("input() already active")
- _state = FileInput(files, inplace, backup, mode=mode, openhook=openhook)
+ _state = FileInput(files, inplace, backup, mode=mode, openhook=openhook)
return _state
def close():
@@ -173,7 +173,7 @@ def isstdin():
return _state.isstdin()
class FileInput:
- """FileInput([files[, inplace[, backup]]], *, mode=None, openhook=None)
+ """FileInput([files[, inplace[, backup]]], *, mode=None, openhook=None)
Class FileInput is the implementation of the module; its methods
filename(), lineno(), fileline(), isfirstline(), isstdin(), fileno(),
@@ -185,7 +185,7 @@ class FileInput:
sequential order; random access and readline() cannot be mixed.
"""
- def __init__(self, files=None, inplace=False, backup="", *,
+ def __init__(self, files=None, inplace=False, backup="", *,
mode="r", openhook=None):
if isinstance(files, str):
files = (files,)
@@ -218,7 +218,7 @@ class FileInput:
warnings.warn("'U' mode is deprecated",
DeprecationWarning, 2)
self._mode = mode
- self._write_mode = mode.replace('r', 'w') if 'U' not in mode else 'w'
+ self._write_mode = mode.replace('r', 'w') if 'U' not in mode else 'w'
if openhook:
if inplace:
raise ValueError("FileInput cannot use an opening hook in inplace mode")
@@ -256,13 +256,13 @@ class FileInput:
# repeat with next file
def __getitem__(self, i):
- import warnings
- warnings.warn(
- "Support for indexing FileInput objects is deprecated. "
- "Use iterator protocol instead.",
- DeprecationWarning,
- stacklevel=2
- )
+ import warnings
+ warnings.warn(
+ "Support for indexing FileInput objects is deprecated. "
+ "Use iterator protocol instead.",
+ DeprecationWarning,
+ stacklevel=2
+ )
if i != self.lineno():
raise RuntimeError("accessing lines out of order")
try:
@@ -345,16 +345,16 @@ class FileInput:
try:
perm = os.fstat(self._file.fileno()).st_mode
except OSError:
- self._output = open(self._filename, self._write_mode)
+ self._output = open(self._filename, self._write_mode)
else:
mode = os.O_CREAT | os.O_WRONLY | os.O_TRUNC
if hasattr(os, 'O_BINARY'):
mode |= os.O_BINARY
fd = os.open(self._filename, mode, perm)
- self._output = os.fdopen(fd, self._write_mode)
+ self._output = os.fdopen(fd, self._write_mode)
try:
- os.chmod(self._filename, perm)
+ os.chmod(self._filename, perm)
except OSError:
pass
self._savestdout = sys.stdout
@@ -392,9 +392,9 @@ class FileInput:
def isstdin(self):
return self._isstdin
- __class_getitem__ = classmethod(GenericAlias)
-
+ __class_getitem__ = classmethod(GenericAlias)
+
def hook_compressed(filename, mode):
ext = os.path.splitext(filename)[1]
if ext == '.gz':
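In inplace mode, FileInput swaps sys.stdout for an output file opened with the derived _write_mode, so a plain print() rewrites each input file while permissions are preserved where possible. Typical use (the file name is a placeholder):

import fileinput

# Uppercase every line of notes.txt in place; the original is kept as notes.txt.bak.
with fileinput.input(files=('notes.txt',), inplace=True, backup='.bak') as f:
    for line in f:
        print(line.rstrip('\n').upper())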
diff --git a/contrib/tools/python3/src/Lib/fnmatch.py b/contrib/tools/python3/src/Lib/fnmatch.py
index 7c52c23067..ec473b9532 100644
--- a/contrib/tools/python3/src/Lib/fnmatch.py
+++ b/contrib/tools/python3/src/Lib/fnmatch.py
@@ -16,12 +16,12 @@ import functools
__all__ = ["filter", "fnmatch", "fnmatchcase", "translate"]
-# Build a thread-safe incrementing counter to help create unique regexp group
-# names across calls.
-from itertools import count
-_nextgroupnum = count().__next__
-del count
-
+# Build a thread-safe incrementing counter to help create unique regexp group
+# names across calls.
+from itertools import count
+_nextgroupnum = count().__next__
+del count
+
def fnmatch(name, pat):
"""Test whether FILENAME matches PATTERN.
@@ -52,7 +52,7 @@ def _compile_pattern(pat):
return re.compile(res).match
def filter(names, pat):
- """Construct a list from those elements of the iterable NAMES that match PAT."""
+ """Construct a list from those elements of the iterable NAMES that match PAT."""
result = []
pat = os.path.normcase(pat)
match = _compile_pattern(pat)
@@ -83,19 +83,19 @@ def translate(pat):
There is no way to quote meta-characters.
"""
- STAR = object()
- res = []
- add = res.append
+ STAR = object()
+ res = []
+ add = res.append
i, n = 0, len(pat)
while i < n:
c = pat[i]
i = i+1
if c == '*':
- # compress consecutive `*` into one
- if (not res) or res[-1] is not STAR:
- add(STAR)
+ # compress consecutive `*` into one
+ if (not res) or res[-1] is not STAR:
+ add(STAR)
elif c == '?':
- add('.')
+ add('.')
elif c == '[':
j = i
if j < n and pat[j] == '!':
@@ -105,7 +105,7 @@ def translate(pat):
while j < n and pat[j] != ']':
j = j+1
if j >= n:
- add('\\[')
+ add('\\[')
else:
stuff = pat[i:j]
if '--' not in stuff:
@@ -132,52 +132,52 @@ def translate(pat):
stuff = '^' + stuff[1:]
elif stuff[0] in ('^', '['):
stuff = '\\' + stuff
- add(f'[{stuff}]')
- else:
- add(re.escape(c))
- assert i == n
-
- # Deal with STARs.
- inp = res
- res = []
- add = res.append
- i, n = 0, len(inp)
- # Fixed pieces at the start?
- while i < n and inp[i] is not STAR:
- add(inp[i])
- i += 1
- # Now deal with STAR fixed STAR fixed ...
- # For an interior `STAR fixed` pairing, we want to do a minimal
- # .*? match followed by `fixed`, with no possibility of backtracking.
- # We can't spell that directly, but can trick it into working by matching
- # .*?fixed
- # in a lookahead assertion, save the matched part in a group, then
- # consume that group via a backreference. If the overall match fails,
- # the lookahead assertion won't try alternatives. So the translation is:
- # (?=(?P<name>.*?fixed))(?P=name)
- # Group names are created as needed: g0, g1, g2, ...
- # The numbers are obtained from _nextgroupnum() to ensure they're unique
- # across calls and across threads. This is because people rely on the
- # undocumented ability to join multiple translate() results together via
- # "|" to build large regexps matching "one of many" shell patterns.
- while i < n:
- assert inp[i] is STAR
- i += 1
- if i == n:
- add(".*")
- break
- assert inp[i] is not STAR
- fixed = []
- while i < n and inp[i] is not STAR:
- fixed.append(inp[i])
- i += 1
- fixed = "".join(fixed)
- if i == n:
- add(".*")
- add(fixed)
+ add(f'[{stuff}]')
else:
- groupnum = _nextgroupnum()
- add(f"(?=(?P<g{groupnum}>.*?{fixed}))(?P=g{groupnum})")
- assert i == n
- res = "".join(res)
- return fr'(?s:{res})\Z'
+ add(re.escape(c))
+ assert i == n
+
+ # Deal with STARs.
+ inp = res
+ res = []
+ add = res.append
+ i, n = 0, len(inp)
+ # Fixed pieces at the start?
+ while i < n and inp[i] is not STAR:
+ add(inp[i])
+ i += 1
+ # Now deal with STAR fixed STAR fixed ...
+ # For an interior `STAR fixed` pairing, we want to do a minimal
+ # .*? match followed by `fixed`, with no possibility of backtracking.
+ # We can't spell that directly, but can trick it into working by matching
+ # .*?fixed
+ # in a lookahead assertion, save the matched part in a group, then
+ # consume that group via a backreference. If the overall match fails,
+ # the lookahead assertion won't try alternatives. So the translation is:
+ # (?=(?P<name>.*?fixed))(?P=name)
+ # Group names are created as needed: g0, g1, g2, ...
+ # The numbers are obtained from _nextgroupnum() to ensure they're unique
+ # across calls and across threads. This is because people rely on the
+ # undocumented ability to join multiple translate() results together via
+ # "|" to build large regexps matching "one of many" shell patterns.
+ while i < n:
+ assert inp[i] is STAR
+ i += 1
+ if i == n:
+ add(".*")
+ break
+ assert inp[i] is not STAR
+ fixed = []
+ while i < n and inp[i] is not STAR:
+ fixed.append(inp[i])
+ i += 1
+ fixed = "".join(fixed)
+ if i == n:
+ add(".*")
+ add(fixed)
+ else:
+ groupnum = _nextgroupnum()
+ add(f"(?=(?P<g{groupnum}>.*?{fixed}))(?P=g{groupnum})")
+ assert i == n
+ res = "".join(res)
+ return fr'(?s:{res})\Z'
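The comment block above motivates the unique group names: callers join several translate() results with "|" into one regexp. A small sketch of that usage, assuming an interpreter that includes this counter:

    import fnmatch
    import re

    # Each translate() call draws fresh group names (g0, g1, ...), so the
    # results can be OR-ed together without name collisions.
    pats = ["*.py", "test_*_unit.txt"]
    combined = re.compile("|".join(fnmatch.translate(p) for p in pats))
    print(bool(combined.match("fnmatch.py")))           # True
    print(bool(combined.match("test_glob_unit.txt")))   # True
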
diff --git a/contrib/tools/python3/src/Lib/fractions.py b/contrib/tools/python3/src/Lib/fractions.py
index de3e23b759..b5df2760d5 100644
--- a/contrib/tools/python3/src/Lib/fractions.py
+++ b/contrib/tools/python3/src/Lib/fractions.py
@@ -10,7 +10,7 @@ import operator
import re
import sys
-__all__ = ['Fraction']
+__all__ = ['Fraction']
# Constants related to the hash implementation; hash(x) is based
@@ -155,9 +155,9 @@ class Fraction(numbers.Rational):
if denominator == 0:
raise ZeroDivisionError('Fraction(%s, 0)' % numerator)
if _normalize:
- g = math.gcd(numerator, denominator)
- if denominator < 0:
- g = -g
+ g = math.gcd(numerator, denominator)
+ if denominator < 0:
+ g = -g
numerator //= g
denominator //= g
self._numerator = numerator
@@ -190,14 +190,14 @@ class Fraction(numbers.Rational):
(cls.__name__, dec, type(dec).__name__))
return cls(*dec.as_integer_ratio())
- def as_integer_ratio(self):
- """Return the integer ratio as a tuple.
-
- Return a tuple of two integers, whose ratio is equal to the
- Fraction and with a positive denominator.
- """
- return (self._numerator, self._denominator)
-
+ def as_integer_ratio(self):
+ """Return the integer ratio as a tuple.
+
+ Return a tuple of two integers, whose ratio is equal to the
+ Fraction and with a positive denominator.
+ """
+ return (self._numerator, self._denominator)
+
def limit_denominator(self, max_denominator=1000000):
"""Closest Fraction to self with denominator at most max_denominator.
@@ -409,27 +409,27 @@ class Fraction(numbers.Rational):
__truediv__, __rtruediv__ = _operator_fallbacks(_div, operator.truediv)
- def _floordiv(a, b):
+ def _floordiv(a, b):
"""a // b"""
- return (a.numerator * b.denominator) // (a.denominator * b.numerator)
+ return (a.numerator * b.denominator) // (a.denominator * b.numerator)
- __floordiv__, __rfloordiv__ = _operator_fallbacks(_floordiv, operator.floordiv)
+ __floordiv__, __rfloordiv__ = _operator_fallbacks(_floordiv, operator.floordiv)
- def _divmod(a, b):
- """(a // b, a % b)"""
- da, db = a.denominator, b.denominator
- div, n_mod = divmod(a.numerator * db, da * b.numerator)
- return div, Fraction(n_mod, da * db)
+ def _divmod(a, b):
+ """(a // b, a % b)"""
+ da, db = a.denominator, b.denominator
+ div, n_mod = divmod(a.numerator * db, da * b.numerator)
+ return div, Fraction(n_mod, da * db)
- __divmod__, __rdivmod__ = _operator_fallbacks(_divmod, divmod)
-
- def _mod(a, b):
+ __divmod__, __rdivmod__ = _operator_fallbacks(_divmod, divmod)
+
+ def _mod(a, b):
"""a % b"""
- da, db = a.denominator, b.denominator
- return Fraction((a.numerator * db) % (b.numerator * da), da * db)
-
- __mod__, __rmod__ = _operator_fallbacks(_mod, operator.mod)
+ da, db = a.denominator, b.denominator
+ return Fraction((a.numerator * db) % (b.numerator * da), da * db)
+ __mod__, __rmod__ = _operator_fallbacks(_mod, operator.mod)
+
def __pow__(a, b):
"""a ** b
@@ -494,16 +494,16 @@ class Fraction(numbers.Rational):
return a._numerator // a._denominator
def __floor__(a):
- """math.floor(a)"""
+ """math.floor(a)"""
return a.numerator // a.denominator
def __ceil__(a):
- """math.ceil(a)"""
+ """math.ceil(a)"""
# The negations cleverly convince floordiv to return the ceiling.
return -(-a.numerator // a.denominator)
def __round__(self, ndigits=None):
- """round(self, ndigits)
+ """round(self, ndigits)
Rounds half toward even.
"""
@@ -530,34 +530,34 @@ class Fraction(numbers.Rational):
def __hash__(self):
"""hash(self)"""
- # To make sure that the hash of a Fraction agrees with the hash
- # of a numerically equal integer, float or Decimal instance, we
- # follow the rules for numeric hashes outlined in the
- # documentation. (See library docs, 'Built-in Types').
+ # To make sure that the hash of a Fraction agrees with the hash
+ # of a numerically equal integer, float or Decimal instance, we
+ # follow the rules for numeric hashes outlined in the
+ # documentation. (See library docs, 'Built-in Types').
- try:
- dinv = pow(self._denominator, -1, _PyHASH_MODULUS)
- except ValueError:
- # ValueError means there is no modular inverse.
+ try:
+ dinv = pow(self._denominator, -1, _PyHASH_MODULUS)
+ except ValueError:
+ # ValueError means there is no modular inverse.
hash_ = _PyHASH_INF
else:
- # The general algorithm now specifies that the absolute value of
- # the hash is
- # (|N| * dinv) % P
- # where N is self._numerator and P is _PyHASH_MODULUS. That's
- # optimized here in two ways: first, for a non-negative int i,
- # hash(i) == i % P, but the int hash implementation doesn't need
- # to divide, and is faster than doing % P explicitly. So we do
- # hash(|N| * dinv)
- # instead. Second, N is unbounded, so its product with dinv may
- # be arbitrarily expensive to compute. The final answer is the
- # same if we use the bounded |N| % P instead, which can again
- # be done with an int hash() call. If 0 <= i < P, hash(i) == i,
- # so this nested hash() call wastes a bit of time making a
- # redundant copy when |N| < P, but can save an arbitrarily large
- # amount of computation for large |N|.
- hash_ = hash(hash(abs(self._numerator)) * dinv)
- result = hash_ if self._numerator >= 0 else -hash_
+ # The general algorithm now specifies that the absolute value of
+ # the hash is
+ # (|N| * dinv) % P
+ # where N is self._numerator and P is _PyHASH_MODULUS. That's
+ # optimized here in two ways: first, for a non-negative int i,
+ # hash(i) == i % P, but the int hash implementation doesn't need
+ # to divide, and is faster than doing % P explicitly. So we do
+ # hash(|N| * dinv)
+ # instead. Second, N is unbounded, so its product with dinv may
+ # be arbitrarily expensive to compute. The final answer is the
+ # same if we use the bounded |N| % P instead, which can again
+ # be done with an int hash() call. If 0 <= i < P, hash(i) == i,
+ # so this nested hash() call wastes a bit of time making a
+ # redundant copy when |N| < P, but can save an arbitrarily large
+ # amount of computation for large |N|.
+ hash_ = hash(hash(abs(self._numerator)) * dinv)
+ result = hash_ if self._numerator >= 0 else -hash_
return -2 if result == -1 else result
def __eq__(a, b):
@@ -621,9 +621,9 @@ class Fraction(numbers.Rational):
def __bool__(a):
"""a != 0"""
- # bpo-39274: Use bool() because (a._numerator != 0) can return an
- # object which is not a bool.
- return bool(a._numerator)
+ # bpo-39274: Use bool() because (a._numerator != 0) can return an
+ # object which is not a bool.
+ return bool(a._numerator)
# support for pickling, copy, and deepcopy
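The __hash__ comments above reduce to one observable rule: a Fraction hashes the same as any numerically equal int, float or Decimal. A quick check with no extra assumptions:

    from fractions import Fraction

    # Numeric hash consistency across types:
    assert hash(Fraction(3)) == hash(3)
    assert hash(Fraction(1, 2)) == hash(0.5)
    # as_integer_ratio() returns (numerator, denominator) with a positive
    # denominator, mirroring float.as_integer_ratio():
    print(Fraction(-3, 4).as_integer_ratio())  # (-3, 4)
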
diff --git a/contrib/tools/python3/src/Lib/ftplib.py b/contrib/tools/python3/src/Lib/ftplib.py
index 7c5a50715f..d57bad9e91 100644
--- a/contrib/tools/python3/src/Lib/ftplib.py
+++ b/contrib/tools/python3/src/Lib/ftplib.py
@@ -75,14 +75,14 @@ class FTP:
'''An FTP client class.
To create a connection, call the class using these arguments:
- host, user, passwd, acct, timeout, source_address, encoding
+ host, user, passwd, acct, timeout, source_address, encoding
The first four arguments are all strings, and have default value ''.
-    The parameter 'timeout' must be numeric and defaults to None if not
-    passed, meaning that no timeout will be set on any ftp socket(s).
+    The parameter 'timeout' must be numeric and defaults to None if not
+    passed, meaning that no timeout will be set on any ftp socket(s).
If a timeout is passed, then this is now the default timeout for all ftp
socket operations for this instance.
- The last parameter is the encoding of filenames, which defaults to utf-8.
+ The last parameter is the encoding of filenames, which defaults to utf-8.
Then use self.connect() with optional host and port argument.
@@ -102,19 +102,19 @@ class FTP:
sock = None
file = None
welcome = None
- passiveserver = True
- # Disables https://bugs.python.org/issue43285 security if set to True.
- trust_server_pasv_ipv4_address = False
+ passiveserver = True
+ # Disables https://bugs.python.org/issue43285 security if set to True.
+ trust_server_pasv_ipv4_address = False
def __init__(self, host='', user='', passwd='', acct='',
- timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None, *,
- encoding='utf-8'):
- """Initialization method (called by class instantiation).
- Initialize host to localhost, port to standard ftp port.
- Optional arguments are host (for connect()),
- and user, passwd, acct (for login()).
- """
- self.encoding = encoding
+ timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None, *,
+ encoding='utf-8'):
+ """Initialization method (called by class instantiation).
+ Initialize host to localhost, port to standard ftp port.
+ Optional arguments are host (for connect()),
+ and user, passwd, acct (for login()).
+ """
+ self.encoding = encoding
self.source_address = source_address
self.timeout = timeout
if host:
@@ -150,11 +150,11 @@ class FTP:
self.port = port
if timeout != -999:
self.timeout = timeout
- if self.timeout is not None and not self.timeout:
- raise ValueError('Non-blocking socket (timeout=0) is not supported')
+ if self.timeout is not None and not self.timeout:
+ raise ValueError('Non-blocking socket (timeout=0) is not supported')
if source_address is not None:
self.source_address = source_address
- sys.audit("ftplib.connect", self, self.host, self.port)
+ sys.audit("ftplib.connect", self, self.host, self.port)
self.sock = socket.create_connection((self.host, self.port), self.timeout,
source_address=self.source_address)
self.af = self.sock.family
@@ -195,7 +195,7 @@ class FTP:
def putline(self, line):
if '\r' in line or '\n' in line:
raise ValueError('an illegal newline character should not be contained')
- sys.audit("ftplib.sendcmd", self, line)
+ sys.audit("ftplib.sendcmd", self, line)
line = line + CRLF
if self.debugging > 1:
print('*put*', self.sanitize(line))
@@ -310,7 +310,7 @@ class FTP:
def makeport(self):
'''Create a new socket and send a PORT command for it.'''
- sock = socket.create_server(("", 0), family=self.af, backlog=1)
+ sock = socket.create_server(("", 0), family=self.af, backlog=1)
port = sock.getsockname()[1] # Get proper port
host = self.sock.getsockname()[0] # Get proper host
if self.af == socket.AF_INET:
@@ -322,13 +322,13 @@ class FTP:
return sock
def makepasv(self):
- """Internal: Does the PASV or EPSV handshake -> (address, port)"""
+ """Internal: Does the PASV or EPSV handshake -> (address, port)"""
if self.af == socket.AF_INET:
- untrusted_host, port = parse227(self.sendcmd('PASV'))
- if self.trust_server_pasv_ipv4_address:
- host = untrusted_host
- else:
- host = self.sock.getpeername()[0]
+ untrusted_host, port = parse227(self.sendcmd('PASV'))
+ if self.trust_server_pasv_ipv4_address:
+ host = untrusted_host
+ else:
+ host = self.sock.getpeername()[0]
else:
host, port = parse229(self.sendcmd('EPSV'), self.sock.getpeername())
return host, port
@@ -715,10 +715,10 @@ else:
'''
ssl_version = ssl.PROTOCOL_TLS_CLIENT
- def __init__(self, host='', user='', passwd='', acct='',
- keyfile=None, certfile=None, context=None,
- timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None, *,
- encoding='utf-8'):
+ def __init__(self, host='', user='', passwd='', acct='',
+ keyfile=None, certfile=None, context=None,
+ timeout=_GLOBAL_DEFAULT_TIMEOUT, source_address=None, *,
+ encoding='utf-8'):
if context is not None and keyfile is not None:
raise ValueError("context and keyfile arguments are mutually "
"exclusive")
@@ -737,13 +737,13 @@ else:
keyfile=keyfile)
self.context = context
self._prot_p = False
- super().__init__(host, user, passwd, acct,
- timeout, source_address, encoding=encoding)
+ super().__init__(host, user, passwd, acct,
+ timeout, source_address, encoding=encoding)
def login(self, user='', passwd='', acct='', secure=True):
if secure and not isinstance(self.sock, ssl.SSLSocket):
self.auth()
- return super().login(user, passwd, acct)
+ return super().login(user, passwd, acct)
def auth(self):
'''Set up secure control connection by using TLS/SSL.'''
@@ -753,7 +753,7 @@ else:
resp = self.voidcmd('AUTH TLS')
else:
resp = self.voidcmd('AUTH SSL')
- self.sock = self.context.wrap_socket(self.sock, server_hostname=self.host)
+ self.sock = self.context.wrap_socket(self.sock, server_hostname=self.host)
self.file = self.sock.makefile(mode='r', encoding=self.encoding)
return resp
@@ -790,7 +790,7 @@ else:
# --- Overridden FTP methods
def ntransfercmd(self, cmd, rest=None):
- conn, size = super().ntransfercmd(cmd, rest)
+ conn, size = super().ntransfercmd(cmd, rest)
if self._prot_p:
conn = self.context.wrap_socket(conn,
server_hostname=self.host)
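The bpo-43285 hardening above changes PASV handling: by default the client ignores the IPv4 address in the 227 reply and reconnects to the control connection's peer. A hedged sketch (the host name is hypothetical):

    from ftplib import FTP

    ftp = FTP("ftp.example.com", timeout=30)  # timeout=0 now raises ValueError
    ftp.login()                               # anonymous login
    # Opting back into trusting the server-supplied PASV address (unsafe):
    ftp.trust_server_pasv_ipv4_address = True
    ftp.quit()
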
diff --git a/contrib/tools/python3/src/Lib/functools.py b/contrib/tools/python3/src/Lib/functools.py
index 77e5035ebc..008555aeea 100644
--- a/contrib/tools/python3/src/Lib/functools.py
+++ b/contrib/tools/python3/src/Lib/functools.py
@@ -10,16 +10,16 @@
# See C source code for _functools credits/copyright
__all__ = ['update_wrapper', 'wraps', 'WRAPPER_ASSIGNMENTS', 'WRAPPER_UPDATES',
- 'total_ordering', 'cache', 'cmp_to_key', 'lru_cache', 'reduce',
- 'partial', 'partialmethod', 'singledispatch', 'singledispatchmethod',
- 'cached_property']
+ 'total_ordering', 'cache', 'cmp_to_key', 'lru_cache', 'reduce',
+ 'partial', 'partialmethod', 'singledispatch', 'singledispatchmethod',
+ 'cached_property']
from abc import get_cache_token
from collections import namedtuple
# import types, weakref # Deferred to single_dispatch()
from reprlib import recursive_repr
from _thread import RLock
-from types import GenericAlias
+from types import GenericAlias
################################################################################
@@ -88,84 +88,84 @@ def wraps(wrapped,
def _gt_from_lt(self, other, NotImplemented=NotImplemented):
'Return a > b. Computed by @total_ordering from (not a < b) and (a != b).'
- op_result = type(self).__lt__(self, other)
+ op_result = type(self).__lt__(self, other)
if op_result is NotImplemented:
return op_result
return not op_result and self != other
def _le_from_lt(self, other, NotImplemented=NotImplemented):
'Return a <= b. Computed by @total_ordering from (a < b) or (a == b).'
- op_result = type(self).__lt__(self, other)
- if op_result is NotImplemented:
- return op_result
+ op_result = type(self).__lt__(self, other)
+ if op_result is NotImplemented:
+ return op_result
return op_result or self == other
def _ge_from_lt(self, other, NotImplemented=NotImplemented):
'Return a >= b. Computed by @total_ordering from (not a < b).'
- op_result = type(self).__lt__(self, other)
+ op_result = type(self).__lt__(self, other)
if op_result is NotImplemented:
return op_result
return not op_result
def _ge_from_le(self, other, NotImplemented=NotImplemented):
'Return a >= b. Computed by @total_ordering from (not a <= b) or (a == b).'
- op_result = type(self).__le__(self, other)
+ op_result = type(self).__le__(self, other)
if op_result is NotImplemented:
return op_result
return not op_result or self == other
def _lt_from_le(self, other, NotImplemented=NotImplemented):
'Return a < b. Computed by @total_ordering from (a <= b) and (a != b).'
- op_result = type(self).__le__(self, other)
+ op_result = type(self).__le__(self, other)
if op_result is NotImplemented:
return op_result
return op_result and self != other
def _gt_from_le(self, other, NotImplemented=NotImplemented):
'Return a > b. Computed by @total_ordering from (not a <= b).'
- op_result = type(self).__le__(self, other)
+ op_result = type(self).__le__(self, other)
if op_result is NotImplemented:
return op_result
return not op_result
def _lt_from_gt(self, other, NotImplemented=NotImplemented):
'Return a < b. Computed by @total_ordering from (not a > b) and (a != b).'
- op_result = type(self).__gt__(self, other)
+ op_result = type(self).__gt__(self, other)
if op_result is NotImplemented:
return op_result
return not op_result and self != other
def _ge_from_gt(self, other, NotImplemented=NotImplemented):
'Return a >= b. Computed by @total_ordering from (a > b) or (a == b).'
- op_result = type(self).__gt__(self, other)
- if op_result is NotImplemented:
- return op_result
+ op_result = type(self).__gt__(self, other)
+ if op_result is NotImplemented:
+ return op_result
return op_result or self == other
def _le_from_gt(self, other, NotImplemented=NotImplemented):
'Return a <= b. Computed by @total_ordering from (not a > b).'
- op_result = type(self).__gt__(self, other)
+ op_result = type(self).__gt__(self, other)
if op_result is NotImplemented:
return op_result
return not op_result
def _le_from_ge(self, other, NotImplemented=NotImplemented):
'Return a <= b. Computed by @total_ordering from (not a >= b) or (a == b).'
- op_result = type(self).__ge__(self, other)
+ op_result = type(self).__ge__(self, other)
if op_result is NotImplemented:
return op_result
return not op_result or self == other
def _gt_from_ge(self, other, NotImplemented=NotImplemented):
'Return a > b. Computed by @total_ordering from (a >= b) and (a != b).'
- op_result = type(self).__ge__(self, other)
+ op_result = type(self).__ge__(self, other)
if op_result is NotImplemented:
return op_result
return op_result and self != other
def _lt_from_ge(self, other, NotImplemented=NotImplemented):
'Return a < b. Computed by @total_ordering from (not a >= b).'
- op_result = type(self).__ge__(self, other)
+ op_result = type(self).__ge__(self, other)
if op_result is NotImplemented:
return op_result
return not op_result
@@ -229,45 +229,45 @@ except ImportError:
################################################################################
-### reduce() sequence to a single item
-################################################################################
-
-_initial_missing = object()
-
-def reduce(function, sequence, initial=_initial_missing):
- """
- reduce(function, sequence[, initial]) -> value
-
- Apply a function of two arguments cumulatively to the items of a sequence,
- from left to right, so as to reduce the sequence to a single value.
- For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) calculates
- ((((1+2)+3)+4)+5). If initial is present, it is placed before the items
- of the sequence in the calculation, and serves as a default when the
- sequence is empty.
- """
-
- it = iter(sequence)
-
- if initial is _initial_missing:
- try:
- value = next(it)
- except StopIteration:
- raise TypeError("reduce() of empty sequence with no initial value") from None
- else:
- value = initial
-
- for element in it:
- value = function(value, element)
-
- return value
-
-try:
- from _functools import reduce
-except ImportError:
- pass
-
-
-################################################################################
+### reduce() sequence to a single item
+################################################################################
+
+_initial_missing = object()
+
+def reduce(function, sequence, initial=_initial_missing):
+ """
+ reduce(function, sequence[, initial]) -> value
+
+ Apply a function of two arguments cumulatively to the items of a sequence,
+ from left to right, so as to reduce the sequence to a single value.
+ For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) calculates
+ ((((1+2)+3)+4)+5). If initial is present, it is placed before the items
+ of the sequence in the calculation, and serves as a default when the
+ sequence is empty.
+ """
+
+ it = iter(sequence)
+
+ if initial is _initial_missing:
+ try:
+ value = next(it)
+ except StopIteration:
+ raise TypeError("reduce() of empty sequence with no initial value") from None
+ else:
+ value = initial
+
+ for element in it:
+ value = function(value, element)
+
+ return value
+
+try:
+ from _functools import reduce
+except ImportError:
+ pass
+
+
+################################################################################
### partial() argument application
################################################################################
@@ -279,13 +279,13 @@ class partial:
__slots__ = "func", "args", "keywords", "__dict__", "__weakref__"
- def __new__(cls, func, /, *args, **keywords):
+ def __new__(cls, func, /, *args, **keywords):
if not callable(func):
raise TypeError("the first argument must be callable")
if hasattr(func, "func"):
args = func.args + args
- keywords = {**func.keywords, **keywords}
+ keywords = {**func.keywords, **keywords}
func = func.func
self = super(partial, cls).__new__(cls)
@@ -295,9 +295,9 @@ class partial:
self.keywords = keywords
return self
- def __call__(self, /, *args, **keywords):
- keywords = {**self.keywords, **keywords}
- return self.func(*self.args, *args, **keywords)
+ def __call__(self, /, *args, **keywords):
+ keywords = {**self.keywords, **keywords}
+ return self.func(*self.args, *args, **keywords)
@recursive_repr()
def __repr__(self):
@@ -351,7 +351,7 @@ class partialmethod(object):
callables as instance methods.
"""
- def __init__(self, func, /, *args, **keywords):
+ def __init__(self, func, /, *args, **keywords):
if not callable(func) and not hasattr(func, "__get__"):
raise TypeError("{!r} is not callable or a descriptor"
.format(func))
@@ -364,7 +364,7 @@ class partialmethod(object):
# it's also more efficient since only one function will be called
self.func = func.func
self.args = func.args + args
- self.keywords = {**func.keywords, **keywords}
+ self.keywords = {**func.keywords, **keywords}
else:
self.func = func
self.args = args
@@ -382,14 +382,14 @@ class partialmethod(object):
keywords=keywords)
def _make_unbound_method(self):
- def _method(cls_or_self, /, *args, **keywords):
- keywords = {**self.keywords, **keywords}
- return self.func(cls_or_self, *self.args, *args, **keywords)
+ def _method(cls_or_self, /, *args, **keywords):
+ keywords = {**self.keywords, **keywords}
+ return self.func(cls_or_self, *self.args, *args, **keywords)
_method.__isabstractmethod__ = self.__isabstractmethod__
_method._partialmethod = self
return _method
- def __get__(self, obj, cls=None):
+ def __get__(self, obj, cls=None):
get = getattr(self.func, "__get__", None)
result = None
if get is not None:
@@ -412,16 +412,16 @@ class partialmethod(object):
def __isabstractmethod__(self):
return getattr(self.func, "__isabstractmethod__", False)
- __class_getitem__ = classmethod(GenericAlias)
-
-
-# Helper functions
-
-def _unwrap_partial(func):
- while isinstance(func, partial):
- func = func.func
- return func
+ __class_getitem__ = classmethod(GenericAlias)
+
+
+# Helper functions
+def _unwrap_partial(func):
+ while isinstance(func, partial):
+ func = func.func
+ return func
+
################################################################################
### LRU Cache function decorator
################################################################################
@@ -491,7 +491,7 @@ def lru_cache(maxsize=128, typed=False):
with f.cache_info(). Clear the cache and statistics with f.cache_clear().
Access the underlying function with f.__wrapped__.
- See: https://en.wikipedia.org/wiki/Cache_replacement_policies#Least_recently_used_(LRU)
+ See: https://en.wikipedia.org/wiki/Cache_replacement_policies#Least_recently_used_(LRU)
"""
@@ -501,22 +501,22 @@ def lru_cache(maxsize=128, typed=False):
# to allow the implementation to change (including a possible C version).
if isinstance(maxsize, int):
- # Negative maxsize is treated as 0
+ # Negative maxsize is treated as 0
if maxsize < 0:
maxsize = 0
- elif callable(maxsize) and isinstance(typed, bool):
- # The user_function was passed in directly via the maxsize argument
- user_function, maxsize = maxsize, 128
- wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
- wrapper.cache_parameters = lambda : {'maxsize': maxsize, 'typed': typed}
- return update_wrapper(wrapper, user_function)
+ elif callable(maxsize) and isinstance(typed, bool):
+ # The user_function was passed in directly via the maxsize argument
+ user_function, maxsize = maxsize, 128
+ wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
+ wrapper.cache_parameters = lambda : {'maxsize': maxsize, 'typed': typed}
+ return update_wrapper(wrapper, user_function)
elif maxsize is not None:
- raise TypeError(
- 'Expected first argument to be an integer, a callable, or None')
+ raise TypeError(
+ 'Expected first argument to be an integer, a callable, or None')
def decorating_function(user_function):
wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
- wrapper.cache_parameters = lambda : {'maxsize': maxsize, 'typed': typed}
+ wrapper.cache_parameters = lambda : {'maxsize': maxsize, 'typed': typed}
return update_wrapper(wrapper, user_function)
return decorating_function
@@ -644,22 +644,22 @@ except ImportError:
################################################################################
-### cache -- simplified access to the infinity cache
-################################################################################
-
-def cache(user_function, /):
- 'Simple lightweight unbounded cache. Sometimes called "memoize".'
- return lru_cache(maxsize=None)(user_function)
-
-
-################################################################################
+### cache -- simplified access to the infinity cache
+################################################################################
+
+def cache(user_function, /):
+ 'Simple lightweight unbounded cache. Sometimes called "memoize".'
+ return lru_cache(maxsize=None)(user_function)
+
+
+################################################################################
### singledispatch() - single-dispatch generic function decorator
################################################################################
def _c3_merge(sequences):
"""Merges MROs in *sequences* to a single MRO using the C3 algorithm.
- Adapted from https://www.python.org/download/releases/2.3/mro/.
+ Adapted from https://www.python.org/download/releases/2.3/mro/.
"""
result = []
@@ -739,7 +739,7 @@ def _compose_mro(cls, types):
# Remove entries which are already present in the __mro__ or unrelated.
def is_related(typ):
return (typ not in bases and hasattr(typ, '__mro__')
- and not isinstance(typ, GenericAlias)
+ and not isinstance(typ, GenericAlias)
and issubclass(cls, typ))
types = [n for n in types if is_related(n)]
# Remove entries which are strict bases of other entries (they will end up
@@ -837,9 +837,9 @@ def singledispatch(func):
dispatch_cache[cls] = impl
return impl
- def _is_valid_dispatch_type(cls):
- return isinstance(cls, type) and not isinstance(cls, GenericAlias)
-
+ def _is_valid_dispatch_type(cls):
+ return isinstance(cls, type) and not isinstance(cls, GenericAlias)
+
def register(cls, func=None):
"""generic_func.register(cls, func) -> func
@@ -847,15 +847,15 @@ def singledispatch(func):
"""
nonlocal cache_token
- if _is_valid_dispatch_type(cls):
- if func is None:
+ if _is_valid_dispatch_type(cls):
+ if func is None:
return lambda f: register(cls, f)
- else:
- if func is not None:
- raise TypeError(
- f"Invalid first argument to `register()`. "
- f"{cls!r} is not a class."
- )
+ else:
+ if func is not None:
+ raise TypeError(
+ f"Invalid first argument to `register()`. "
+ f"{cls!r} is not a class."
+ )
ann = getattr(cls, '__annotations__', {})
if not ann:
raise TypeError(
@@ -868,12 +868,12 @@ def singledispatch(func):
# only import typing if annotation parsing is necessary
from typing import get_type_hints
argname, cls = next(iter(get_type_hints(func).items()))
- if not _is_valid_dispatch_type(cls):
- raise TypeError(
- f"Invalid annotation for {argname!r}. "
- f"{cls!r} is not a class."
- )
-
+ if not _is_valid_dispatch_type(cls):
+ raise TypeError(
+ f"Invalid annotation for {argname!r}. "
+ f"{cls!r} is not a class."
+ )
+
registry[cls] = func
if cache_token is None and hasattr(cls, '__abstractmethods__'):
cache_token = get_cache_token()
@@ -895,110 +895,110 @@ def singledispatch(func):
wrapper._clear_cache = dispatch_cache.clear
update_wrapper(wrapper, func)
return wrapper
-
-
-# Descriptor version
-class singledispatchmethod:
- """Single-dispatch generic method descriptor.
-
- Supports wrapping existing descriptors and handles non-descriptor
- callables as instance methods.
- """
-
- def __init__(self, func):
- if not callable(func) and not hasattr(func, "__get__"):
- raise TypeError(f"{func!r} is not callable or a descriptor")
-
- self.dispatcher = singledispatch(func)
- self.func = func
-
- # bpo-45678: special-casing for classmethod/staticmethod in Python <=3.9,
- # as functools.update_wrapper doesn't work properly in singledispatchmethod.__get__
- # if it is applied to an unbound classmethod/staticmethod
- if isinstance(func, (staticmethod, classmethod)):
- self._wrapped_func = func.__func__
- else:
- self._wrapped_func = func
- def register(self, cls, method=None):
- """generic_method.register(cls, func) -> func
-
- Registers a new implementation for the given *cls* on a *generic_method*.
- """
- # bpo-39679: in Python <= 3.9, classmethods and staticmethods don't
- # inherit __annotations__ of the wrapped function (fixed in 3.10+ as
- # a side-effect of bpo-43682) but we need that for annotation-derived
- # singledispatches. So we add that just-in-time here.
- if isinstance(cls, (staticmethod, classmethod)):
- cls.__annotations__ = getattr(cls.__func__, '__annotations__', {})
- return self.dispatcher.register(cls, func=method)
-
- def __get__(self, obj, cls=None):
- def _method(*args, **kwargs):
- method = self.dispatcher.dispatch(args[0].__class__)
- return method.__get__(obj, cls)(*args, **kwargs)
-
- _method.__isabstractmethod__ = self.__isabstractmethod__
- _method.register = self.register
- update_wrapper(_method, self._wrapped_func)
- return _method
-
- @property
- def __isabstractmethod__(self):
- return getattr(self.func, '__isabstractmethod__', False)
-
-
-################################################################################
-### cached_property() - computed once per instance, cached as attribute
-################################################################################
-
-_NOT_FOUND = object()
-
-
-class cached_property:
- def __init__(self, func):
- self.func = func
- self.attrname = None
- self.__doc__ = func.__doc__
- self.lock = RLock()
-
- def __set_name__(self, owner, name):
- if self.attrname is None:
- self.attrname = name
- elif name != self.attrname:
- raise TypeError(
- "Cannot assign the same cached_property to two different names "
- f"({self.attrname!r} and {name!r})."
- )
-
- def __get__(self, instance, owner=None):
- if instance is None:
- return self
- if self.attrname is None:
- raise TypeError(
- "Cannot use cached_property instance without calling __set_name__ on it.")
- try:
- cache = instance.__dict__
- except AttributeError: # not all objects have __dict__ (e.g. class defines slots)
- msg = (
- f"No '__dict__' attribute on {type(instance).__name__!r} "
- f"instance to cache {self.attrname!r} property."
- )
- raise TypeError(msg) from None
- val = cache.get(self.attrname, _NOT_FOUND)
- if val is _NOT_FOUND:
- with self.lock:
- # check if another thread filled cache while we awaited lock
- val = cache.get(self.attrname, _NOT_FOUND)
- if val is _NOT_FOUND:
- val = self.func(instance)
- try:
- cache[self.attrname] = val
- except TypeError:
- msg = (
- f"The '__dict__' attribute on {type(instance).__name__!r} instance "
- f"does not support item assignment for caching {self.attrname!r} property."
- )
- raise TypeError(msg) from None
- return val
-
- __class_getitem__ = classmethod(GenericAlias)
+
+
+# Descriptor version
+class singledispatchmethod:
+ """Single-dispatch generic method descriptor.
+
+ Supports wrapping existing descriptors and handles non-descriptor
+ callables as instance methods.
+ """
+
+ def __init__(self, func):
+ if not callable(func) and not hasattr(func, "__get__"):
+ raise TypeError(f"{func!r} is not callable or a descriptor")
+
+ self.dispatcher = singledispatch(func)
+ self.func = func
+
+ # bpo-45678: special-casing for classmethod/staticmethod in Python <=3.9,
+ # as functools.update_wrapper doesn't work properly in singledispatchmethod.__get__
+ # if it is applied to an unbound classmethod/staticmethod
+ if isinstance(func, (staticmethod, classmethod)):
+ self._wrapped_func = func.__func__
+ else:
+ self._wrapped_func = func
+ def register(self, cls, method=None):
+ """generic_method.register(cls, func) -> func
+
+ Registers a new implementation for the given *cls* on a *generic_method*.
+ """
+ # bpo-39679: in Python <= 3.9, classmethods and staticmethods don't
+ # inherit __annotations__ of the wrapped function (fixed in 3.10+ as
+ # a side-effect of bpo-43682) but we need that for annotation-derived
+ # singledispatches. So we add that just-in-time here.
+ if isinstance(cls, (staticmethod, classmethod)):
+ cls.__annotations__ = getattr(cls.__func__, '__annotations__', {})
+ return self.dispatcher.register(cls, func=method)
+
+ def __get__(self, obj, cls=None):
+ def _method(*args, **kwargs):
+ method = self.dispatcher.dispatch(args[0].__class__)
+ return method.__get__(obj, cls)(*args, **kwargs)
+
+ _method.__isabstractmethod__ = self.__isabstractmethod__
+ _method.register = self.register
+ update_wrapper(_method, self._wrapped_func)
+ return _method
+
+ @property
+ def __isabstractmethod__(self):
+ return getattr(self.func, '__isabstractmethod__', False)
+
+
+################################################################################
+### cached_property() - computed once per instance, cached as attribute
+################################################################################
+
+_NOT_FOUND = object()
+
+
+class cached_property:
+ def __init__(self, func):
+ self.func = func
+ self.attrname = None
+ self.__doc__ = func.__doc__
+ self.lock = RLock()
+
+ def __set_name__(self, owner, name):
+ if self.attrname is None:
+ self.attrname = name
+ elif name != self.attrname:
+ raise TypeError(
+ "Cannot assign the same cached_property to two different names "
+ f"({self.attrname!r} and {name!r})."
+ )
+
+ def __get__(self, instance, owner=None):
+ if instance is None:
+ return self
+ if self.attrname is None:
+ raise TypeError(
+ "Cannot use cached_property instance without calling __set_name__ on it.")
+ try:
+ cache = instance.__dict__
+ except AttributeError: # not all objects have __dict__ (e.g. class defines slots)
+ msg = (
+ f"No '__dict__' attribute on {type(instance).__name__!r} "
+ f"instance to cache {self.attrname!r} property."
+ )
+ raise TypeError(msg) from None
+ val = cache.get(self.attrname, _NOT_FOUND)
+ if val is _NOT_FOUND:
+ with self.lock:
+ # check if another thread filled cache while we awaited lock
+ val = cache.get(self.attrname, _NOT_FOUND)
+ if val is _NOT_FOUND:
+ val = self.func(instance)
+ try:
+ cache[self.attrname] = val
+ except TypeError:
+ msg = (
+ f"The '__dict__' attribute on {type(instance).__name__!r} instance "
+ f"does not support item assignment for caching {self.attrname!r} property."
+ )
+ raise TypeError(msg) from None
+ return val
+
+ __class_getitem__ = classmethod(GenericAlias)
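Two of the additions above are easy to miss in the noise: lru_cache now accepts a bare callable (so @lru_cache without parentheses works), and cache() is shorthand for lru_cache(maxsize=None). A minimal sketch:

    import functools

    @functools.lru_cache           # no parentheses: maxsize defaults to 128
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    print(fib(30))                 # 832040
    print(fib.cache_parameters())  # {'maxsize': 128, 'typed': False}

    @functools.cache               # unbounded, same as lru_cache(maxsize=None)
    def square(x):
        return x * x
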
diff --git a/contrib/tools/python3/src/Lib/genericpath.py b/contrib/tools/python3/src/Lib/genericpath.py
index ce36451a3a..220a3fc292 100644
--- a/contrib/tools/python3/src/Lib/genericpath.py
+++ b/contrib/tools/python3/src/Lib/genericpath.py
@@ -17,7 +17,7 @@ def exists(path):
"""Test whether a path exists. Returns False for broken symbolic links"""
try:
os.stat(path)
- except (OSError, ValueError):
+ except (OSError, ValueError):
return False
return True
@@ -28,7 +28,7 @@ def isfile(path):
"""Test whether a path is a regular file"""
try:
st = os.stat(path)
- except (OSError, ValueError):
+ except (OSError, ValueError):
return False
return stat.S_ISREG(st.st_mode)
@@ -40,7 +40,7 @@ def isdir(s):
"""Return true if the pathname refers to an existing directory."""
try:
st = os.stat(s)
- except (OSError, ValueError):
+ except (OSError, ValueError):
return False
return stat.S_ISDIR(st.st_mode)
@@ -92,11 +92,11 @@ def samestat(s1, s2):
# Are two filenames really pointing to the same file?
def samefile(f1, f2):
- """Test whether two pathnames reference the same actual file or directory
-
- This is determined by the device number and i-node number and
- raises an exception if an os.stat() call on either pathname fails.
- """
+ """Test whether two pathnames reference the same actual file or directory
+
+ This is determined by the device number and i-node number and
+ raises an exception if an os.stat() call on either pathname fails.
+ """
s1 = os.stat(f1)
s2 = os.stat(f2)
return samestat(s1, s2)
@@ -149,7 +149,7 @@ def _check_arg_types(funcname, *args):
elif isinstance(s, bytes):
hasbytes = True
else:
- raise TypeError(f'{funcname}() argument must be str, bytes, or '
- f'os.PathLike object, not {s.__class__.__name__!r}') from None
+ raise TypeError(f'{funcname}() argument must be str, bytes, or '
+ f'os.PathLike object, not {s.__class__.__name__!r}') from None
if hasstr and hasbytes:
raise TypeError("Can't mix strings and bytes in path components") from None
diff --git a/contrib/tools/python3/src/Lib/getpass.py b/contrib/tools/python3/src/Lib/getpass.py
index 6970d8adfb..d7ce4ef2a6 100644
--- a/contrib/tools/python3/src/Lib/getpass.py
+++ b/contrib/tools/python3/src/Lib/getpass.py
@@ -52,7 +52,7 @@ def unix_getpass(prompt='Password: ', stream=None):
stack.enter_context(input)
if not stream:
stream = input
- except OSError:
+ except OSError:
# If that fails, see if stdin can be controlled.
stack.close()
try:
@@ -95,7 +95,7 @@ def unix_getpass(prompt='Password: ', stream=None):
def win_getpass(prompt='Password: ', stream=None):
- """Prompt for password with echo off, using Windows getwch()."""
+ """Prompt for password with echo off, using Windows getwch()."""
if sys.stdin is not sys.__stdin__:
return fallback_getpass(prompt, stream)
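The OSError handling above drives getpass's fallback chain: if /dev/tty cannot be put into no-echo mode, it tries stdin, and finally an echoing fallback that warns the user. A typical call, for context:

    import getpass

    # May fall back to an echoing prompt (with a GetPassWarning) if no
    # controllable terminal is available.
    pw = getpass.getpass("Password: ")
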
diff --git a/contrib/tools/python3/src/Lib/gettext.py b/contrib/tools/python3/src/Lib/gettext.py
index 9df74c91f0..d65e40338e 100644
--- a/contrib/tools/python3/src/Lib/gettext.py
+++ b/contrib/tools/python3/src/Lib/gettext.py
@@ -61,7 +61,7 @@ __all__ = ['NullTranslations', 'GNUTranslations', 'Catalog',
'bind_textdomain_codeset',
'dgettext', 'dngettext', 'gettext', 'lgettext', 'ldgettext',
'ldngettext', 'lngettext', 'ngettext',
- 'pgettext', 'dpgettext', 'npgettext', 'dnpgettext',
+ 'pgettext', 'dpgettext', 'npgettext', 'dnpgettext',
]
_default_localedir = os.path.join(sys.base_prefix, 'share', 'locale')
@@ -214,7 +214,7 @@ def c2py(plural):
def _expand_lang(loc):
- import locale
+ import locale
loc = locale.normalize(loc)
COMPONENT_CODESET = 1 << 0
COMPONENT_TERRITORY = 1 << 1
@@ -280,15 +280,15 @@ class NullTranslations:
return message
def lgettext(self, message):
- import warnings
- warnings.warn('lgettext() is deprecated, use gettext() instead',
- DeprecationWarning, 2)
- import locale
+ import warnings
+ warnings.warn('lgettext() is deprecated, use gettext() instead',
+ DeprecationWarning, 2)
+ import locale
if self._fallback:
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', r'.*\blgettext\b.*',
- DeprecationWarning)
- return self._fallback.lgettext(message)
+ with warnings.catch_warnings():
+ warnings.filterwarnings('ignore', r'.*\blgettext\b.*',
+ DeprecationWarning)
+ return self._fallback.lgettext(message)
if self._output_charset:
return message.encode(self._output_charset)
return message.encode(locale.getpreferredencoding())
@@ -302,15 +302,15 @@ class NullTranslations:
return msgid2
def lngettext(self, msgid1, msgid2, n):
- import warnings
- warnings.warn('lngettext() is deprecated, use ngettext() instead',
- DeprecationWarning, 2)
- import locale
+ import warnings
+ warnings.warn('lngettext() is deprecated, use ngettext() instead',
+ DeprecationWarning, 2)
+ import locale
if self._fallback:
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', r'.*\blngettext\b.*',
- DeprecationWarning)
- return self._fallback.lngettext(msgid1, msgid2, n)
+ with warnings.catch_warnings():
+ warnings.filterwarnings('ignore', r'.*\blngettext\b.*',
+ DeprecationWarning)
+ return self._fallback.lngettext(msgid1, msgid2, n)
if n == 1:
tmsg = msgid1
else:
@@ -319,19 +319,19 @@ class NullTranslations:
return tmsg.encode(self._output_charset)
return tmsg.encode(locale.getpreferredencoding())
- def pgettext(self, context, message):
- if self._fallback:
- return self._fallback.pgettext(context, message)
- return message
-
- def npgettext(self, context, msgid1, msgid2, n):
- if self._fallback:
- return self._fallback.npgettext(context, msgid1, msgid2, n)
- if n == 1:
- return msgid1
- else:
- return msgid2
-
+ def pgettext(self, context, message):
+ if self._fallback:
+ return self._fallback.pgettext(context, message)
+ return message
+
+ def npgettext(self, context, msgid1, msgid2, n):
+ if self._fallback:
+ return self._fallback.npgettext(context, msgid1, msgid2, n)
+ if n == 1:
+ return msgid1
+ else:
+ return msgid2
+
def info(self):
return self._info
@@ -339,25 +339,25 @@ class NullTranslations:
return self._charset
def output_charset(self):
- import warnings
- warnings.warn('output_charset() is deprecated',
- DeprecationWarning, 2)
+ import warnings
+ warnings.warn('output_charset() is deprecated',
+ DeprecationWarning, 2)
return self._output_charset
def set_output_charset(self, charset):
- import warnings
- warnings.warn('set_output_charset() is deprecated',
- DeprecationWarning, 2)
+ import warnings
+ warnings.warn('set_output_charset() is deprecated',
+ DeprecationWarning, 2)
self._output_charset = charset
def install(self, names=None):
import builtins
builtins.__dict__['_'] = self.gettext
- if names is not None:
- allowed = {'gettext', 'lgettext', 'lngettext',
- 'ngettext', 'npgettext', 'pgettext'}
- for name in allowed & set(names):
- builtins.__dict__[name] = getattr(self, name)
+ if names is not None:
+ allowed = {'gettext', 'lgettext', 'lngettext',
+ 'ngettext', 'npgettext', 'pgettext'}
+ for name in allowed & set(names):
+ builtins.__dict__[name] = getattr(self, name)
class GNUTranslations(NullTranslations):
@@ -365,10 +365,10 @@ class GNUTranslations(NullTranslations):
LE_MAGIC = 0x950412de
BE_MAGIC = 0xde120495
- # The encoding of a msgctxt and a msgid in a .mo file is
- # msgctxt + "\x04" + msgid (gettext version >= 0.15)
- CONTEXT = "%s\x04%s"
-
+ # The encoding of a msgctxt and a msgid in a .mo file is
+ # msgctxt + "\x04" + msgid (gettext version >= 0.15)
+ CONTEXT = "%s\x04%s"
+
# Acceptable .mo versions
VERSIONS = (0, 1)
@@ -424,9 +424,9 @@ class GNUTranslations(NullTranslations):
item = b_item.decode().strip()
if not item:
continue
- # Skip over comment lines:
- if item.startswith('#-#-#-#-#') and item.endswith('#-#-#-#-#'):
- continue
+ # Skip over comment lines:
+ if item.startswith('#-#-#-#-#') and item.endswith('#-#-#-#-#'):
+ continue
k = v = None
if ':' in item:
k, v = item.split(':', 1)
@@ -466,10 +466,10 @@ class GNUTranslations(NullTranslations):
transidx += 8
def lgettext(self, message):
- import warnings
- warnings.warn('lgettext() is deprecated, use gettext() instead',
- DeprecationWarning, 2)
- import locale
+ import warnings
+ warnings.warn('lgettext() is deprecated, use gettext() instead',
+ DeprecationWarning, 2)
+ import locale
missing = object()
tmsg = self._catalog.get(message, missing)
if tmsg is missing:
@@ -481,10 +481,10 @@ class GNUTranslations(NullTranslations):
return tmsg.encode(locale.getpreferredencoding())
def lngettext(self, msgid1, msgid2, n):
- import warnings
- warnings.warn('lngettext() is deprecated, use ngettext() instead',
- DeprecationWarning, 2)
- import locale
+ import warnings
+ warnings.warn('lngettext() is deprecated, use ngettext() instead',
+ DeprecationWarning, 2)
+ import locale
try:
tmsg = self._catalog[(msgid1, self.plural(n))]
except KeyError:
@@ -519,30 +519,30 @@ class GNUTranslations(NullTranslations):
tmsg = msgid2
return tmsg
- def pgettext(self, context, message):
- ctxt_msg_id = self.CONTEXT % (context, message)
- missing = object()
- tmsg = self._catalog.get(ctxt_msg_id, missing)
- if tmsg is missing:
- if self._fallback:
- return self._fallback.pgettext(context, message)
- return message
- return tmsg
-
- def npgettext(self, context, msgid1, msgid2, n):
- ctxt_msg_id = self.CONTEXT % (context, msgid1)
- try:
- tmsg = self._catalog[ctxt_msg_id, self.plural(n)]
- except KeyError:
- if self._fallback:
- return self._fallback.npgettext(context, msgid1, msgid2, n)
- if n == 1:
- tmsg = msgid1
- else:
- tmsg = msgid2
- return tmsg
-
-
+ def pgettext(self, context, message):
+ ctxt_msg_id = self.CONTEXT % (context, message)
+ missing = object()
+ tmsg = self._catalog.get(ctxt_msg_id, missing)
+ if tmsg is missing:
+ if self._fallback:
+ return self._fallback.pgettext(context, message)
+ return message
+ return tmsg
+
+ def npgettext(self, context, msgid1, msgid2, n):
+ ctxt_msg_id = self.CONTEXT % (context, msgid1)
+ try:
+ tmsg = self._catalog[ctxt_msg_id, self.plural(n)]
+ except KeyError:
+ if self._fallback:
+ return self._fallback.npgettext(context, msgid1, msgid2, n)
+ if n == 1:
+ tmsg = msgid1
+ else:
+ tmsg = msgid2
+ return tmsg
+
+
# Locate a .mo file using the gettext strategy
def find(domain, localedir=None, languages=None, all=False):
# Get some reasonable defaults for arguments that were not supplied
@@ -583,10 +583,10 @@ def find(domain, localedir=None, languages=None, all=False):
# a mapping between absolute .mo file path and Translation object
_translations = {}
-_unspecified = ['unspecified']
+_unspecified = ['unspecified']
def translation(domain, localedir=None, languages=None,
- class_=None, fallback=False, codeset=_unspecified):
+ class_=None, fallback=False, codeset=_unspecified):
if class_ is None:
class_ = GNUTranslations
mofiles = find(domain, localedir, languages, all=True)
@@ -616,15 +616,15 @@ def translation(domain, localedir=None, languages=None,
# are not used.
import copy
t = copy.copy(t)
- if codeset is not _unspecified:
- import warnings
- warnings.warn('parameter codeset is deprecated',
- DeprecationWarning, 2)
- if codeset:
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', r'.*\bset_output_charset\b.*',
- DeprecationWarning)
- t.set_output_charset(codeset)
+ if codeset is not _unspecified:
+ import warnings
+ warnings.warn('parameter codeset is deprecated',
+ DeprecationWarning, 2)
+ if codeset:
+ with warnings.catch_warnings():
+ warnings.filterwarnings('ignore', r'.*\bset_output_charset\b.*',
+ DeprecationWarning)
+ t.set_output_charset(codeset)
if result is None:
result = t
else:
@@ -632,7 +632,7 @@ def translation(domain, localedir=None, languages=None,
return result
-def install(domain, localedir=None, codeset=_unspecified, names=None):
+def install(domain, localedir=None, codeset=_unspecified, names=None):
t = translation(domain, localedir, fallback=True, codeset=codeset)
t.install(names)
@@ -661,9 +661,9 @@ def bindtextdomain(domain, localedir=None):
def bind_textdomain_codeset(domain, codeset=None):
- import warnings
- warnings.warn('bind_textdomain_codeset() is deprecated',
- DeprecationWarning, 2)
+ import warnings
+ warnings.warn('bind_textdomain_codeset() is deprecated',
+ DeprecationWarning, 2)
global _localecodesets
if codeset is not None:
_localecodesets[domain] = codeset
@@ -672,32 +672,32 @@ def bind_textdomain_codeset(domain, codeset=None):
def dgettext(domain, message):
try:
- t = translation(domain, _localedirs.get(domain, None))
+ t = translation(domain, _localedirs.get(domain, None))
except OSError:
return message
return t.gettext(message)
def ldgettext(domain, message):
- import warnings
- warnings.warn('ldgettext() is deprecated, use dgettext() instead',
- DeprecationWarning, 2)
- import locale
+ import warnings
+ warnings.warn('ldgettext() is deprecated, use dgettext() instead',
+ DeprecationWarning, 2)
+ import locale
codeset = _localecodesets.get(domain)
try:
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', r'.*\bparameter codeset\b.*',
- DeprecationWarning)
- t = translation(domain, _localedirs.get(domain, None), codeset=codeset)
+ with warnings.catch_warnings():
+ warnings.filterwarnings('ignore', r'.*\bparameter codeset\b.*',
+ DeprecationWarning)
+ t = translation(domain, _localedirs.get(domain, None), codeset=codeset)
except OSError:
return message.encode(codeset or locale.getpreferredencoding())
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', r'.*\blgettext\b.*',
- DeprecationWarning)
- return t.lgettext(message)
+ with warnings.catch_warnings():
+ warnings.filterwarnings('ignore', r'.*\blgettext\b.*',
+ DeprecationWarning)
+ return t.lgettext(message)
def dngettext(domain, msgid1, msgid2, n):
try:
- t = translation(domain, _localedirs.get(domain, None))
+ t = translation(domain, _localedirs.get(domain, None))
except OSError:
if n == 1:
return msgid1
@@ -706,80 +706,80 @@ def dngettext(domain, msgid1, msgid2, n):
return t.ngettext(msgid1, msgid2, n)
def ldngettext(domain, msgid1, msgid2, n):
- import warnings
- warnings.warn('ldngettext() is deprecated, use dngettext() instead',
- DeprecationWarning, 2)
- import locale
+ import warnings
+ warnings.warn('ldngettext() is deprecated, use dngettext() instead',
+ DeprecationWarning, 2)
+ import locale
codeset = _localecodesets.get(domain)
try:
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', r'.*\bparameter codeset\b.*',
- DeprecationWarning)
- t = translation(domain, _localedirs.get(domain, None), codeset=codeset)
+ with warnings.catch_warnings():
+ warnings.filterwarnings('ignore', r'.*\bparameter codeset\b.*',
+ DeprecationWarning)
+ t = translation(domain, _localedirs.get(domain, None), codeset=codeset)
except OSError:
if n == 1:
tmsg = msgid1
else:
tmsg = msgid2
return tmsg.encode(codeset or locale.getpreferredencoding())
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', r'.*\blngettext\b.*',
- DeprecationWarning)
- return t.lngettext(msgid1, msgid2, n)
-
-
-def dpgettext(domain, context, message):
- try:
- t = translation(domain, _localedirs.get(domain, None))
- except OSError:
- return message
- return t.pgettext(context, message)
-
-
-def dnpgettext(domain, context, msgid1, msgid2, n):
- try:
- t = translation(domain, _localedirs.get(domain, None))
- except OSError:
- if n == 1:
- return msgid1
- else:
- return msgid2
- return t.npgettext(context, msgid1, msgid2, n)
-
-
+ with warnings.catch_warnings():
+ warnings.filterwarnings('ignore', r'.*\blngettext\b.*',
+ DeprecationWarning)
+ return t.lngettext(msgid1, msgid2, n)
+
+
+def dpgettext(domain, context, message):
+ try:
+ t = translation(domain, _localedirs.get(domain, None))
+ except OSError:
+ return message
+ return t.pgettext(context, message)
+
+
+def dnpgettext(domain, context, msgid1, msgid2, n):
+ try:
+ t = translation(domain, _localedirs.get(domain, None))
+ except OSError:
+ if n == 1:
+ return msgid1
+ else:
+ return msgid2
+ return t.npgettext(context, msgid1, msgid2, n)
+
+
def gettext(message):
return dgettext(_current_domain, message)
def lgettext(message):
- import warnings
- warnings.warn('lgettext() is deprecated, use gettext() instead',
- DeprecationWarning, 2)
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', r'.*\bldgettext\b.*',
- DeprecationWarning)
- return ldgettext(_current_domain, message)
+ import warnings
+ warnings.warn('lgettext() is deprecated, use gettext() instead',
+ DeprecationWarning, 2)
+ with warnings.catch_warnings():
+ warnings.filterwarnings('ignore', r'.*\bldgettext\b.*',
+ DeprecationWarning)
+ return ldgettext(_current_domain, message)
def ngettext(msgid1, msgid2, n):
return dngettext(_current_domain, msgid1, msgid2, n)
def lngettext(msgid1, msgid2, n):
- import warnings
- warnings.warn('lngettext() is deprecated, use ngettext() instead',
- DeprecationWarning, 2)
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', r'.*\bldngettext\b.*',
- DeprecationWarning)
- return ldngettext(_current_domain, msgid1, msgid2, n)
-
-
-def pgettext(context, message):
- return dpgettext(_current_domain, context, message)
-
-
-def npgettext(context, msgid1, msgid2, n):
- return dnpgettext(_current_domain, context, msgid1, msgid2, n)
-
-
+ import warnings
+ warnings.warn('lngettext() is deprecated, use ngettext() instead',
+ DeprecationWarning, 2)
+ with warnings.catch_warnings():
+ warnings.filterwarnings('ignore', r'.*\bldngettext\b.*',
+ DeprecationWarning)
+ return ldngettext(_current_domain, msgid1, msgid2, n)
+
+
+def pgettext(context, message):
+ return dpgettext(_current_domain, context, message)
+
+
+def npgettext(context, msgid1, msgid2, n):
+ return dnpgettext(_current_domain, context, msgid1, msgid2, n)
+
+
# dcgettext() has been deemed unnecessary and is not implemented.
# James Henstridge's Catalog constructor from GNOME gettext. Documented usage
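The pgettext family restored above adds message contexts: a msgctxt lets one msgid ("Open" as a verb vs. an adjective, say) carry different translations. With no catalog loaded, NullTranslations falls through to the msgid, so this sketch runs standalone:

    import gettext

    t = gettext.NullTranslations()
    print(t.pgettext("menu", "Open"))                          # "Open"
    print(t.npgettext("files", "%d file", "%d files", 2) % 2)  # "2 files"
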
diff --git a/contrib/tools/python3/src/Lib/glob.py b/contrib/tools/python3/src/Lib/glob.py
index 1237061130..535cbcb420 100644
--- a/contrib/tools/python3/src/Lib/glob.py
+++ b/contrib/tools/python3/src/Lib/glob.py
@@ -1,10 +1,10 @@
"""Filename globbing utility."""
-import contextlib
+import contextlib
import os
import re
import fnmatch
-import sys
+import sys
__all__ = ["glob", "iglob", "escape"]
@@ -32,7 +32,7 @@ def iglob(pathname, *, recursive=False):
If recursive is true, the pattern '**' will match any files and
zero or more directories and subdirectories.
"""
- sys.audit("glob.glob", pathname, recursive)
+ sys.audit("glob.glob", pathname, recursive)
it = _iglob(pathname, recursive, False)
if recursive and _isrecursive(pathname):
s = next(it) # skip empty string
@@ -80,7 +80,7 @@ def _iglob(pathname, recursive, dironly):
# takes a literal basename (so it only has to check for its existence).
def _glob1(dirname, pattern, dironly):
- names = _listdir(dirname, dironly)
+ names = _listdir(dirname, dironly)
if not _ishidden(pattern):
names = (x for x in names if not _ishidden(x))
return fnmatch.filter(names, pattern)
@@ -131,13 +131,13 @@ def _iterdir(dirname, dironly):
except OSError:
return
-def _listdir(dirname, dironly):
- with contextlib.closing(_iterdir(dirname, dironly)) as it:
- return list(it)
-
+def _listdir(dirname, dironly):
+ with contextlib.closing(_iterdir(dirname, dironly)) as it:
+ return list(it)
+
# Recursively yields relative pathnames inside a literal directory.
def _rlistdir(dirname, dironly):
- names = _listdir(dirname, dironly)
+ names = _listdir(dirname, dironly)
for x in names:
if not _ishidden(x):
yield x
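
A small sketch of the audited API above: iglob() emits a "glob.glob"
audit event, and the '**' pattern only recurses when recursive=True is
passed:

    import glob

    # Lazily yield every .py file below the current directory.
    for path in glob.iglob("**/*.py", recursive=True):
        print(path)
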
diff --git a/contrib/tools/python3/src/Lib/graphlib.py b/contrib/tools/python3/src/Lib/graphlib.py
index 1c5d9a413c..eabef4a716 100644
--- a/contrib/tools/python3/src/Lib/graphlib.py
+++ b/contrib/tools/python3/src/Lib/graphlib.py
@@ -1,246 +1,246 @@
-__all__ = ["TopologicalSorter", "CycleError"]
-
-_NODE_OUT = -1
-_NODE_DONE = -2
-
-
-class _NodeInfo:
- __slots__ = "node", "npredecessors", "successors"
-
- def __init__(self, node):
- # The node this class is augmenting.
- self.node = node
-
- # Number of predecessors, generally >= 0. When this value falls to 0,
- # and is returned by get_ready(), this is set to _NODE_OUT and when the
- # node is marked done by a call to done(), set to _NODE_DONE.
- self.npredecessors = 0
-
- # List of successor nodes. The list can contain duplicated elements as
- # long as they're all reflected in the successor's npredecessors attribute.
- self.successors = []
-
-
-class CycleError(ValueError):
- """Subclass of ValueError raised by TopologicalSorter.prepare if cycles
- exist in the working graph.
-
-    If multiple cycles exist, only one of them (chosen arbitrarily) will be reported
-    and included in the exception. The detected cycle can be accessed via the second
-    element in the *args* attribute of the exception instance and consists of a list
-    of nodes, such that each node is, in the graph, an immediate predecessor of the
- next node in the list. In the reported list, the first and the last node will be
- the same, to make it clear that it is cyclic.
- """
-
- pass
-
-
-class TopologicalSorter:
- """Provides functionality to topologically sort a graph of hashable nodes"""
-
- def __init__(self, graph=None):
- self._node2info = {}
- self._ready_nodes = None
- self._npassedout = 0
- self._nfinished = 0
-
- if graph is not None:
- for node, predecessors in graph.items():
- self.add(node, *predecessors)
-
- def _get_nodeinfo(self, node):
- if (result := self._node2info.get(node)) is None:
- self._node2info[node] = result = _NodeInfo(node)
- return result
-
- def add(self, node, *predecessors):
- """Add a new node and its predecessors to the graph.
-
- Both the *node* and all elements in *predecessors* must be hashable.
-
- If called multiple times with the same node argument, the set of dependencies
- will be the union of all dependencies passed in.
-
- It is possible to add a node with no dependencies (*predecessors* is not provided)
- as well as provide a dependency twice. If a node that has not been provided before
- is included among *predecessors* it will be automatically added to the graph with
- no predecessors of its own.
-
- Raises ValueError if called after "prepare".
- """
- if self._ready_nodes is not None:
- raise ValueError("Nodes cannot be added after a call to prepare()")
-
- # Create the node -> predecessor edges
- nodeinfo = self._get_nodeinfo(node)
- nodeinfo.npredecessors += len(predecessors)
-
- # Create the predecessor -> node edges
- for pred in predecessors:
- pred_info = self._get_nodeinfo(pred)
- pred_info.successors.append(node)
-
- def prepare(self):
- """Mark the graph as finished and check for cycles in the graph.
-
- If any cycle is detected, "CycleError" will be raised, but "get_ready" can
- still be used to obtain as many nodes as possible until cycles block more
- progress. After a call to this function, the graph cannot be modified and
- therefore no more nodes can be added using "add".
- """
- if self._ready_nodes is not None:
- raise ValueError("cannot prepare() more than once")
-
- self._ready_nodes = [
- i.node for i in self._node2info.values() if i.npredecessors == 0
- ]
- # ready_nodes is set before we look for cycles on purpose:
- # if the user wants to catch the CycleError, that's fine,
- # they can continue using the instance to grab as many
- # nodes as possible before cycles block more progress
- cycle = self._find_cycle()
- if cycle:
-            raise CycleError("nodes are in a cycle", cycle)
-
- def get_ready(self):
- """Return a tuple of all the nodes that are ready.
-
- Initially it returns all nodes with no predecessors; once those are marked
- as processed by calling "done", further calls will return all new nodes that
- have all their predecessors already processed. Once no more progress can be made,
- empty tuples are returned.
-
- Raises ValueError if called without calling "prepare" previously.
- """
- if self._ready_nodes is None:
- raise ValueError("prepare() must be called first")
-
- # Get the nodes that are ready and mark them
- result = tuple(self._ready_nodes)
- n2i = self._node2info
- for node in result:
- n2i[node].npredecessors = _NODE_OUT
-
- # Clean the list of nodes that are ready and update
- # the counter of nodes that we have returned.
- self._ready_nodes.clear()
- self._npassedout += len(result)
-
- return result
-
- def is_active(self):
- """Return ``True`` if more progress can be made and ``False`` otherwise.
-
- Progress can be made if cycles do not block the resolution and either there
- are still nodes ready that haven't yet been returned by "get_ready" or the
- number of nodes marked "done" is less than the number that have been returned
- by "get_ready".
-
- Raises ValueError if called without calling "prepare" previously.
- """
- if self._ready_nodes is None:
- raise ValueError("prepare() must be called first")
- return self._nfinished < self._npassedout or bool(self._ready_nodes)
-
- def __bool__(self):
- return self.is_active()
-
- def done(self, *nodes):
- """Marks a set of nodes returned by "get_ready" as processed.
-
-        This method unblocks any successor of each node in *nodes*, allowing those
-        successors to be returned in the future by calls to "get_ready".
-
-        Raises :exc:`ValueError` if any node in *nodes* has already been marked as
-        processed by a previous call to this method, if a node was not added to the
-        graph by using "add", if called without calling "prepare" previously, or if a
-        node has not yet been returned by "get_ready".
- """
-
- if self._ready_nodes is None:
- raise ValueError("prepare() must be called first")
-
- n2i = self._node2info
-
- for node in nodes:
-
-            # Check if we know about this node (i.e. it was added previously using add())
- if (nodeinfo := n2i.get(node)) is None:
- raise ValueError(f"node {node!r} was not added using add()")
-
-            # If the node has not been returned (marked as ready) previously, inform the user.
- stat = nodeinfo.npredecessors
- if stat != _NODE_OUT:
- if stat >= 0:
- raise ValueError(
- f"node {node!r} was not passed out (still not ready)"
- )
- elif stat == _NODE_DONE:
- raise ValueError(f"node {node!r} was already marked done")
- else:
- assert False, f"node {node!r}: unknown status {stat}"
-
- # Mark the node as processed
- nodeinfo.npredecessors = _NODE_DONE
-
- # Go to all the successors and reduce the number of predecessors, collecting all the ones
- # that are ready to be returned in the next get_ready() call.
- for successor in nodeinfo.successors:
- successor_info = n2i[successor]
- successor_info.npredecessors -= 1
- if successor_info.npredecessors == 0:
- self._ready_nodes.append(successor)
- self._nfinished += 1
-
- def _find_cycle(self):
- n2i = self._node2info
- stack = []
- itstack = []
- seen = set()
- node2stacki = {}
-
- for node in n2i:
- if node in seen:
- continue
-
- while True:
- if node in seen:
-                    # If we have already seen the node and it is in the
-                    # current stack, we have found a cycle.
- if node in node2stacki:
- return stack[node2stacki[node] :] + [node]
- # else go on to get next successor
- else:
- seen.add(node)
- itstack.append(iter(n2i[node].successors).__next__)
- node2stacki[node] = len(stack)
- stack.append(node)
-
- # Backtrack to the topmost stack entry with
-                # at least one more successor.
- while stack:
- try:
- node = itstack[-1]()
- break
- except StopIteration:
- del node2stacki[stack.pop()]
- itstack.pop()
- else:
- break
- return None
-
- def static_order(self):
- """Returns an iterable of nodes in a topological order.
-
- The particular order that is returned may depend on the specific
- order in which the items were inserted in the graph.
-
-        Using this method does not require calling "prepare" or "done". If any
- cycle is detected, :exc:`CycleError` will be raised.
- """
- self.prepare()
- while self.is_active():
- node_group = self.get_ready()
- yield from node_group
- self.done(*node_group)
+__all__ = ["TopologicalSorter", "CycleError"]
+
+_NODE_OUT = -1
+_NODE_DONE = -2
+
+
+class _NodeInfo:
+ __slots__ = "node", "npredecessors", "successors"
+
+ def __init__(self, node):
+ # The node this class is augmenting.
+ self.node = node
+
+ # Number of predecessors, generally >= 0. When this value falls to 0,
+ # and is returned by get_ready(), this is set to _NODE_OUT and when the
+ # node is marked done by a call to done(), set to _NODE_DONE.
+ self.npredecessors = 0
+
+ # List of successor nodes. The list can contain duplicated elements as
+ # long as they're all reflected in the successor's npredecessors attribute.
+ self.successors = []
+
+
+class CycleError(ValueError):
+ """Subclass of ValueError raised by TopologicalSorter.prepare if cycles
+ exist in the working graph.
+
+    If multiple cycles exist, only one of them (chosen arbitrarily) will be reported
+    and included in the exception. The detected cycle can be accessed via the second
+    element in the *args* attribute of the exception instance and consists of a list
+    of nodes, such that each node is, in the graph, an immediate predecessor of the
+ next node in the list. In the reported list, the first and the last node will be
+ the same, to make it clear that it is cyclic.
+ """
+
+ pass
+
+
+class TopologicalSorter:
+ """Provides functionality to topologically sort a graph of hashable nodes"""
+
+ def __init__(self, graph=None):
+ self._node2info = {}
+ self._ready_nodes = None
+ self._npassedout = 0
+ self._nfinished = 0
+
+ if graph is not None:
+ for node, predecessors in graph.items():
+ self.add(node, *predecessors)
+
+ def _get_nodeinfo(self, node):
+ if (result := self._node2info.get(node)) is None:
+ self._node2info[node] = result = _NodeInfo(node)
+ return result
+
+ def add(self, node, *predecessors):
+ """Add a new node and its predecessors to the graph.
+
+ Both the *node* and all elements in *predecessors* must be hashable.
+
+ If called multiple times with the same node argument, the set of dependencies
+ will be the union of all dependencies passed in.
+
+ It is possible to add a node with no dependencies (*predecessors* is not provided)
+ as well as provide a dependency twice. If a node that has not been provided before
+ is included among *predecessors* it will be automatically added to the graph with
+ no predecessors of its own.
+
+ Raises ValueError if called after "prepare".
+ """
+ if self._ready_nodes is not None:
+ raise ValueError("Nodes cannot be added after a call to prepare()")
+
+ # Create the node -> predecessor edges
+ nodeinfo = self._get_nodeinfo(node)
+ nodeinfo.npredecessors += len(predecessors)
+
+ # Create the predecessor -> node edges
+ for pred in predecessors:
+ pred_info = self._get_nodeinfo(pred)
+ pred_info.successors.append(node)
+
+ def prepare(self):
+ """Mark the graph as finished and check for cycles in the graph.
+
+ If any cycle is detected, "CycleError" will be raised, but "get_ready" can
+ still be used to obtain as many nodes as possible until cycles block more
+ progress. After a call to this function, the graph cannot be modified and
+ therefore no more nodes can be added using "add".
+ """
+ if self._ready_nodes is not None:
+ raise ValueError("cannot prepare() more than once")
+
+ self._ready_nodes = [
+ i.node for i in self._node2info.values() if i.npredecessors == 0
+ ]
+ # ready_nodes is set before we look for cycles on purpose:
+ # if the user wants to catch the CycleError, that's fine,
+ # they can continue using the instance to grab as many
+ # nodes as possible before cycles block more progress
+ cycle = self._find_cycle()
+ if cycle:
+            raise CycleError("nodes are in a cycle", cycle)
+
+ def get_ready(self):
+ """Return a tuple of all the nodes that are ready.
+
+ Initially it returns all nodes with no predecessors; once those are marked
+ as processed by calling "done", further calls will return all new nodes that
+ have all their predecessors already processed. Once no more progress can be made,
+ empty tuples are returned.
+
+ Raises ValueError if called without calling "prepare" previously.
+ """
+ if self._ready_nodes is None:
+ raise ValueError("prepare() must be called first")
+
+ # Get the nodes that are ready and mark them
+ result = tuple(self._ready_nodes)
+ n2i = self._node2info
+ for node in result:
+ n2i[node].npredecessors = _NODE_OUT
+
+ # Clean the list of nodes that are ready and update
+ # the counter of nodes that we have returned.
+ self._ready_nodes.clear()
+ self._npassedout += len(result)
+
+ return result
+
+ def is_active(self):
+ """Return ``True`` if more progress can be made and ``False`` otherwise.
+
+ Progress can be made if cycles do not block the resolution and either there
+ are still nodes ready that haven't yet been returned by "get_ready" or the
+ number of nodes marked "done" is less than the number that have been returned
+ by "get_ready".
+
+ Raises ValueError if called without calling "prepare" previously.
+ """
+ if self._ready_nodes is None:
+ raise ValueError("prepare() must be called first")
+ return self._nfinished < self._npassedout or bool(self._ready_nodes)
+
+ def __bool__(self):
+ return self.is_active()
+
+ def done(self, *nodes):
+ """Marks a set of nodes returned by "get_ready" as processed.
+
+        This method unblocks any successor of each node in *nodes*, allowing those
+        successors to be returned in the future by calls to "get_ready".
+
+        Raises :exc:`ValueError` if any node in *nodes* has already been marked as
+        processed by a previous call to this method, if a node was not added to the
+        graph by using "add", if called without calling "prepare" previously, or if a
+        node has not yet been returned by "get_ready".
+ """
+
+ if self._ready_nodes is None:
+ raise ValueError("prepare() must be called first")
+
+ n2i = self._node2info
+
+ for node in nodes:
+
+            # Check if we know about this node (i.e. it was added previously using add())
+ if (nodeinfo := n2i.get(node)) is None:
+ raise ValueError(f"node {node!r} was not added using add()")
+
+            # If the node has not been returned (marked as ready) previously, inform the user.
+ stat = nodeinfo.npredecessors
+ if stat != _NODE_OUT:
+ if stat >= 0:
+ raise ValueError(
+ f"node {node!r} was not passed out (still not ready)"
+ )
+ elif stat == _NODE_DONE:
+ raise ValueError(f"node {node!r} was already marked done")
+ else:
+ assert False, f"node {node!r}: unknown status {stat}"
+
+ # Mark the node as processed
+ nodeinfo.npredecessors = _NODE_DONE
+
+ # Go to all the successors and reduce the number of predecessors, collecting all the ones
+ # that are ready to be returned in the next get_ready() call.
+ for successor in nodeinfo.successors:
+ successor_info = n2i[successor]
+ successor_info.npredecessors -= 1
+ if successor_info.npredecessors == 0:
+ self._ready_nodes.append(successor)
+ self._nfinished += 1
+
+ def _find_cycle(self):
+ n2i = self._node2info
+ stack = []
+ itstack = []
+ seen = set()
+ node2stacki = {}
+
+ for node in n2i:
+ if node in seen:
+ continue
+
+ while True:
+ if node in seen:
+                    # If we have already seen the node and it is in the
+                    # current stack, we have found a cycle.
+ if node in node2stacki:
+ return stack[node2stacki[node] :] + [node]
+ # else go on to get next successor
+ else:
+ seen.add(node)
+ itstack.append(iter(n2i[node].successors).__next__)
+ node2stacki[node] = len(stack)
+ stack.append(node)
+
+ # Backtrack to the topmost stack entry with
+                # at least one more successor.
+ while stack:
+ try:
+ node = itstack[-1]()
+ break
+ except StopIteration:
+ del node2stacki[stack.pop()]
+ itstack.pop()
+ else:
+ break
+ return None
+
+ def static_order(self):
+ """Returns an iterable of nodes in a topological order.
+
+ The particular order that is returned may depend on the specific
+ order in which the items were inserted in the graph.
+
+        Using this method does not require calling "prepare" or "done". If any
+ cycle is detected, :exc:`CycleError` will be raised.
+ """
+ self.prepare()
+ while self.is_active():
+ node_group = self.get_ready()
+ yield from node_group
+ self.done(*node_group)
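
A minimal sketch of the prepare()/get_ready()/done() protocol defined
above (graphlib ships with Python 3.9+); the mapping gives each node's
predecessors:

    from graphlib import TopologicalSorter

    graph = {"compile": {"fetch"}, "build": {"compile"}, "test": {"build"}}
    ts = TopologicalSorter(graph)
    ts.prepare()                      # freezes the graph, checks for cycles
    while ts.is_active():
        ready = ts.get_ready()        # nodes with all predecessors done
        print(ready)
        ts.done(*ready)               # unblocks their successors

    # One-shot equivalent: list(TopologicalSorter(graph).static_order())
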
diff --git a/contrib/tools/python3/src/Lib/gzip.py b/contrib/tools/python3/src/Lib/gzip.py
index 11a5f41d56..360857df38 100644
--- a/contrib/tools/python3/src/Lib/gzip.py
+++ b/contrib/tools/python3/src/Lib/gzip.py
@@ -11,18 +11,18 @@ import builtins
import io
import _compression
-__all__ = ["BadGzipFile", "GzipFile", "open", "compress", "decompress"]
+__all__ = ["BadGzipFile", "GzipFile", "open", "compress", "decompress"]
FTEXT, FHCRC, FEXTRA, FNAME, FCOMMENT = 1, 2, 4, 8, 16
READ, WRITE = 1, 2
-_COMPRESS_LEVEL_FAST = 1
-_COMPRESS_LEVEL_TRADEOFF = 6
-_COMPRESS_LEVEL_BEST = 9
-
-
-def open(filename, mode="rb", compresslevel=_COMPRESS_LEVEL_BEST,
+_COMPRESS_LEVEL_FAST = 1
+_COMPRESS_LEVEL_TRADEOFF = 6
+_COMPRESS_LEVEL_BEST = 9
+
+
+def open(filename, mode="rb", compresslevel=_COMPRESS_LEVEL_BEST,
encoding=None, errors=None, newline=None):
"""Open a gzip-compressed file in binary or text mode.
@@ -112,11 +112,11 @@ class _PaddedFile:
def seekable(self):
return True # Allows fast-forwarding even in unseekable streams
-
-class BadGzipFile(OSError):
- """Exception raised in some cases for invalid gzip files."""
-
-
+
+class BadGzipFile(OSError):
+ """Exception raised in some cases for invalid gzip files."""
+
+
class GzipFile(_compression.BaseStream):
"""The GzipFile class simulates most of the methods of a file object with
the exception of the truncate() method.
@@ -131,7 +131,7 @@ class GzipFile(_compression.BaseStream):
myfileobj = None
def __init__(self, filename=None, mode=None,
- compresslevel=_COMPRESS_LEVEL_BEST, fileobj=None, mtime=None):
+ compresslevel=_COMPRESS_LEVEL_BEST, fileobj=None, mtime=None):
"""Constructor for the GzipFile class.
At least one of fileobj and filename must be given a
@@ -177,7 +177,7 @@ class GzipFile(_compression.BaseStream):
filename = ''
else:
filename = os.fspath(filename)
- origmode = mode
+ origmode = mode
if mode is None:
mode = getattr(fileobj, 'mode', 'rb')
@@ -188,13 +188,13 @@ class GzipFile(_compression.BaseStream):
self.name = filename
elif mode.startswith(('w', 'a', 'x')):
- if origmode is None:
- import warnings
- warnings.warn(
- "GzipFile was opened for writing, but this will "
- "change in future Python releases. "
- "Specify the mode argument for opening it for writing.",
- FutureWarning, 2)
+ if origmode is None:
+ import warnings
+ warnings.warn(
+ "GzipFile was opened for writing, but this will "
+ "change in future Python releases. "
+ "Specify the mode argument for opening it for writing.",
+ FutureWarning, 2)
self.mode = WRITE
self._init_write(filename)
self.compress = zlib.compressobj(compresslevel,
@@ -209,7 +209,7 @@ class GzipFile(_compression.BaseStream):
self.fileobj = fileobj
if self.mode == WRITE:
- self._write_gzip_header(compresslevel)
+ self._write_gzip_header(compresslevel)
@property
def filename(self):
@@ -236,7 +236,7 @@ class GzipFile(_compression.BaseStream):
self.bufsize = 0
self.offset = 0 # Current file offset for seek(), tell(), etc
- def _write_gzip_header(self, compresslevel):
+ def _write_gzip_header(self, compresslevel):
self.fileobj.write(b'\037\213') # magic header
self.fileobj.write(b'\010') # compression method
try:
@@ -257,13 +257,13 @@ class GzipFile(_compression.BaseStream):
if mtime is None:
mtime = time.time()
write32u(self.fileobj, int(mtime))
- if compresslevel == _COMPRESS_LEVEL_BEST:
- xfl = b'\002'
- elif compresslevel == _COMPRESS_LEVEL_FAST:
- xfl = b'\004'
- else:
- xfl = b'\000'
- self.fileobj.write(xfl)
+ if compresslevel == _COMPRESS_LEVEL_BEST:
+ xfl = b'\002'
+ elif compresslevel == _COMPRESS_LEVEL_FAST:
+ xfl = b'\004'
+ else:
+ xfl = b'\000'
+ self.fileobj.write(xfl)
self.fileobj.write(b'\377')
if fname:
self.fileobj.write(fname + b'\000')
@@ -302,7 +302,7 @@ class GzipFile(_compression.BaseStream):
def read1(self, size=-1):
"""Implements BufferedIOBase.read1()
- Reads up to a buffer's worth of data if size is negative."""
+ Reads up to a buffer's worth of data if size is negative."""
self._check_not_closed()
if self.mode != READ:
import errno
@@ -432,12 +432,12 @@ class _GzipReader(_compression.DecompressReader):
return False
if magic != b'\037\213':
- raise BadGzipFile('Not a gzipped file (%r)' % magic)
+ raise BadGzipFile('Not a gzipped file (%r)' % magic)
(method, flag,
self._last_mtime) = struct.unpack("<BBIxx", self._read_exact(8))
if method != 8:
- raise BadGzipFile('Unknown compression method')
+ raise BadGzipFile('Unknown compression method')
if flag & FEXTRA:
# Read & discard the extra field, if present
@@ -516,15 +516,15 @@ class _GzipReader(_compression.DecompressReader):
def _read_eof(self):
# We've read to the end of the file
- # We check that the computed CRC and size of the
+ # We check that the computed CRC and size of the
# uncompressed data matches the stored values. Note that the size
# stored is the true file size mod 2**32.
crc32, isize = struct.unpack("<II", self._read_exact(8))
if crc32 != self._crc:
- raise BadGzipFile("CRC check failed %s != %s" % (hex(crc32),
- hex(self._crc)))
+ raise BadGzipFile("CRC check failed %s != %s" % (hex(crc32),
+ hex(self._crc)))
elif isize != (self._stream_size & 0xffffffff):
- raise BadGzipFile("Incorrect length of data produced")
+ raise BadGzipFile("Incorrect length of data produced")
# Gzip files can be padded with zeroes and still have archives.
# Consume all zero bytes and set the file position to the first
@@ -539,12 +539,12 @@ class _GzipReader(_compression.DecompressReader):
super()._rewind()
self._new_member = True
-def compress(data, compresslevel=_COMPRESS_LEVEL_BEST, *, mtime=None):
+def compress(data, compresslevel=_COMPRESS_LEVEL_BEST, *, mtime=None):
"""Compress data in one shot and return the compressed string.
Optional argument is the compression level, in range of 0-9.
"""
buf = io.BytesIO()
- with GzipFile(fileobj=buf, mode='wb', compresslevel=compresslevel, mtime=mtime) as f:
+ with GzipFile(fileobj=buf, mode='wb', compresslevel=compresslevel, mtime=mtime) as f:
f.write(data)
return buf.getvalue()
@@ -556,41 +556,41 @@ def decompress(data):
return f.read()
-def main():
- from argparse import ArgumentParser
- parser = ArgumentParser(description=
- "A simple command line interface for the gzip module: act like gzip, "
- "but do not delete the input file.")
- group = parser.add_mutually_exclusive_group()
- group.add_argument('--fast', action='store_true', help='compress faster')
- group.add_argument('--best', action='store_true', help='compress better')
- group.add_argument("-d", "--decompress", action="store_true",
- help="act like gunzip instead of gzip")
-
- parser.add_argument("args", nargs="*", default=["-"], metavar='file')
- args = parser.parse_args()
-
- compresslevel = _COMPRESS_LEVEL_TRADEOFF
- if args.fast:
- compresslevel = _COMPRESS_LEVEL_FAST
- elif args.best:
- compresslevel = _COMPRESS_LEVEL_BEST
-
- for arg in args.args:
- if args.decompress:
+def main():
+ from argparse import ArgumentParser
+ parser = ArgumentParser(description=
+ "A simple command line interface for the gzip module: act like gzip, "
+ "but do not delete the input file.")
+ group = parser.add_mutually_exclusive_group()
+ group.add_argument('--fast', action='store_true', help='compress faster')
+ group.add_argument('--best', action='store_true', help='compress better')
+ group.add_argument("-d", "--decompress", action="store_true",
+ help="act like gunzip instead of gzip")
+
+ parser.add_argument("args", nargs="*", default=["-"], metavar='file')
+ args = parser.parse_args()
+
+ compresslevel = _COMPRESS_LEVEL_TRADEOFF
+ if args.fast:
+ compresslevel = _COMPRESS_LEVEL_FAST
+ elif args.best:
+ compresslevel = _COMPRESS_LEVEL_BEST
+
+ for arg in args.args:
+ if args.decompress:
if arg == "-":
f = GzipFile(filename="", mode="rb", fileobj=sys.stdin.buffer)
g = sys.stdout.buffer
else:
if arg[-3:] != ".gz":
- sys.exit(f"filename doesn't end in .gz: {arg!r}")
+ sys.exit(f"filename doesn't end in .gz: {arg!r}")
f = open(arg, "rb")
g = builtins.open(arg[:-3], "wb")
else:
if arg == "-":
f = sys.stdin.buffer
- g = GzipFile(filename="", mode="wb", fileobj=sys.stdout.buffer,
- compresslevel=compresslevel)
+ g = GzipFile(filename="", mode="wb", fileobj=sys.stdout.buffer,
+ compresslevel=compresslevel)
else:
f = builtins.open(arg, "rb")
g = open(arg + ".gz", "wb")
@@ -605,4 +605,4 @@ def main():
f.close()
if __name__ == '__main__':
- main()
+ main()
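
A short sketch of the one-shot helpers above; compress() grew a
keyword-only mtime argument, which makes the output byte-reproducible:

    import gzip

    # mtime=0 pins the timestamp stored in the gzip header.
    blob = gzip.compress(b"hello world", compresslevel=9, mtime=0)
    assert gzip.decompress(blob) == b"hello world"
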
diff --git a/contrib/tools/python3/src/Lib/hashlib.py b/contrib/tools/python3/src/Lib/hashlib.py
index 58c340d56e..5e0017866b 100644
--- a/contrib/tools/python3/src/Lib/hashlib.py
+++ b/contrib/tools/python3/src/Lib/hashlib.py
@@ -70,47 +70,47 @@ __all__ = __always_supported + ('new', 'algorithms_guaranteed',
__builtin_constructor_cache = {}
-# Prefer our blake2 implementation
-# OpenSSL 1.1.0 comes with a limited implementation of blake2b/s. The OpenSSL
-# implementations neither support keyed blake2 (blake2 MAC) nor advanced
-# features like salt, personalization, or tree hashing. OpenSSL hash-only
-# variants are available as 'blake2b512' and 'blake2s256', though.
-__block_openssl_constructor = {
- 'blake2b', 'blake2s',
-}
-
+# Prefer our blake2 implementation
+# OpenSSL 1.1.0 comes with a limited implementation of blake2b/s. The OpenSSL
+# implementations neither support keyed blake2 (blake2 MAC) nor advanced
+# features like salt, personalization, or tree hashing. OpenSSL hash-only
+# variants are available as 'blake2b512' and 'blake2s256', though.
+__block_openssl_constructor = {
+ 'blake2b', 'blake2s',
+}
+
def __get_builtin_constructor(name):
cache = __builtin_constructor_cache
constructor = cache.get(name)
if constructor is not None:
return constructor
try:
- if name in {'SHA1', 'sha1'}:
+ if name in {'SHA1', 'sha1'}:
import _sha1
cache['SHA1'] = cache['sha1'] = _sha1.sha1
- elif name in {'MD5', 'md5'}:
+ elif name in {'MD5', 'md5'}:
import _md5
cache['MD5'] = cache['md5'] = _md5.md5
- elif name in {'SHA256', 'sha256', 'SHA224', 'sha224'}:
+ elif name in {'SHA256', 'sha256', 'SHA224', 'sha224'}:
import _sha256
cache['SHA224'] = cache['sha224'] = _sha256.sha224
cache['SHA256'] = cache['sha256'] = _sha256.sha256
- elif name in {'SHA512', 'sha512', 'SHA384', 'sha384'}:
+ elif name in {'SHA512', 'sha512', 'SHA384', 'sha384'}:
import _sha512
cache['SHA384'] = cache['sha384'] = _sha512.sha384
cache['SHA512'] = cache['sha512'] = _sha512.sha512
- elif name in {'blake2b', 'blake2s'}:
+ elif name in {'blake2b', 'blake2s'}:
import _blake2
cache['blake2b'] = _blake2.blake2b
cache['blake2s'] = _blake2.blake2s
- elif name in {'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512'}:
+ elif name in {'sha3_224', 'sha3_256', 'sha3_384', 'sha3_512'}:
import _sha3
cache['sha3_224'] = _sha3.sha3_224
cache['sha3_256'] = _sha3.sha3_256
cache['sha3_384'] = _sha3.sha3_384
cache['sha3_512'] = _sha3.sha3_512
- elif name in {'shake_128', 'shake_256'}:
- import _sha3
+ elif name in {'shake_128', 'shake_256'}:
+ import _sha3
cache['shake_128'] = _sha3.shake_128
cache['shake_256'] = _sha3.shake_256
except ImportError:
@@ -124,17 +124,17 @@ def __get_builtin_constructor(name):
def __get_openssl_constructor(name):
- if name in __block_openssl_constructor:
- # Prefer our builtin blake2 implementation.
+ if name in __block_openssl_constructor:
+ # Prefer our builtin blake2 implementation.
return __get_builtin_constructor(name)
try:
- # MD5, SHA1, and SHA2 are in all supported OpenSSL versions
- # SHA3/shake are available in OpenSSL 1.1.1+
+ # MD5, SHA1, and SHA2 are in all supported OpenSSL versions
+ # SHA3/shake are available in OpenSSL 1.1.1+
f = getattr(_hashlib, 'openssl_' + name)
# Allow the C module to raise ValueError. The function will be
- # defined but the hash not actually available. Don't fall back to
- # builtin if the current security policy blocks a digest, bpo#40695.
- f(usedforsecurity=False)
+ # defined but the hash not actually available. Don't fall back to
+ # builtin if the current security policy blocks a digest, bpo#40695.
+ f(usedforsecurity=False)
# Use the C function directly (very fast)
return f
except (AttributeError, ValueError):
@@ -153,11 +153,11 @@ def __hash_new(name, data=b'', **kwargs):
"""new(name, data=b'') - Return a new hashing object using the named algorithm;
optionally initialized with data (which must be a bytes-like object).
"""
- if name in __block_openssl_constructor:
- # Prefer our builtin blake2 implementation.
+ if name in __block_openssl_constructor:
+ # Prefer our builtin blake2 implementation.
return __get_builtin_constructor(name)(data, **kwargs)
try:
- return _hashlib.new(name, data, **kwargs)
+ return _hashlib.new(name, data, **kwargs)
except ValueError:
# If the _hashlib module (OpenSSL) doesn't support the named
# hash, try using our builtin implementations.
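
A hedged sketch of the blake2 routing above: blake2b/blake2s stay on
the built-in implementation because OpenSSL's variants lack keyed
hashing, so a keyed digest works regardless of the OpenSSL build:

    import hashlib

    # Keyword arguments are forwarded to the builtin constructor.
    h = hashlib.new("blake2b", b"message", key=b"secret", digest_size=32)
    print(h.hexdigest())
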
diff --git a/contrib/tools/python3/src/Lib/heapq.py b/contrib/tools/python3/src/Lib/heapq.py
index fabefd87f8..535cabfa1f 100644
--- a/contrib/tools/python3/src/Lib/heapq.py
+++ b/contrib/tools/python3/src/Lib/heapq.py
@@ -468,7 +468,7 @@ def nsmallest(n, iterable, key=None):
if n == 1:
it = iter(iterable)
sentinel = object()
- result = min(it, default=sentinel, key=key)
+ result = min(it, default=sentinel, key=key)
return [] if result is sentinel else [result]
# When n>=size, it's faster to use sorted()
@@ -528,7 +528,7 @@ def nlargest(n, iterable, key=None):
if n == 1:
it = iter(iterable)
sentinel = object()
- result = max(it, default=sentinel, key=key)
+ result = max(it, default=sentinel, key=key)
return [] if result is sentinel else [result]
# When n>=size, it's faster to use sorted()
@@ -597,5 +597,5 @@ except ImportError:
if __name__ == "__main__":
- import doctest # pragma: no cover
- print(doctest.testmod()) # pragma: no cover
+ import doctest # pragma: no cover
+ print(doctest.testmod()) # pragma: no cover
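
A quick sketch of the n == 1 fast path touched above: nsmallest() and
nlargest() reduce to a single min()/max() pass with a sentinel default:

    import heapq

    print(heapq.nsmallest(1, [5, -2, 9], key=abs))  # [-2]
    print(heapq.nlargest(1, [], key=abs))           # [] via the sentinel
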
diff --git a/contrib/tools/python3/src/Lib/hmac.py b/contrib/tools/python3/src/Lib/hmac.py
index 180bc378b5..3051334b3d 100644
--- a/contrib/tools/python3/src/Lib/hmac.py
+++ b/contrib/tools/python3/src/Lib/hmac.py
@@ -1,4 +1,4 @@
-"""HMAC (Keyed-Hashing for Message Authentication) module.
+"""HMAC (Keyed-Hashing for Message Authentication) module.
Implements the HMAC algorithm as described by RFC 2104.
"""
@@ -9,10 +9,10 @@ try:
except ImportError:
_hashopenssl = None
_openssl_md_meths = None
- from _operator import _compare_digest as compare_digest
+ from _operator import _compare_digest as compare_digest
else:
_openssl_md_meths = frozenset(_hashopenssl.openssl_md_meth_names)
- compare_digest = _hashopenssl.compare_digest
+ compare_digest = _hashopenssl.compare_digest
import hashlib as _hashlib
trans_5C = bytes((x ^ 0x5C) for x in range(256))
@@ -31,43 +31,43 @@ class HMAC:
"""
blocksize = 64 # 512-bit HMAC; can be changed in subclasses.
- __slots__ = (
- "_digest_cons", "_inner", "_outer", "block_size", "digest_size"
- )
-
- def __init__(self, key, msg=None, digestmod=''):
+ __slots__ = (
+ "_digest_cons", "_inner", "_outer", "block_size", "digest_size"
+ )
+
+ def __init__(self, key, msg=None, digestmod=''):
"""Create a new HMAC object.
- key: bytes or buffer, key for the keyed hash object.
- msg: bytes or buffer, Initial input for the hash or None.
- digestmod: A hash name suitable for hashlib.new(). *OR*
+ key: bytes or buffer, key for the keyed hash object.
+ msg: bytes or buffer, Initial input for the hash or None.
+ digestmod: A hash name suitable for hashlib.new(). *OR*
A hashlib constructor returning a new hash object. *OR*
- A module supporting PEP 247.
+ A module supporting PEP 247.
- Required as of 3.8, despite its position after the optional
- msg argument. Passing it as a keyword argument is
- recommended, though not required for legacy API reasons.
+ Required as of 3.8, despite its position after the optional
+ msg argument. Passing it as a keyword argument is
+ recommended, though not required for legacy API reasons.
"""
if not isinstance(key, (bytes, bytearray)):
raise TypeError("key: expected bytes or bytearray, but got %r" % type(key).__name__)
- if not digestmod:
- raise TypeError("Missing required parameter 'digestmod'.")
+ if not digestmod:
+ raise TypeError("Missing required parameter 'digestmod'.")
if callable(digestmod):
- self._digest_cons = digestmod
+ self._digest_cons = digestmod
elif isinstance(digestmod, str):
- self._digest_cons = lambda d=b'': _hashlib.new(digestmod, d)
+ self._digest_cons = lambda d=b'': _hashlib.new(digestmod, d)
else:
- self._digest_cons = lambda d=b'': digestmod.new(d)
+ self._digest_cons = lambda d=b'': digestmod.new(d)
- self._outer = self._digest_cons()
- self._inner = self._digest_cons()
- self.digest_size = self._inner.digest_size
+ self._outer = self._digest_cons()
+ self._inner = self._digest_cons()
+ self.digest_size = self._inner.digest_size
- if hasattr(self._inner, 'block_size'):
- blocksize = self._inner.block_size
+ if hasattr(self._inner, 'block_size'):
+ blocksize = self._inner.block_size
if blocksize < 16:
_warnings.warn('block_size of %d seems too small; using our '
'default of %d.' % (blocksize, self.blocksize),
@@ -84,33 +84,33 @@ class HMAC:
self.block_size = blocksize
if len(key) > blocksize:
- key = self._digest_cons(key).digest()
+ key = self._digest_cons(key).digest()
key = key.ljust(blocksize, b'\0')
- self._outer.update(key.translate(trans_5C))
- self._inner.update(key.translate(trans_36))
+ self._outer.update(key.translate(trans_5C))
+ self._inner.update(key.translate(trans_36))
if msg is not None:
self.update(msg)
@property
def name(self):
- return "hmac-" + self._inner.name
-
- @property
- def digest_cons(self):
- return self._digest_cons
-
- @property
- def inner(self):
- return self._inner
-
- @property
- def outer(self):
- return self._outer
-
+ return "hmac-" + self._inner.name
+
+ @property
+ def digest_cons(self):
+ return self._digest_cons
+
+ @property
+ def inner(self):
+ return self._inner
+
+ @property
+ def outer(self):
+ return self._outer
+
def update(self, msg):
- """Feed data from msg into this hashing object."""
- self._inner.update(msg)
+ """Feed data from msg into this hashing object."""
+ self._inner.update(msg)
def copy(self):
"""Return a separate copy of this hashing object.
@@ -119,10 +119,10 @@ class HMAC:
"""
# Call __new__ directly to avoid the expensive __init__.
other = self.__class__.__new__(self.__class__)
- other._digest_cons = self._digest_cons
+ other._digest_cons = self._digest_cons
other.digest_size = self.digest_size
- other._inner = self._inner.copy()
- other._outer = self._outer.copy()
+ other._inner = self._inner.copy()
+ other._outer = self._outer.copy()
return other
def _current(self):
@@ -130,14 +130,14 @@ class HMAC:
To be used only internally with digest() and hexdigest().
"""
- h = self._outer.copy()
- h.update(self._inner.digest())
+ h = self._outer.copy()
+ h.update(self._inner.digest())
return h
def digest(self):
"""Return the hash value of this hashing object.
- This returns the hmac value as bytes. The object is
+ This returns the hmac value as bytes. The object is
not altered in any way by this function; you can continue
updating the object after calling this function.
"""
@@ -150,31 +150,31 @@ class HMAC:
h = self._current()
return h.hexdigest()
-def new(key, msg=None, digestmod=''):
+def new(key, msg=None, digestmod=''):
"""Create a new hashing object and return it.
- key: bytes or buffer, The starting key for the hash.
- msg: bytes or buffer, Initial input for the hash, or None.
- digestmod: A hash name suitable for hashlib.new(). *OR*
- A hashlib constructor returning a new hash object. *OR*
- A module supporting PEP 247.
-
- Required as of 3.8, despite its position after the optional
- msg argument. Passing it as a keyword argument is
- recommended, though not required for legacy API reasons.
-
- You can now feed arbitrary bytes into the object using its update()
+ key: bytes or buffer, The starting key for the hash.
+ msg: bytes or buffer, Initial input for the hash, or None.
+ digestmod: A hash name suitable for hashlib.new(). *OR*
+ A hashlib constructor returning a new hash object. *OR*
+ A module supporting PEP 247.
+
+ Required as of 3.8, despite its position after the optional
+ msg argument. Passing it as a keyword argument is
+ recommended, though not required for legacy API reasons.
+
+ You can now feed arbitrary bytes into the object using its update()
method, and can ask for the hash value at any time by calling its digest()
- or hexdigest() methods.
+ or hexdigest() methods.
"""
return HMAC(key, msg, digestmod)
def digest(key, msg, digest):
- """Fast inline implementation of HMAC.
+ """Fast inline implementation of HMAC.
- key: bytes or buffer, The key for the keyed hash object.
- msg: bytes or buffer, Input message.
+ key: bytes or buffer, The key for the keyed hash object.
+ msg: bytes or buffer, Input message.
digest: A hash name suitable for hashlib.new() for best performance. *OR*
A hashlib constructor returning a new hash object. *OR*
A module supporting PEP 247.
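
A minimal sketch of the API above: digestmod is required as of 3.8,
hmac.digest() is the fast one-shot path, and compare_digest() compares
MACs in constant time:

    import hmac

    mac = hmac.new(b"key", b"message", digestmod="sha256").digest()
    assert hmac.compare_digest(mac, hmac.digest(b"key", b"message", "sha256"))
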
diff --git a/contrib/tools/python3/src/Lib/html/parser.py b/contrib/tools/python3/src/Lib/html/parser.py
index 58f6bb3b1e..a53dc89776 100644
--- a/contrib/tools/python3/src/Lib/html/parser.py
+++ b/contrib/tools/python3/src/Lib/html/parser.py
@@ -46,7 +46,7 @@ locatestarttagend_tolerant = re.compile(r"""
|"[^"]*" # LIT-enclosed value
|(?!['"])[^>\s]* # bare value
)
- \s* # possibly followed by a space
+ \s* # possibly followed by a space
)?(?:\s|/(?!>))*
)*
)?
@@ -405,7 +405,7 @@ class HTMLParser(_markupbase.ParserBase):
tagname = namematch.group(1).lower()
# consume and ignore other stuff between the name and the >
# Note: this is not 100% correct, since we might have things like
- # </tag attr=">">, but looking for > after the name should cover
+ # </tag attr=">">, but looking for > after the name should cover
# most of the cases and is much simpler
gtpos = rawdata.find('>', namematch.end())
self.handle_endtag(tagname)
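
A tiny sketch of the tolerant end-tag handling above: anything between
the end-tag name and the next '>' is consumed and ignored:

    from html.parser import HTMLParser

    class EndTagLogger(HTMLParser):
        def handle_endtag(self, tag):
            print("end:", tag)

    # The malformed '</p junk>' still reports the 'p' end tag.
    EndTagLogger().feed("<p>hi</p junk>")
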
diff --git a/contrib/tools/python3/src/Lib/http/__init__.py b/contrib/tools/python3/src/Lib/http/__init__.py
index 37be765349..2efca879a9 100644
--- a/contrib/tools/python3/src/Lib/http/__init__.py
+++ b/contrib/tools/python3/src/Lib/http/__init__.py
@@ -15,11 +15,11 @@ class HTTPStatus(IntEnum):
* RFC 7238: Permanent Redirect
* RFC 2295: Transparent Content Negotiation in HTTP
* RFC 2774: An HTTP Extension Framework
- * RFC 7725: An HTTP Status Code to Report Legal Obstacles
+ * RFC 7725: An HTTP Status Code to Report Legal Obstacles
* RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2)
- * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0)
- * RFC 8297: An HTTP Status Code for Indicating Hints
- * RFC 8470: Using Early Data in HTTP
+ * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0)
+ * RFC 8297: An HTTP Status Code for Indicating Hints
+ * RFC 8470: Using Early Data in HTTP
"""
def __new__(cls, value, phrase, description=''):
obj = int.__new__(cls, value)
@@ -34,7 +34,7 @@ class HTTPStatus(IntEnum):
SWITCHING_PROTOCOLS = (101, 'Switching Protocols',
'Switching to new protocol; obey Upgrade header')
PROCESSING = 102, 'Processing'
- EARLY_HINTS = 103, 'Early Hints'
+ EARLY_HINTS = 103, 'Early Hints'
# success
OK = 200, 'OK', 'Request fulfilled, document follows'
@@ -64,7 +64,7 @@ class HTTPStatus(IntEnum):
TEMPORARY_REDIRECT = (307, 'Temporary Redirect',
'Object moved temporarily -- see URI list')
PERMANENT_REDIRECT = (308, 'Permanent Redirect',
- 'Object moved permanently -- see URI list')
+ 'Object moved permanently -- see URI list')
# client error
BAD_REQUEST = (400, 'Bad Request',
@@ -104,14 +104,14 @@ class HTTPStatus(IntEnum):
'Cannot satisfy request range')
EXPECTATION_FAILED = (417, 'Expectation Failed',
'Expect condition could not be satisfied')
- IM_A_TEAPOT = (418, 'I\'m a Teapot',
- 'Server refuses to brew coffee because it is a teapot.')
+ IM_A_TEAPOT = (418, 'I\'m a Teapot',
+ 'Server refuses to brew coffee because it is a teapot.')
MISDIRECTED_REQUEST = (421, 'Misdirected Request',
'Server is not able to produce a response')
UNPROCESSABLE_ENTITY = 422, 'Unprocessable Entity'
LOCKED = 423, 'Locked'
FAILED_DEPENDENCY = 424, 'Failed Dependency'
- TOO_EARLY = 425, 'Too Early'
+ TOO_EARLY = 425, 'Too Early'
UPGRADE_REQUIRED = 426, 'Upgrade Required'
PRECONDITION_REQUIRED = (428, 'Precondition Required',
'The origin server requires the request to be conditional')
@@ -122,10 +122,10 @@ class HTTPStatus(IntEnum):
'Request Header Fields Too Large',
'The server is unwilling to process the request because its header '
'fields are too large')
- UNAVAILABLE_FOR_LEGAL_REASONS = (451,
- 'Unavailable For Legal Reasons',
- 'The server is denying access to the '
- 'resource as a consequence of a legal demand')
+ UNAVAILABLE_FOR_LEGAL_REASONS = (451,
+ 'Unavailable For Legal Reasons',
+ 'The server is denying access to the '
+ 'resource as a consequence of a legal demand')
# server errors
INTERNAL_SERVER_ERROR = (500, 'Internal Server Error',
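
A small sketch of the enum members above; each HTTPStatus carries a
numeric value plus phrase and description attributes:

    from http import HTTPStatus

    s = HTTPStatus.UNAVAILABLE_FOR_LEGAL_REASONS
    print(int(s), s.phrase)               # 451 Unavailable For Legal Reasons
    print(HTTPStatus.EARLY_HINTS == 103)  # True (IntEnum comparison)
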
diff --git a/contrib/tools/python3/src/Lib/http/client.py b/contrib/tools/python3/src/Lib/http/client.py
index a98432e568..f0ce40cdc0 100644
--- a/contrib/tools/python3/src/Lib/http/client.py
+++ b/contrib/tools/python3/src/Lib/http/client.py
@@ -70,7 +70,7 @@ Req-sent-unread-response _CS_REQ_SENT <response_class>
import email.parser
import email.message
-import errno
+import errno
import http
import io
import re
@@ -106,9 +106,9 @@ globals().update(http.HTTPStatus.__members__)
# Mapping status codes to official W3C names
responses = {v: v.phrase for v in http.HTTPStatus.__members__.values()}
-# maximal amount of data to read at one time in _safe_read
-MAXAMOUNT = 1048576
-
+# maximal amount of data to read at one time in _safe_read
+MAXAMOUNT = 1048576
+
# maximal line length when calling readline().
_MAXLINE = 65536
_MAXHEADERS = 100
@@ -141,20 +141,20 @@ _MAXHEADERS = 100
_is_legal_header_name = re.compile(rb'[^:\s][^:\r\n]*').fullmatch
_is_illegal_header_value = re.compile(rb'\n(?![ \t])|\r(?![ \t\n])').search
-# These characters are not allowed within HTTP URL paths.
-# See https://tools.ietf.org/html/rfc3986#section-3.3 and the
-# https://tools.ietf.org/html/rfc3986#appendix-A pchar definition.
-# Prevents CVE-2019-9740. Includes control characters such as \r\n.
-# We don't restrict chars above \x7f as putrequest() limits us to ASCII.
-_contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f]')
-# Arguably only these _should_ be allowed:
-# _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$")
-# We are more lenient for assumed real world compatibility purposes.
-
-# These characters are not allowed within HTTP method names
-# to prevent http header injection.
-_contains_disallowed_method_pchar_re = re.compile('[\x00-\x1f]')
-
+# These characters are not allowed within HTTP URL paths.
+# See https://tools.ietf.org/html/rfc3986#section-3.3 and the
+# https://tools.ietf.org/html/rfc3986#appendix-A pchar definition.
+# Prevents CVE-2019-9740. Includes control characters such as \r\n.
+# We don't restrict chars above \x7f as putrequest() limits us to ASCII.
+_contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f]')
+# Arguably only these _should_ be allowed:
+# _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$")
+# We are more lenient for assumed real world compatibility purposes.
+
+# These characters are not allowed within HTTP method names
+# to prevent http header injection.
+_contains_disallowed_method_pchar_re = re.compile('[\x00-\x1f]')
+
# We always set the Content-Length header for these methods because some
# servers will otherwise respond with a 411
_METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'}
@@ -205,11 +205,11 @@ class HTTPMessage(email.message.Message):
lst.append(line)
return lst
-def _read_headers(fp):
- """Reads potential header lines into a list from a file pointer.
+def _read_headers(fp):
+ """Reads potential header lines into a list from a file pointer.
- Length of line is limited by _MAXLINE, and number of
- headers is limited by _MAXHEADERS.
+ Length of line is limited by _MAXLINE, and number of
+ headers is limited by _MAXHEADERS.
"""
headers = []
while True:
@@ -221,19 +221,19 @@ def _read_headers(fp):
raise HTTPException("got more than %d headers" % _MAXHEADERS)
if line in (b'\r\n', b'\n', b''):
break
- return headers
-
-def parse_headers(fp, _class=HTTPMessage):
- """Parses only RFC2822 headers from a file pointer.
-
- email Parser wants to see strings rather than bytes.
- But a TextIOWrapper around self.rfile would buffer too many bytes
- from the stream, bytes which we later need to read as bytes.
- So we read the correct bytes here, as bytes, for email Parser
- to parse.
-
- """
- headers = _read_headers(fp)
+ return headers
+
+def parse_headers(fp, _class=HTTPMessage):
+ """Parses only RFC2822 headers from a file pointer.
+
+ email Parser wants to see strings rather than bytes.
+ But a TextIOWrapper around self.rfile would buffer too many bytes
+ from the stream, bytes which we later need to read as bytes.
+ So we read the correct bytes here, as bytes, for email Parser
+ to parse.
+
+ """
+ headers = _read_headers(fp)
hstring = b''.join(headers).decode('iso-8859-1')
return email.parser.Parser(_class=_class).parsestr(hstring)
@@ -321,10 +321,10 @@ class HTTPResponse(io.BufferedIOBase):
if status != CONTINUE:
break
# skip the header from the 100 response
- skipped_headers = _read_headers(self.fp)
- if self.debuglevel > 0:
- print("headers:", skipped_headers)
- del skipped_headers
+ skipped_headers = _read_headers(self.fp)
+ if self.debuglevel > 0:
+ print("headers:", skipped_headers)
+ del skipped_headers
self.code = self.status = status
self.reason = reason.strip()
@@ -339,8 +339,8 @@ class HTTPResponse(io.BufferedIOBase):
self.headers = self.msg = parse_headers(self.fp)
if self.debuglevel > 0:
- for hdr, val in self.headers.items():
- print("header:", hdr + ":", val)
+ for hdr, val in self.headers.items():
+ print("header:", hdr + ":", val)
# are we using the chunked-style of transfer encoding?
tr_enc = self.headers.get("transfer-encoding")
@@ -608,43 +608,43 @@ class HTTPResponse(io.BufferedIOBase):
raise IncompleteRead(bytes(b[0:total_bytes]))
def _safe_read(self, amt):
- """Read the number of bytes requested, compensating for partial reads.
-
- Normally, we have a blocking socket, but a read() can be interrupted
- by a signal (resulting in a partial read).
-
- Note that we cannot distinguish between EOF and an interrupt when zero
- bytes have been read. IncompleteRead() will be raised in this
- situation.
-
+ """Read the number of bytes requested, compensating for partial reads.
+
+ Normally, we have a blocking socket, but a read() can be interrupted
+ by a signal (resulting in a partial read).
+
+ Note that we cannot distinguish between EOF and an interrupt when zero
+ bytes have been read. IncompleteRead() will be raised in this
+ situation.
+
This function should be used when <amt> bytes "should" be present for
reading. If the bytes are truly not available (due to EOF), then the
IncompleteRead exception can be used to detect the problem.
"""
- s = []
- while amt > 0:
- chunk = self.fp.read(min(amt, MAXAMOUNT))
- if not chunk:
- raise IncompleteRead(b''.join(s), amt)
- s.append(chunk)
- amt -= len(chunk)
- return b"".join(s)
+ s = []
+ while amt > 0:
+ chunk = self.fp.read(min(amt, MAXAMOUNT))
+ if not chunk:
+ raise IncompleteRead(b''.join(s), amt)
+ s.append(chunk)
+ amt -= len(chunk)
+ return b"".join(s)
def _safe_readinto(self, b):
"""Same as _safe_read, but for reading into a buffer."""
- total_bytes = 0
- mvb = memoryview(b)
- while total_bytes < len(b):
- if MAXAMOUNT < len(mvb):
- temp_mvb = mvb[0:MAXAMOUNT]
- n = self.fp.readinto(temp_mvb)
- else:
- n = self.fp.readinto(mvb)
- if not n:
- raise IncompleteRead(bytes(mvb[0:total_bytes]), len(b))
- mvb = mvb[n:]
- total_bytes += n
- return total_bytes
+ total_bytes = 0
+ mvb = memoryview(b)
+ while total_bytes < len(b):
+ if MAXAMOUNT < len(mvb):
+ temp_mvb = mvb[0:MAXAMOUNT]
+ n = self.fp.readinto(temp_mvb)
+ else:
+ n = self.fp.readinto(mvb)
+ if not n:
+ raise IncompleteRead(bytes(mvb[0:total_bytes]), len(b))
+ mvb = mvb[n:]
+ total_bytes += n
+ return total_bytes
def read1(self, n=-1):
"""Read with at most one underlying system call. If at least one
@@ -856,8 +856,8 @@ class HTTPConnection:
(self.host, self.port) = self._get_hostport(host, port)
- self._validate_host(self.host)
-
+ self._validate_host(self.host)
+
# This is stored as an instance variable to allow unit
# tests to replace it with a suitable mockup
self._create_connection = socket.create_connection
@@ -870,7 +870,7 @@ class HTTPConnection:
 the endpoint passed to `set_tunnel`. This is done by sending an HTTP
CONNECT request to the proxy server when the connection is established.
- This method must be called before the HTTP connection has been
+ This method must be called before the HTTP connection has been
established.
The headers argument should be a mapping of extra HTTP headers to send
@@ -910,24 +910,24 @@ class HTTPConnection:
self.debuglevel = level
def _tunnel(self):
- connect = b"CONNECT %s:%d HTTP/1.0\r\n" % (
- self._tunnel_host.encode("ascii"), self._tunnel_port)
- headers = [connect]
+ connect = b"CONNECT %s:%d HTTP/1.0\r\n" % (
+ self._tunnel_host.encode("ascii"), self._tunnel_port)
+ headers = [connect]
for header, value in self._tunnel_headers.items():
- headers.append(f"{header}: {value}\r\n".encode("latin-1"))
- headers.append(b"\r\n")
- # Making a single send() call instead of one per line encourages
- # the host OS to use a more optimal packet size instead of
- # potentially emitting a series of small packets.
- self.send(b"".join(headers))
- del headers
+ headers.append(f"{header}: {value}\r\n".encode("latin-1"))
+ headers.append(b"\r\n")
+ # Making a single send() call instead of one per line encourages
+ # the host OS to use a more optimal packet size instead of
+ # potentially emitting a series of small packets.
+ self.send(b"".join(headers))
+ del headers
response = self.response_class(self.sock, method=self._method)
(version, code, message) = response._read_status()
if code != http.HTTPStatus.OK:
self.close()
- raise OSError(f"Tunnel connection failed: {code} {message.strip()}")
+ raise OSError(f"Tunnel connection failed: {code} {message.strip()}")
while True:
line = response.fp.readline(_MAXLINE + 1)
if len(line) > _MAXLINE:
@@ -945,12 +945,12 @@ class HTTPConnection:
"""Connect to the host and port specified in __init__."""
self.sock = self._create_connection(
(self.host,self.port), self.timeout, self.source_address)
- # Might fail in OSs that don't implement TCP_NODELAY
- try:
- self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
- except OSError as e:
- if e.errno != errno.ENOPROTOOPT:
- raise
+ # Might fail in OSs that don't implement TCP_NODELAY
+ try:
+ self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+ except OSError as e:
+ if e.errno != errno.ENOPROTOOPT:
+ raise
if self._tunnel_host:
self._tunnel()
@@ -1121,17 +1121,17 @@ class HTTPConnection:
else:
raise CannotSendRequest(self.__state)
- self._validate_method(method)
-
- # Save the method for use later in the response phase
+ self._validate_method(method)
+
+ # Save the method for use later in the response phase
self._method = method
-
- url = url or '/'
- self._validate_path(url)
-
+
+ url = url or '/'
+ self._validate_path(url)
+
request = '%s %s %s' % (method, url, self._http_vsn_str)
- self._output(self._encode_request(request))
+ self._output(self._encode_request(request))
if self._http_vsn == 11:
# Issue some standard headers for better HTTP/1.1 compliance
@@ -1209,35 +1209,35 @@ class HTTPConnection:
# For HTTP/1.0, the server will assume "not chunked"
pass
- def _encode_request(self, request):
- # ASCII also helps prevent CVE-2019-9740.
- return request.encode('ascii')
-
- def _validate_method(self, method):
- """Validate a method name for putrequest."""
- # prevent http header injection
- match = _contains_disallowed_method_pchar_re.search(method)
- if match:
- raise ValueError(
- f"method can't contain control characters. {method!r} "
- f"(found at least {match.group()!r})")
-
- def _validate_path(self, url):
- """Validate a url for putrequest."""
- # Prevent CVE-2019-9740.
- match = _contains_disallowed_url_pchar_re.search(url)
- if match:
- raise InvalidURL(f"URL can't contain control characters. {url!r} "
- f"(found at least {match.group()!r})")
-
- def _validate_host(self, host):
- """Validate a host so it doesn't contain control characters."""
- # Prevent CVE-2019-18348.
- match = _contains_disallowed_url_pchar_re.search(host)
- if match:
- raise InvalidURL(f"URL can't contain control characters. {host!r} "
- f"(found at least {match.group()!r})")
-
+ def _encode_request(self, request):
+ # ASCII also helps prevent CVE-2019-9740.
+ return request.encode('ascii')
+
+ def _validate_method(self, method):
+ """Validate a method name for putrequest."""
+ # prevent http header injection
+ match = _contains_disallowed_method_pchar_re.search(method)
+ if match:
+ raise ValueError(
+ f"method can't contain control characters. {method!r} "
+ f"(found at least {match.group()!r})")
+
+ def _validate_path(self, url):
+ """Validate a url for putrequest."""
+ # Prevent CVE-2019-9740.
+ match = _contains_disallowed_url_pchar_re.search(url)
+ if match:
+ raise InvalidURL(f"URL can't contain control characters. {url!r} "
+ f"(found at least {match.group()!r})")
+
+ def _validate_host(self, host):
+ """Validate a host so it doesn't contain control characters."""
+ # Prevent CVE-2019-18348.
+ match = _contains_disallowed_url_pchar_re.search(host)
+ if match:
+ raise InvalidURL(f"URL can't contain control characters. {host!r} "
+ f"(found at least {match.group()!r})")
+
def putheader(self, header, *values):
"""Send a request header line to the server.
@@ -1422,9 +1422,9 @@ else:
self.cert_file = cert_file
if context is None:
context = ssl._create_default_https_context()
- # enable PHA for TLS 1.3 connections if available
- if context.post_handshake_auth is not None:
- context.post_handshake_auth = True
+ # enable PHA for TLS 1.3 connections if available
+ if context.post_handshake_auth is not None:
+ context.post_handshake_auth = True
will_verify = context.verify_mode != ssl.CERT_NONE
if check_hostname is None:
check_hostname = context.check_hostname
@@ -1433,10 +1433,10 @@ else:
"either CERT_OPTIONAL or CERT_REQUIRED")
if key_file or cert_file:
context.load_cert_chain(cert_file, key_file)
- # cert and key file means the user wants to authenticate.
- # enable TLS 1.3 PHA implicitly even for custom contexts.
- if context.post_handshake_auth is not None:
- context.post_handshake_auth = True
+ # cert and key file means the user wants to authenticate.
+ # enable TLS 1.3 PHA implicitly even for custom contexts.
+ if context.post_handshake_auth is not None:
+ context.post_handshake_auth = True
self._context = context
if check_hostname is not None:
self._context.check_hostname = check_hostname
@@ -1490,7 +1490,7 @@ class IncompleteRead(HTTPException):
e = ''
return '%s(%i bytes read%s)' % (self.__class__.__name__,
len(self.partial), e)
- __str__ = object.__str__
+ __str__ = object.__str__
class ImproperConnectionState(HTTPException):
pass
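
A hedged sketch of the validation added above (the CVE-2019-9740 and
CVE-2019-18348 guards): control characters in the request target are
rejected before any bytes reach the socket:

    import http.client

    conn = http.client.HTTPConnection("example.com")  # no I/O yet
    try:
        conn.putrequest("GET", "/x HTTP/1.1\r\nHost: evil")
    except http.client.InvalidURL as exc:
        print("rejected:", exc)
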
diff --git a/contrib/tools/python3/src/Lib/http/cookiejar.py b/contrib/tools/python3/src/Lib/http/cookiejar.py
index 47ed5c3d64..27debead36 100644
--- a/contrib/tools/python3/src/Lib/http/cookiejar.py
+++ b/contrib/tools/python3/src/Lib/http/cookiejar.py
@@ -28,7 +28,7 @@ http://wwwsearch.sf.net/):
__all__ = ['Cookie', 'CookieJar', 'CookiePolicy', 'DefaultCookiePolicy',
'FileCookieJar', 'LWPCookieJar', 'LoadError', 'MozillaCookieJar']
-import os
+import os
import copy
import datetime
import re
@@ -214,14 +214,14 @@ LOOSE_HTTP_DATE_RE = re.compile(
(?::(\d\d))? # optional seconds
)? # optional clock
\s*
- (?:
- ([-+]?\d{2,4}|(?![APap][Mm]\b)[A-Za-z]+) # timezone
- \s*
- )?
- (?:
- \(\w+\) # ASCII representation of timezone in parens.
+ (?:
+ ([-+]?\d{2,4}|(?![APap][Mm]\b)[A-Za-z]+) # timezone
\s*
- )?$""", re.X | re.ASCII)
+ )?
+ (?:
+ \(\w+\) # ASCII representation of timezone in parens.
+ \s*
+ )?$""", re.X | re.ASCII)
def http2time(text):
"""Returns time in seconds since epoch of time represented by a string.
@@ -291,11 +291,11 @@ ISO_DATE_RE = re.compile(
(?::?(\d\d(?:\.\d*)?))? # optional seconds (and fractional)
)? # optional clock
\s*
- (?:
- ([-+]?\d\d?:?(:?\d\d)?
- |Z|z) # timezone (Z is "zero meridian", i.e. GMT)
- \s*
- )?$""", re.X | re. ASCII)
+ (?:
+ ([-+]?\d\d?:?(:?\d\d)?
+ |Z|z) # timezone (Z is "zero meridian", i.e. GMT)
+ \s*
+ )?$""", re.X | re. ASCII)
def iso2time(text):
"""
As for http2time, but parses the ISO 8601 formats:
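And a companion sketch for the ISO 8601 regex:

    from http.cookiejar import iso2time

    iso2time("1994-02-09 22:23:32Z")   # -> 760832612; Z is the "zero meridian" (UTC)
    iso2time("1994-02-09")             # date-only form is accepted too
    iso2time("bogus")                  # -> None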
@@ -885,7 +885,7 @@ class DefaultCookiePolicy(CookiePolicy):
strict_ns_domain=DomainLiberal,
strict_ns_set_initial_dollar=False,
strict_ns_set_path=False,
- secure_protocols=("https", "wss")
+ secure_protocols=("https", "wss")
):
"""Constructor arguments should be passed as keyword arguments only."""
self.netscape = netscape
@@ -898,7 +898,7 @@ class DefaultCookiePolicy(CookiePolicy):
self.strict_ns_domain = strict_ns_domain
self.strict_ns_set_initial_dollar = strict_ns_set_initial_dollar
self.strict_ns_set_path = strict_ns_set_path
- self.secure_protocols = secure_protocols
+ self.secure_protocols = secure_protocols
if blocked_domains is not None:
self._blocked_domains = tuple(blocked_domains)
@@ -1125,7 +1125,7 @@ class DefaultCookiePolicy(CookiePolicy):
return True
def return_ok_secure(self, cookie, request):
- if cookie.secure and request.type not in self.secure_protocols:
+ if cookie.secure and request.type not in self.secure_protocols:
_debug(" secure cookie with non-secure request")
return False
return True
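The effect of the new secure_protocols knob, as a sketch:

    from http.cookiejar import DefaultCookiePolicy

    # Secure cookies are released to wss:// requests as well as https://;
    # a stricter policy can drop websockets:
    policy = DefaultCookiePolicy(secure_protocols=("https",))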
@@ -1599,7 +1599,7 @@ class CookieJar:
headers = response.info()
rfc2965_hdrs = headers.get_all("Set-Cookie2", [])
ns_hdrs = headers.get_all("Set-Cookie", [])
- self._policy._now = self._now = int(time.time())
+ self._policy._now = self._now = int(time.time())
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
@@ -1781,7 +1781,7 @@ class FileCookieJar(CookieJar):
"""
CookieJar.__init__(self, policy)
if filename is not None:
- filename = os.fspath(filename)
+ filename = os.fspath(filename)
self.filename = filename
self.delayload = bool(delayload)
diff --git a/contrib/tools/python3/src/Lib/http/cookies.py b/contrib/tools/python3/src/Lib/http/cookies.py
index 35ac2dc6ae..efdd950d07 100644
--- a/contrib/tools/python3/src/Lib/http/cookies.py
+++ b/contrib/tools/python3/src/Lib/http/cookies.py
@@ -131,7 +131,7 @@ Finis.
#
import re
import string
-import types
+import types
__all__ = ["CookieError", "BaseCookie", "SimpleCookie"]
@@ -257,7 +257,7 @@ class Morsel(dict):
In a cookie, each such pair may have several attributes, so this class is
used to keep the attributes associated with the appropriate key,value pair.
This class also includes a coded_value attribute, which is used to hold
- the network representation of the value.
+ the network representation of the value.
"""
# RFC 2109 lists these attributes as reserved:
# path comment domain
@@ -281,7 +281,7 @@ class Morsel(dict):
"secure" : "Secure",
"httponly" : "HttpOnly",
"version" : "Version",
- "samesite" : "SameSite",
+ "samesite" : "SameSite",
}
_flags = {'secure', 'httponly'}
@@ -420,9 +420,9 @@ class Morsel(dict):
# Return the result
return _semispacejoin(result)
- __class_getitem__ = classmethod(types.GenericAlias)
-
+ __class_getitem__ = classmethod(types.GenericAlias)
+
#
# Pattern for finding cookie
#
diff --git a/contrib/tools/python3/src/Lib/http/server.py b/contrib/tools/python3/src/Lib/http/server.py
index d7cce20432..ca40a0f1f1 100644
--- a/contrib/tools/python3/src/Lib/http/server.py
+++ b/contrib/tools/python3/src/Lib/http/server.py
@@ -103,7 +103,7 @@ import socketserver
import sys
import time
import urllib.parse
-import contextlib
+import contextlib
from functools import partial
from http import HTTPStatus
@@ -639,17 +639,17 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
"""
server_version = "SimpleHTTP/" + __version__
- extensions_map = _encodings_map_default = {
- '.gz': 'application/gzip',
- '.Z': 'application/octet-stream',
- '.bz2': 'application/x-bzip2',
- '.xz': 'application/x-xz',
- }
+ extensions_map = _encodings_map_default = {
+ '.gz': 'application/gzip',
+ '.Z': 'application/octet-stream',
+ '.bz2': 'application/x-bzip2',
+ '.xz': 'application/x-xz',
+ }
def __init__(self, *args, directory=None, **kwargs):
if directory is None:
directory = os.getcwd()
- self.directory = os.fspath(directory)
+ self.directory = os.fspath(directory)
super().__init__(*args, **kwargs)
def do_GET(self):
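The directory parameter added above is what `python -m http.server -d DIR` plugs in; a direct-use sketch (the served path is a placeholder):

    from functools import partial
    from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer

    handler = partial(SimpleHTTPRequestHandler, directory='/srv/static')  # hypothetical dir
    with ThreadingHTTPServer(('127.0.0.1', 8000), handler) as httpd:
        httpd.serve_forever()  # Ctrl-C to stop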
@@ -689,7 +689,7 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
parts[3], parts[4])
new_url = urllib.parse.urlunsplit(new_parts)
self.send_header("Location", new_url)
- self.send_header("Content-Length", "0")
+ self.send_header("Content-Length", "0")
self.end_headers()
return None
for index in "index.html", "index.htm":
@@ -700,14 +700,14 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
else:
return self.list_directory(path)
ctype = self.guess_type(path)
- # check for trailing "/" which should return 404. See Issue17324
- # The test for this was added in test_httpserver.py
- # However, some OS platforms accept a trailing slash as a filename
- # See discussion on python-dev and Issue34711 regarding
- # parsing and rejection of filenames with a trailing slash
- if path.endswith("/"):
- self.send_error(HTTPStatus.NOT_FOUND, "File not found")
- return None
+ # check for trailing "/" which should return 404. See Issue17324
+ # The test for this was added in test_httpserver.py
+ # However, some OS platforms accept a trailing slash as a filename
+ # See discussion on python-dev and Issue34711 regarding
+ # parsing and rejection of filenames with a trailing slash
+ if path.endswith("/"):
+ self.send_error(HTTPStatus.NOT_FOUND, "File not found")
+ return None
try:
f = open(path, 'rb')
except OSError:
@@ -879,10 +879,10 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
ext = ext.lower()
if ext in self.extensions_map:
return self.extensions_map[ext]
- guess, _ = mimetypes.guess_type(path)
- if guess:
- return guess
- return 'application/octet-stream'
+ guess, _ = mimetypes.guess_type(path)
+ if guess:
+ return guess
+ return 'application/octet-stream'
# Utilities for CGIHTTPRequestHandler
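The lookup order in guess_type() reduces to three steps; a trimmed sketch:

    import mimetypes
    import posixpath

    _extensions_map = {'.gz': 'application/gzip'}    # trimmed copy of the table above

    def content_type(path):
        ext = posixpath.splitext(path)[1].lower()
        if ext in _extensions_map:                   # 1. the handler's own table wins
            return _extensions_map[ext]
        guess, _ = mimetypes.guess_type(path)
        if guess:                                    # 2. then the mimetypes database
            return guess
        return 'application/octet-stream'            # 3. final fallback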
@@ -1013,10 +1013,10 @@ class CGIHTTPRequestHandler(SimpleHTTPRequestHandler):
"""
collapsed_path = _url_collapse_path(self.path)
dir_sep = collapsed_path.find('/', 1)
- while dir_sep > 0 and not collapsed_path[:dir_sep] in self.cgi_directories:
- dir_sep = collapsed_path.find('/', dir_sep+1)
- if dir_sep > 0:
- head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:]
+ while dir_sep > 0 and not collapsed_path[:dir_sep] in self.cgi_directories:
+ dir_sep = collapsed_path.find('/', dir_sep+1)
+ if dir_sep > 0:
+ head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:]
self.cgi_info = head, tail
return True
return False
@@ -1124,7 +1124,7 @@ class CGIHTTPRequestHandler(SimpleHTTPRequestHandler):
referer = self.headers.get('referer')
if referer:
env['HTTP_REFERER'] = referer
- accept = self.headers.get_all('accept', ())
+ accept = self.headers.get_all('accept', ())
env['HTTP_ACCEPT'] = ','.join(accept)
ua = self.headers.get('user-agent')
if ua:
@@ -1160,9 +1160,9 @@ class CGIHTTPRequestHandler(SimpleHTTPRequestHandler):
while select.select([self.rfile], [], [], 0)[0]:
if not self.rfile.read(1):
break
- exitcode = os.waitstatus_to_exitcode(sts)
- if exitcode:
- self.log_error(f"CGI script exit code {exitcode}")
+ exitcode = os.waitstatus_to_exitcode(sts)
+ if exitcode:
+ self.log_error(f"CGI script exit code {exitcode}")
return
# Child
try:
@@ -1221,34 +1221,34 @@ class CGIHTTPRequestHandler(SimpleHTTPRequestHandler):
self.log_message("CGI script exited OK")
-def _get_best_family(*address):
- infos = socket.getaddrinfo(
- *address,
- type=socket.SOCK_STREAM,
- flags=socket.AI_PASSIVE,
- )
- family, type, proto, canonname, sockaddr = next(iter(infos))
- return family, sockaddr
-
-
+def _get_best_family(*address):
+ infos = socket.getaddrinfo(
+ *address,
+ type=socket.SOCK_STREAM,
+ flags=socket.AI_PASSIVE,
+ )
+ family, type, proto, canonname, sockaddr = next(iter(infos))
+ return family, sockaddr
+
+
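_get_best_family() leans on getaddrinfo() ordering; what it yields on a typical dual-stack host (results vary by platform and resolver):

    import socket

    infos = socket.getaddrinfo(None, 8000, type=socket.SOCK_STREAM,
                               flags=socket.AI_PASSIVE)
    family, _type, _proto, _canon, sockaddr = infos[0]
    # Usually AF_INET6 with sockaddr ('::', 8000, 0, 0), so the test server binds
    # an IPv6 wildcard socket and relies on the dual-stack option enabled below.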
def test(HandlerClass=BaseHTTPRequestHandler,
ServerClass=ThreadingHTTPServer,
- protocol="HTTP/1.0", port=8000, bind=None):
+ protocol="HTTP/1.0", port=8000, bind=None):
"""Test the HTTP request handler class.
This runs an HTTP server on port 8000 (or the port argument).
"""
- ServerClass.address_family, addr = _get_best_family(bind, port)
+ ServerClass.address_family, addr = _get_best_family(bind, port)
HandlerClass.protocol_version = protocol
- with ServerClass(addr, HandlerClass) as httpd:
- host, port = httpd.socket.getsockname()[:2]
- url_host = f'[{host}]' if ':' in host else host
- print(
- f"Serving HTTP on {host} port {port} "
- f"(http://{url_host}:{port}/) ..."
- )
+ with ServerClass(addr, HandlerClass) as httpd:
+ host, port = httpd.socket.getsockname()[:2]
+ url_host = f'[{host}]' if ':' in host else host
+ print(
+ f"Serving HTTP on {host} port {port} "
+ f"(http://{url_host}:{port}/) ..."
+ )
try:
httpd.serve_forever()
except KeyboardInterrupt:
@@ -1261,7 +1261,7 @@ if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--cgi', action='store_true',
help='Run as CGI Server')
- parser.add_argument('--bind', '-b', metavar='ADDRESS',
+ parser.add_argument('--bind', '-b', metavar='ADDRESS',
help='Specify alternate bind address '
'[default: all interfaces]')
parser.add_argument('--directory', '-d', default=os.getcwd(),
@@ -1277,19 +1277,19 @@ if __name__ == '__main__':
else:
handler_class = partial(SimpleHTTPRequestHandler,
directory=args.directory)
-
- # ensure dual-stack is not disabled; ref #38907
- class DualStackServer(ThreadingHTTPServer):
- def server_bind(self):
- # suppress exception when protocol is IPv4
- with contextlib.suppress(Exception):
- self.socket.setsockopt(
- socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
- return super().server_bind()
-
- test(
- HandlerClass=handler_class,
- ServerClass=DualStackServer,
- port=args.port,
- bind=args.bind,
- )
+
+ # ensure dual-stack is not disabled; ref #38907
+ class DualStackServer(ThreadingHTTPServer):
+ def server_bind(self):
+ # suppress exception when protocol is IPv4
+ with contextlib.suppress(Exception):
+ self.socket.setsockopt(
+ socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
+ return super().server_bind()
+
+ test(
+ HandlerClass=handler_class,
+ ServerClass=DualStackServer,
+ port=args.port,
+ bind=args.bind,
+ )
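Clearing IPV6_V6ONLY is the whole trick behind DualStackServer; in isolation (only where the platform allows it, hence the suppress() above):

    import socket

    s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
    s.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)  # also accept v4-mapped peers
    s.bind(('', 8000))  # one socket now serves both 127.0.0.1 and ::1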
diff --git a/contrib/tools/python3/src/Lib/imaplib.py b/contrib/tools/python3/src/Lib/imaplib.py
index d9720f20c3..cac9a39c5a 100644
--- a/contrib/tools/python3/src/Lib/imaplib.py
+++ b/contrib/tools/python3/src/Lib/imaplib.py
@@ -98,7 +98,7 @@ Commands = {
'THREAD': ('SELECTED',),
'UID': ('SELECTED',),
'UNSUBSCRIBE': ('AUTH', 'SELECTED'),
- 'UNSELECT': ('SELECTED',),
+ 'UNSELECT': ('SELECTED',),
}
# Patterns to match server responses
@@ -136,13 +136,13 @@ class IMAP4:
r"""IMAP4 client class.
- Instantiate with: IMAP4([host[, port[, timeout=None]]])
+ Instantiate with: IMAP4([host[, port[, timeout=None]]])
host - host's name (default: localhost);
port - port number (default: standard IMAP4 port).
- timeout - socket timeout (default: None)
- If timeout is not given or is None,
- the global default socket timeout is used
+ timeout - socket timeout (default: None)
+ If timeout is not given or is None,
+ the global default socket timeout is used
All IMAP4rev1 commands are supported by methods of the same
name (in lower-case).
@@ -185,7 +185,7 @@ class IMAP4:
class abort(error): pass # Service errors - close and retry
class readonly(abort): pass # Mailbox status changed to READ-ONLY
- def __init__(self, host='', port=IMAP4_PORT, timeout=None):
+ def __init__(self, host='', port=IMAP4_PORT, timeout=None):
self.debug = Debug
self.state = 'LOGOUT'
self.literal = None # A literal argument to a command
@@ -199,7 +199,7 @@ class IMAP4:
# Open socket to server.
- self.open(host, port, timeout)
+ self.open(host, port, timeout)
try:
self._connect()
@@ -276,9 +276,9 @@ class IMAP4:
return self
def __exit__(self, *args):
- if self.state == "LOGOUT":
- return
-
+ if self.state == "LOGOUT":
+ return
+
try:
self.logout()
except OSError:
@@ -288,20 +288,20 @@ class IMAP4:
# Overridable methods
- def _create_socket(self, timeout):
+ def _create_socket(self, timeout):
# Default value of IMAP4.host is '', but socket.getaddrinfo()
# (which is used by socket.create_connection()) expects None
# as a default value for host.
- if timeout is not None and not timeout:
- raise ValueError('Non-blocking socket (timeout=0) is not supported')
+ if timeout is not None and not timeout:
+ raise ValueError('Non-blocking socket (timeout=0) is not supported')
host = None if not self.host else self.host
- sys.audit("imaplib.open", self, self.host, self.port)
- address = (host, self.port)
- if timeout is not None:
- return socket.create_connection(address, timeout)
- return socket.create_connection(address)
+ sys.audit("imaplib.open", self, self.host, self.port)
+ address = (host, self.port)
+ if timeout is not None:
+ return socket.create_connection(address, timeout)
+ return socket.create_connection(address)
- def open(self, host='', port=IMAP4_PORT, timeout=None):
+ def open(self, host='', port=IMAP4_PORT, timeout=None):
"""Setup connection to remote server on "host:port"
(default: localhost:standard IMAP4 port).
This connection will be used by the routines:
@@ -309,7 +309,7 @@ class IMAP4:
"""
self.host = host
self.port = port
- self.sock = self._create_socket(timeout)
+ self.sock = self._create_socket(timeout)
self.file = self.sock.makefile('rb')
@@ -328,7 +328,7 @@ class IMAP4:
def send(self, data):
"""Send data to remote."""
- sys.audit("imaplib.send", self, data)
+ sys.audit("imaplib.send", self, data)
self.sock.sendall(data)
@@ -511,7 +511,7 @@ class IMAP4:
def enable(self, capability):
"""Send an RFC5161 enable string to the server.
- (typ, [data]) = <instance>.enable(capability)
+ (typ, [data]) = <instance>.enable(capability)
"""
if 'ENABLE' not in self.capabilities:
raise IMAP4.error("Server does not support ENABLE")
@@ -639,7 +639,7 @@ class IMAP4:
Returns server 'BYE' response.
"""
self.state = 'LOGOUT'
- typ, dat = self._simple_command('LOGOUT')
+ typ, dat = self._simple_command('LOGOUT')
self.shutdown()
return typ, dat
@@ -903,22 +903,22 @@ class IMAP4:
return self._simple_command('UNSUBSCRIBE', mailbox)
- def unselect(self):
- """Free server's resources associated with the selected mailbox
- and returns the server to the authenticated state.
- This command performs the same actions as CLOSE, except
- that no messages are permanently removed from the currently
- selected mailbox.
-
- (typ, [data]) = <instance>.unselect()
- """
- try:
- typ, data = self._simple_command('UNSELECT')
- finally:
- self.state = 'AUTH'
- return typ, data
-
-
+ def unselect(self):
+ """Free server's resources associated with the selected mailbox
+ and returns the server to the authenticated state.
+ This command performs the same actions as CLOSE, except
+ that no messages are permanently removed from the currently
+ selected mailbox.
+
+ (typ, [data]) = <instance>.unselect()
+ """
+ try:
+ typ, data = self._simple_command('UNSELECT')
+ finally:
+ self.state = 'AUTH'
+ return typ, data
+
+
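A usage sketch for the new unselect() (host and credentials are placeholders):

    import imaplib

    with imaplib.IMAP4_SSL('imap.example.org') as M:   # hypothetical server
        M.login('user', 'secret')
        M.select('INBOX')
        typ, data = M.unselect()   # back to AUTH state; unlike close(), nothing is expunged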
def xatom(self, name, *args):
"""Allow simple extension commands
notified by server in CAPABILITY response.
@@ -1039,17 +1039,17 @@ class IMAP4:
def _command_complete(self, name, tag):
- logout = (name == 'LOGOUT')
+ logout = (name == 'LOGOUT')
# BYE is expected after LOGOUT
- if not logout:
+ if not logout:
self._check_bye()
try:
- typ, data = self._get_tagged_response(tag, expect_bye=logout)
+ typ, data = self._get_tagged_response(tag, expect_bye=logout)
except self.abort as val:
raise self.abort('command: %s => %s' % (name, val))
except self.error as val:
raise self.error('command: %s => %s' % (name, val))
- if not logout:
+ if not logout:
self._check_bye()
if typ == 'BAD':
raise self.error('%s command error: %s %s' % (name, typ, data))
@@ -1145,7 +1145,7 @@ class IMAP4:
return resp
- def _get_tagged_response(self, tag, expect_bye=False):
+ def _get_tagged_response(self, tag, expect_bye=False):
while 1:
result = self.tagged_commands[tag]
@@ -1153,13 +1153,13 @@ class IMAP4:
del self.tagged_commands[tag]
return result
- if expect_bye:
- typ = 'BYE'
- bye = self.untagged_responses.pop(typ, None)
- if bye is not None:
- # Server replies to the "LOGOUT" command with "BYE"
- return (typ, bye)
-
+ if expect_bye:
+ typ = 'BYE'
+ bye = self.untagged_responses.pop(typ, None)
+ if bye is not None:
+ # Server replies to the "LOGOUT" command with "BYE"
+ return (typ, bye)
+
# If we've seen a BYE at this point, the socket will be
# closed, so report the BYE now.
self._check_bye()
@@ -1286,7 +1286,7 @@ if HAVE_SSL:
"""IMAP4 client class over SSL connection
- Instantiate with: IMAP4_SSL([host[, port[, keyfile[, certfile[, ssl_context[, timeout=None]]]]]])
+ Instantiate with: IMAP4_SSL([host[, port[, keyfile[, certfile[, ssl_context[, timeout=None]]]]]])
host - host's name (default: localhost);
port - port number (default: standard IMAP4 SSL port);
@@ -1296,15 +1296,15 @@ if HAVE_SSL:
and private key (default: None)
Note: if ssl_context is provided, then parameters keyfile or
certfile should not be set otherwise ValueError is raised.
- timeout - socket timeout (default: None) If timeout is not given or is None,
- the global default socket timeout is used
+ timeout - socket timeout (default: None) If timeout is not given or is None,
+ the global default socket timeout is used
for more documentation see the docstring of the parent class IMAP4.
"""
def __init__(self, host='', port=IMAP4_SSL_PORT, keyfile=None,
- certfile=None, ssl_context=None, timeout=None):
+ certfile=None, ssl_context=None, timeout=None):
if ssl_context is not None and keyfile is not None:
raise ValueError("ssl_context and keyfile arguments are mutually "
"exclusive")
@@ -1321,20 +1321,20 @@ if HAVE_SSL:
ssl_context = ssl._create_stdlib_context(certfile=certfile,
keyfile=keyfile)
self.ssl_context = ssl_context
- IMAP4.__init__(self, host, port, timeout)
+ IMAP4.__init__(self, host, port, timeout)
- def _create_socket(self, timeout):
- sock = IMAP4._create_socket(self, timeout)
+ def _create_socket(self, timeout):
+ sock = IMAP4._create_socket(self, timeout)
return self.ssl_context.wrap_socket(sock,
server_hostname=self.host)
- def open(self, host='', port=IMAP4_SSL_PORT, timeout=None):
+ def open(self, host='', port=IMAP4_SSL_PORT, timeout=None):
"""Setup connection to remote server on "host:port".
(default: localhost:standard IMAP4 SSL port).
This connection will be used by the routines:
read, readline, send, shutdown.
"""
- IMAP4.open(self, host, port, timeout)
+ IMAP4.open(self, host, port, timeout)
__all__.append("IMAP4_SSL")
@@ -1356,7 +1356,7 @@ class IMAP4_stream(IMAP4):
IMAP4.__init__(self)
- def open(self, host=None, port=None, timeout=None):
+ def open(self, host=None, port=None, timeout=None):
"""Setup a stream connection.
This connection will be used by the routines:
read, readline, send, shutdown.
diff --git a/contrib/tools/python3/src/Lib/imghdr.py b/contrib/tools/python3/src/Lib/imghdr.py
index 6e01fd8574..450a8c4a74 100644
--- a/contrib/tools/python3/src/Lib/imghdr.py
+++ b/contrib/tools/python3/src/Lib/imghdr.py
@@ -152,7 +152,7 @@ def testall(list, recursive, toplevel):
if recursive or toplevel:
print('recursing down:')
import glob
- names = glob.glob(os.path.join(glob.escape(filename), '*'))
+ names = glob.glob(os.path.join(glob.escape(filename), '*'))
testall(names, recursive, 0)
else:
print('*** directory (use -r) ***')
diff --git a/contrib/tools/python3/src/Lib/importlib/__init__.py b/contrib/tools/python3/src/Lib/importlib/__init__.py
index 0c73c505f9..feaba8010d 100644
--- a/contrib/tools/python3/src/Lib/importlib/__init__.py
+++ b/contrib/tools/python3/src/Lib/importlib/__init__.py
@@ -48,8 +48,8 @@ else:
sys.modules['importlib._bootstrap_external'] = _bootstrap_external
# To simplify imports in test code
-_pack_uint32 = _bootstrap_external._pack_uint32
-_unpack_uint32 = _bootstrap_external._unpack_uint32
+_pack_uint32 = _bootstrap_external._pack_uint32
+_unpack_uint32 = _bootstrap_external._unpack_uint32
# Fully bootstrapped at this point, import whatever you like, circular
# dependencies and startup overhead minimisation permitting :)
diff --git a/contrib/tools/python3/src/Lib/importlib/_bootstrap.py b/contrib/tools/python3/src/Lib/importlib/_bootstrap.py
index e00b27ece2..4a2553175c 100644
--- a/contrib/tools/python3/src/Lib/importlib/_bootstrap.py
+++ b/contrib/tools/python3/src/Lib/importlib/_bootstrap.py
@@ -7,9 +7,9 @@ work. One should use importlib as the public-facing version of this module.
"""
#
-# IMPORTANT: Whenever making changes to this module, be sure to run a top-level
-# `make regen-importlib` followed by `make` in order to get the frozen version
-# of the module updated. Not doing so will cause the Makefile to fail for
+# IMPORTANT: Whenever making changes to this module, be sure to run a top-level
+# `make regen-importlib` followed by `make` in order to get the frozen version
+# of the module updated. Not doing so will cause the Makefile to fail for
# all others who don't have a ./python around to freeze the module
# in the early stages of compilation.
#
@@ -67,7 +67,7 @@ class _ModuleLock:
# Deadlock avoidance for concurrent circular imports.
me = _thread.get_ident()
tid = self.owner
- seen = set()
+ seen = set()
while True:
lock = _blocking_on.get(tid)
if lock is None:
@@ -75,14 +75,14 @@ class _ModuleLock:
tid = lock.owner
if tid == me:
return True
- if tid in seen:
- # bpo 38091: the chain of tid's we encounter here
- # eventually leads to a fixpoint or a cycle, but
- # does not reach 'me'. This means we would not
- # actually deadlock. This can happen if other
- # threads are at the beginning of acquire() below.
- return False
- seen.add(tid)
+ if tid in seen:
+ # bpo 38091: the chain of tid's we encounter here
+ # eventually leads to a fixpoint or a cycle, but
+ # does not reach 'me'. This means we would not
+ # actually deadlock. This can happen if other
+ # threads are at the beginning of acquire() below.
+ return False
+ seen.add(tid)
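The bpo-38091 fix walks the chain of lock owners and bails out on a repeat; the same logic as a standalone sketch:

    def would_deadlock(me, first_owner, blocking_on):
        seen = set()
        tid = first_owner
        while True:
            lock = blocking_on.get(tid)
            if lock is None:
                return False    # chain ends without reaching us
            tid = lock.owner
            if tid == me:
                return True     # we appear in our own wait chain: real deadlock
            if tid in seen:
                return False    # a cycle that excludes us; not a deadlock
            seen.add(tid)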
def acquire(self):
"""
@@ -380,7 +380,7 @@ class ModuleSpec:
self.cached == other.cached and
self.has_location == other.has_location)
except AttributeError:
- return NotImplemented
+ return NotImplemented
@property
def cached(self):
@@ -596,44 +596,44 @@ def _exec(spec, module):
if sys.modules.get(name) is not module:
msg = 'module {!r} not in sys.modules'.format(name)
raise ImportError(msg, name=name)
- try:
- if spec.loader is None:
- if spec.submodule_search_locations is None:
- raise ImportError('missing loader', name=spec.name)
- # Namespace package.
- _init_module_attrs(spec, module, override=True)
- else:
- _init_module_attrs(spec, module, override=True)
- if not hasattr(spec.loader, 'exec_module'):
- # (issue19713) Once BuiltinImporter and ExtensionFileLoader
- # have exec_module() implemented, we can add a deprecation
- # warning here.
- spec.loader.load_module(name)
- else:
- spec.loader.exec_module(module)
- finally:
- # Update the order of insertion into sys.modules for module
- # clean-up at shutdown.
- module = sys.modules.pop(spec.name)
- sys.modules[spec.name] = module
- return module
+ try:
+ if spec.loader is None:
+ if spec.submodule_search_locations is None:
+ raise ImportError('missing loader', name=spec.name)
+ # Namespace package.
+ _init_module_attrs(spec, module, override=True)
+ else:
+ _init_module_attrs(spec, module, override=True)
+ if not hasattr(spec.loader, 'exec_module'):
+ # (issue19713) Once BuiltinImporter and ExtensionFileLoader
+ # have exec_module() implemented, we can add a deprecation
+ # warning here.
+ spec.loader.load_module(name)
+ else:
+ spec.loader.exec_module(module)
+ finally:
+ # Update the order of insertion into sys.modules for module
+ # clean-up at shutdown.
+ module = sys.modules.pop(spec.name)
+ sys.modules[spec.name] = module
+ return module
def _load_backward_compatible(spec):
# (issue19713) Once BuiltinImporter and ExtensionFileLoader
# have exec_module() implemented, we can add a deprecation
# warning here.
- try:
- spec.loader.load_module(spec.name)
- except:
- if spec.name in sys.modules:
- module = sys.modules.pop(spec.name)
- sys.modules[spec.name] = module
- raise
+ try:
+ spec.loader.load_module(spec.name)
+ except:
+ if spec.name in sys.modules:
+ module = sys.modules.pop(spec.name)
+ sys.modules[spec.name] = module
+ raise
# The module must be in sys.modules at this point!
- # Move it to the end of sys.modules.
- module = sys.modules.pop(spec.name)
- sys.modules[spec.name] = module
+ # Move it to the end of sys.modules.
+ module = sys.modules.pop(spec.name)
+ sys.modules[spec.name] = module
if getattr(module, '__loader__', None) is None:
try:
module.__loader__ = spec.loader
@@ -659,43 +659,43 @@ def _load_backward_compatible(spec):
def _load_unlocked(spec):
# A helper for direct use by the import system.
if spec.loader is not None:
- # Not a namespace package.
+ # Not a namespace package.
if not hasattr(spec.loader, 'exec_module'):
return _load_backward_compatible(spec)
module = module_from_spec(spec)
- # This must be done before putting the module in sys.modules
- # (otherwise an optimization shortcut in import.c becomes
- # wrong).
- spec._initializing = True
- try:
- sys.modules[spec.name] = module
- try:
- if spec.loader is None:
- if spec.submodule_search_locations is None:
- raise ImportError('missing loader', name=spec.name)
- # A namespace package so do nothing.
- else:
- spec.loader.exec_module(module)
- except:
- try:
- del sys.modules[spec.name]
- except KeyError:
- pass
- raise
- # Move the module to the end of sys.modules.
- # We don't ensure that the import-related module attributes get
- # set in the sys.modules replacement case. Such modules are on
- # their own.
- module = sys.modules.pop(spec.name)
- sys.modules[spec.name] = module
- _verbose_message('import {!r} # {!r}', spec.name, spec.loader)
- finally:
- spec._initializing = False
-
- return module
-
+ # This must be done before putting the module in sys.modules
+ # (otherwise an optimization shortcut in import.c becomes
+ # wrong).
+ spec._initializing = True
+ try:
+ sys.modules[spec.name] = module
+ try:
+ if spec.loader is None:
+ if spec.submodule_search_locations is None:
+ raise ImportError('missing loader', name=spec.name)
+ # A namespace package so do nothing.
+ else:
+ spec.loader.exec_module(module)
+ except:
+ try:
+ del sys.modules[spec.name]
+ except KeyError:
+ pass
+ raise
+ # Move the module to the end of sys.modules.
+ # We don't ensure that the import-related module attributes get
+ # set in the sys.modules replacement case. Such modules are on
+ # their own.
+ module = sys.modules.pop(spec.name)
+ sys.modules[spec.name] = module
+ _verbose_message('import {!r} # {!r}', spec.name, spec.loader)
+ finally:
+ spec._initializing = False
+
+ return module
+
# A method used during testing of _load_unlocked() and by
# _load_module_shim().
def _load(spec):
@@ -722,8 +722,8 @@ class BuiltinImporter:
"""
- _ORIGIN = "built-in"
-
+ _ORIGIN = "built-in"
+
@staticmethod
def module_repr(module):
"""Return repr for the module.
@@ -731,14 +731,14 @@ class BuiltinImporter:
The method is deprecated. The import machinery does the job itself.
"""
- return f'<module {module.__name__!r} ({BuiltinImporter._ORIGIN})>'
+ return f'<module {module.__name__!r} ({BuiltinImporter._ORIGIN})>'
@classmethod
def find_spec(cls, fullname, path=None, target=None):
if path is not None:
return None
if _imp.is_builtin(fullname):
- return spec_from_loader(fullname, cls, origin=cls._ORIGIN)
+ return spec_from_loader(fullname, cls, origin=cls._ORIGIN)
else:
return None
@@ -797,8 +797,8 @@ class FrozenImporter:
"""
- _ORIGIN = "frozen"
-
+ _ORIGIN = "frozen"
+
@staticmethod
def module_repr(m):
"""Return repr for the module.
@@ -806,12 +806,12 @@ class FrozenImporter:
The method is deprecated. The import machinery does the job itself.
"""
- return '<module {!r} ({})>'.format(m.__name__, FrozenImporter._ORIGIN)
+ return '<module {!r} ({})>'.format(m.__name__, FrozenImporter._ORIGIN)
@classmethod
def find_spec(cls, fullname, path=None, target=None):
if _imp.is_frozen(fullname):
- return spec_from_loader(fullname, cls, origin=cls._ORIGIN)
+ return spec_from_loader(fullname, cls, origin=cls._ORIGIN)
else:
return None
@@ -884,7 +884,7 @@ def _resolve_name(name, package, level):
"""Resolve a relative module name to an absolute one."""
bits = package.rsplit('.', level - 1)
if len(bits) < level:
- raise ImportError('attempted relative import beyond top-level package')
+ raise ImportError('attempted relative import beyond top-level package')
base = bits[0]
return '{}.{}'.format(base, name) if name else base
@@ -987,12 +987,12 @@ def _find_and_load_unlocked(name, import_):
if parent:
# Set the module as an attribute on its parent.
parent_module = sys.modules[parent]
- child = name.rpartition('.')[2]
- try:
- setattr(parent_module, child, module)
- except AttributeError:
- msg = f"Cannot set an attribute on {parent!r} for child module {child!r}"
- _warnings.warn(msg, ImportWarning)
+ child = name.rpartition('.')[2]
+ try:
+ setattr(parent_module, child, module)
+ except AttributeError:
+ msg = f"Cannot set an attribute on {parent!r} for child module {child!r}"
+ _warnings.warn(msg, ImportWarning)
return module
@@ -1040,30 +1040,30 @@ def _handle_fromlist(module, fromlist, import_, *, recursive=False):
"""
# The hell that is fromlist ...
# If a package was imported, try to import stuff from fromlist.
- for x in fromlist:
- if not isinstance(x, str):
- if recursive:
- where = module.__name__ + '.__all__'
- else:
- where = "``from list''"
- raise TypeError(f"Item in {where} must be str, "
- f"not {type(x).__name__}")
- elif x == '*':
- if not recursive and hasattr(module, '__all__'):
- _handle_fromlist(module, module.__all__, import_,
- recursive=True)
- elif not hasattr(module, x):
- from_name = '{}.{}'.format(module.__name__, x)
- try:
- _call_with_frames_removed(import_, from_name)
- except ModuleNotFoundError as exc:
- # Backwards-compatibility dictates we ignore failed
- # imports triggered by fromlist for modules that don't
- # exist.
- if (exc.name == from_name and
- sys.modules.get(from_name, _NEEDS_LOADING) is not None):
- continue
- raise
+ for x in fromlist:
+ if not isinstance(x, str):
+ if recursive:
+ where = module.__name__ + '.__all__'
+ else:
+ where = "``from list''"
+ raise TypeError(f"Item in {where} must be str, "
+ f"not {type(x).__name__}")
+ elif x == '*':
+ if not recursive and hasattr(module, '__all__'):
+ _handle_fromlist(module, module.__all__, import_,
+ recursive=True)
+ elif not hasattr(module, x):
+ from_name = '{}.{}'.format(module.__name__, x)
+ try:
+ _call_with_frames_removed(import_, from_name)
+ except ModuleNotFoundError as exc:
+ # Backwards-compatibility dictates we ignore failed
+ # imports triggered by fromlist for modules that don't
+ # exist.
+ if (exc.name == from_name and
+ sys.modules.get(from_name, _NEEDS_LOADING) is not None):
+ continue
+ raise
return module
@@ -1125,10 +1125,10 @@ def __import__(name, globals=None, locals=None, fromlist=(), level=0):
# Slice end needs to be positive to alleviate need to special-case
# when ``'.' not in name``.
return sys.modules[module.__name__[:len(module.__name__)-cut_off]]
- elif hasattr(module, '__path__'):
- return _handle_fromlist(module, fromlist, _gcd_import)
+ elif hasattr(module, '__path__'):
+ return _handle_fromlist(module, fromlist, _gcd_import)
else:
- return module
+ return module
def _builtin_from_name(name):
diff --git a/contrib/tools/python3/src/Lib/importlib/_bootstrap_external.py b/contrib/tools/python3/src/Lib/importlib/_bootstrap_external.py
index fe31f437da..0a28784837 100644
--- a/contrib/tools/python3/src/Lib/importlib/_bootstrap_external.py
+++ b/contrib/tools/python3/src/Lib/importlib/_bootstrap_external.py
@@ -19,34 +19,34 @@ work. One should use importlib as the public-facing version of this module.
# reference any injected objects! This includes not only global code but also
# anything specified at the class level.
-# Import builtin modules
-import _imp
-import _io
-import sys
-import _warnings
-import marshal
-
-
-_MS_WINDOWS = (sys.platform == 'win32')
-if _MS_WINDOWS:
- import nt as _os
- import winreg
-else:
- import posix as _os
-
-
-if _MS_WINDOWS:
- path_separators = ['\\', '/']
-else:
- path_separators = ['/']
-# Assumption made in _path_join()
-assert all(len(sep) == 1 for sep in path_separators)
-path_sep = path_separators[0]
-path_sep_tuple = tuple(path_separators)
-path_separators = ''.join(path_separators)
-_pathseps_with_colon = {f':{s}' for s in path_separators}
-
-
+# Import builtin modules
+import _imp
+import _io
+import sys
+import _warnings
+import marshal
+
+
+_MS_WINDOWS = (sys.platform == 'win32')
+if _MS_WINDOWS:
+ import nt as _os
+ import winreg
+else:
+ import posix as _os
+
+
+if _MS_WINDOWS:
+ path_separators = ['\\', '/']
+else:
+ path_separators = ['/']
+# Assumption made in _path_join()
+assert all(len(sep) == 1 for sep in path_separators)
+path_sep = path_separators[0]
+path_sep_tuple = tuple(path_separators)
+path_separators = ''.join(path_separators)
+_pathseps_with_colon = {f':{s}' for s in path_separators}
+
+
# Bootstrap-related code ######################################################
_CASE_INSENSITIVE_PLATFORMS_STR_KEY = 'win',
_CASE_INSENSITIVE_PLATFORMS_BYTES_KEY = 'cygwin', 'darwin'
@@ -62,8 +62,8 @@ def _make_relax_case():
key = b'PYTHONCASEOK'
def _relax_case():
- """True if filenames must be checked case-insensitively and ignore environment flags are not set."""
- return not sys.flags.ignore_environment and key in _os.environ
+ """True if filenames must be checked case-insensitively and ignore environment flags are not set."""
+ return not sys.flags.ignore_environment and key in _os.environ
else:
def _relax_case():
"""True if filenames must be checked case-insensitively."""
@@ -71,65 +71,65 @@ def _make_relax_case():
return _relax_case
-def _pack_uint32(x):
+def _pack_uint32(x):
"""Convert a 32-bit integer to little-endian."""
return (int(x) & 0xFFFFFFFF).to_bytes(4, 'little')
-def _unpack_uint32(data):
+def _unpack_uint32(data):
"""Convert 4 bytes in little-endian to an integer."""
- assert len(data) == 4
- return int.from_bytes(data, 'little')
-
-def _unpack_uint16(data):
- """Convert 2 bytes in little-endian to an integer."""
- assert len(data) == 2
- return int.from_bytes(data, 'little')
-
-
-if _MS_WINDOWS:
- def _path_join(*path_parts):
- """Replacement for os.path.join()."""
- if not path_parts:
- return ""
- if len(path_parts) == 1:
- return path_parts[0]
- root = ""
- path = []
- for new_root, tail in map(_os._path_splitroot, path_parts):
- if new_root.startswith(path_sep_tuple) or new_root.endswith(path_sep_tuple):
- root = new_root.rstrip(path_separators) or root
- path = [path_sep + tail]
- elif new_root.endswith(':'):
- if root.casefold() != new_root.casefold():
- # Drive relative paths have to be resolved by the OS, so we reset the
- # tail but do not add a path_sep prefix.
- root = new_root
- path = [tail]
- else:
- path.append(tail)
- else:
- root = new_root or root
- path.append(tail)
- path = [p.rstrip(path_separators) for p in path if p]
- if len(path) == 1 and not path[0]:
- # Avoid losing the root's trailing separator when joining with nothing
- return root + path_sep
- return root + path_sep.join(path)
-
-else:
- def _path_join(*path_parts):
- """Replacement for os.path.join()."""
- return path_sep.join([part.rstrip(path_separators)
- for part in path_parts if part])
-
-
+ assert len(data) == 4
+ return int.from_bytes(data, 'little')
+
+def _unpack_uint16(data):
+ """Convert 2 bytes in little-endian to an integer."""
+ assert len(data) == 2
+ return int.from_bytes(data, 'little')
+
+
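A round-trip sketch for the helpers above, using the current magic value 3425 (its two little-endian bytes plus b'\r\n' form MAGIC_NUMBER further down):

    _pack_uint32(3425)               # -> b'a\r\x00\x00' (0x0D61, little-endian)
    _unpack_uint32(b'a\r\x00\x00')   # -> 3425
    _unpack_uint16(b'a\r')           # -> 3425, the 2-byte prefix used by the magic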
+if _MS_WINDOWS:
+ def _path_join(*path_parts):
+ """Replacement for os.path.join()."""
+ if not path_parts:
+ return ""
+ if len(path_parts) == 1:
+ return path_parts[0]
+ root = ""
+ path = []
+ for new_root, tail in map(_os._path_splitroot, path_parts):
+ if new_root.startswith(path_sep_tuple) or new_root.endswith(path_sep_tuple):
+ root = new_root.rstrip(path_separators) or root
+ path = [path_sep + tail]
+ elif new_root.endswith(':'):
+ if root.casefold() != new_root.casefold():
+ # Drive relative paths have to be resolved by the OS, so we reset the
+ # tail but do not add a path_sep prefix.
+ root = new_root
+ path = [tail]
+ else:
+ path.append(tail)
+ else:
+ root = new_root or root
+ path.append(tail)
+ path = [p.rstrip(path_separators) for p in path if p]
+ if len(path) == 1 and not path[0]:
+ # Avoid losing the root's trailing separator when joining with nothing
+ return root + path_sep
+ return root + path_sep.join(path)
+
+else:
+ def _path_join(*path_parts):
+ """Replacement for os.path.join()."""
+ return path_sep.join([part.rstrip(path_separators)
+ for part in path_parts if part])
+
+
def _path_split(path):
"""Replacement for os.path.split()."""
- i = max(path.rfind(p) for p in path_separators)
- if i < 0:
- return '', path
- return path[:i], path[i + 1:]
+ i = max(path.rfind(p) for p in path_separators)
+ if i < 0:
+ return '', path
+ return path[:i], path[i + 1:]
def _path_stat(path):
@@ -163,20 +163,20 @@ def _path_isdir(path):
return _path_is_mode_type(path, 0o040000)
-if _MS_WINDOWS:
- def _path_isabs(path):
- """Replacement for os.path.isabs."""
- if not path:
- return False
- root = _os._path_splitroot(path)[0].replace('/', '\\')
- return len(root) > 1 and (root.startswith('\\\\') or root.endswith('\\'))
-
-else:
- def _path_isabs(path):
- """Replacement for os.path.isabs."""
- return path.startswith(path_separators)
-
-
+if _MS_WINDOWS:
+ def _path_isabs(path):
+ """Replacement for os.path.isabs."""
+ if not path:
+ return False
+ root = _os._path_splitroot(path)[0].replace('/', '\\')
+ return len(root) > 1 and (root.startswith('\\\\') or root.endswith('\\'))
+
+else:
+ def _path_isabs(path):
+ """Replacement for os.path.isabs."""
+ return path.startswith(path_separators)
+
+
def _write_atomic(path, data, mode=0o666):
"""Best-effort function to write data to a path atomically.
Be prepared to handle a FileExistsError if concurrent writing of the
@@ -321,23 +321,23 @@ _code_type = type(_write_atomic.__code__)
# Python 3.7a2 3391 (update GET_AITER #31709)
# Python 3.7a4 3392 (PEP 552: Deterministic pycs #31650)
# Python 3.7b1 3393 (remove STORE_ANNOTATION opcode #32550)
-# Python 3.7b5 3394 (restored docstring as the first stmt in the body;
+# Python 3.7b5 3394 (restored docstring as the first stmt in the body;
# this might have affected the first line number #32911)
-# Python 3.8a1 3400 (move frame block handling to compiler #17611)
-# Python 3.8a1 3401 (add END_ASYNC_FOR #33041)
-# Python 3.8a1 3410 (PEP570 Python Positional-Only Parameters #36540)
-# Python 3.8b2 3411 (Reverse evaluation order of key: value in dict
-# comprehensions #35224)
-# Python 3.8b2 3412 (Swap the position of positional args and positional
-# only args in ast.arguments #37593)
-# Python 3.8b4 3413 (Fix "break" and "continue" in "finally" #37830)
-# Python 3.9a0 3420 (add LOAD_ASSERTION_ERROR #34880)
-# Python 3.9a0 3421 (simplified bytecode for with blocks #32949)
-# Python 3.9a0 3422 (remove BEGIN_FINALLY, END_FINALLY, CALL_FINALLY, POP_FINALLY bytecodes #33387)
-# Python 3.9a2 3423 (add IS_OP, CONTAINS_OP and JUMP_IF_NOT_EXC_MATCH bytecodes #39156)
-# Python 3.9a2 3424 (simplify bytecodes for *value unpacking)
-# Python 3.9a2 3425 (simplify bytecodes for **value unpacking)
-
+# Python 3.8a1 3400 (move frame block handling to compiler #17611)
+# Python 3.8a1 3401 (add END_ASYNC_FOR #33041)
+# Python 3.8a1 3410 (PEP570 Python Positional-Only Parameters #36540)
+# Python 3.8b2 3411 (Reverse evaluation order of key: value in dict
+# comprehensions #35224)
+# Python 3.8b2 3412 (Swap the position of positional args and positional
+# only args in ast.arguments #37593)
+# Python 3.8b4 3413 (Fix "break" and "continue" in "finally" #37830)
+# Python 3.9a0 3420 (add LOAD_ASSERTION_ERROR #34880)
+# Python 3.9a0 3421 (simplified bytecode for with blocks #32949)
+# Python 3.9a0 3422 (remove BEGIN_FINALLY, END_FINALLY, CALL_FINALLY, POP_FINALLY bytecodes #33387)
+# Python 3.9a2 3423 (add IS_OP, CONTAINS_OP and JUMP_IF_NOT_EXC_MATCH bytecodes #39156)
+# Python 3.9a2 3424 (simplify bytecodes for *value unpacking)
+# Python 3.9a2 3425 (simplify bytecodes for **value unpacking)
+
#
# MAGIC must change whenever the bytecode emitted by the compiler may no
# longer be understood by older implementations of the eval loop (usually
@@ -346,7 +346,7 @@ _code_type = type(_write_atomic.__code__)
# Whenever MAGIC_NUMBER is changed, the ranges in the magic_values array
# in PC/launcher.c must also be updated.
-MAGIC_NUMBER = (3425).to_bytes(2, 'little') + b'\r\n'
+MAGIC_NUMBER = (3425).to_bytes(2, 'little') + b'\r\n'
_RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c
_PYCACHE = '__pycache__'
@@ -400,35 +400,35 @@ def cache_from_source(path, debug_override=None, *, optimization=None):
if not optimization.isalnum():
raise ValueError('{!r} is not alphanumeric'.format(optimization))
almost_filename = '{}.{}{}'.format(almost_filename, _OPT, optimization)
- filename = almost_filename + BYTECODE_SUFFIXES[0]
- if sys.pycache_prefix is not None:
- # We need an absolute path to the py file to avoid the possibility of
- # collisions within sys.pycache_prefix, if someone has two different
- # `foo/bar.py` on their system and they import both of them using the
- # same sys.pycache_prefix. Let's say sys.pycache_prefix is
- # `C:\Bytecode`; the idea here is that if we get `Foo\Bar`, we first
- # make it absolute (`C:\Somewhere\Foo\Bar`), then make it root-relative
- # (`Somewhere\Foo\Bar`), so we end up placing the bytecode file in an
- # unambiguous `C:\Bytecode\Somewhere\Foo\Bar\`.
- if not _path_isabs(head):
- head = _path_join(_os.getcwd(), head)
-
- # Strip initial drive from a Windows path. We know we have an absolute
- # path here, so the second part of the check rules out a POSIX path that
- # happens to contain a colon at the second character.
- if head[1] == ':' and head[0] not in path_separators:
- head = head[2:]
-
- # Strip initial path separator from `head` to complete the conversion
- # back to a root-relative path before joining.
- return _path_join(
- sys.pycache_prefix,
- head.lstrip(path_separators),
- filename,
- )
- return _path_join(head, _PYCACHE, filename)
-
-
+ filename = almost_filename + BYTECODE_SUFFIXES[0]
+ if sys.pycache_prefix is not None:
+ # We need an absolute path to the py file to avoid the possibility of
+ # collisions within sys.pycache_prefix, if someone has two different
+ # `foo/bar.py` on their system and they import both of them using the
+ # same sys.pycache_prefix. Let's say sys.pycache_prefix is
+ # `C:\Bytecode`; the idea here is that if we get `Foo\Bar`, we first
+ # make it absolute (`C:\Somewhere\Foo\Bar`), then make it root-relative
+ # (`Somewhere\Foo\Bar`), so we end up placing the bytecode file in an
+ # unambiguous `C:\Bytecode\Somewhere\Foo\Bar\`.
+ if not _path_isabs(head):
+ head = _path_join(_os.getcwd(), head)
+
+ # Strip initial drive from a Windows path. We know we have an absolute
+ # path here, so the second part of the check rules out a POSIX path that
+ # happens to contain a colon at the second character.
+ if head[1] == ':' and head[0] not in path_separators:
+ head = head[2:]
+
+ # Strip initial path separator from `head` to complete the conversion
+ # back to a root-relative path before joining.
+ return _path_join(
+ sys.pycache_prefix,
+ head.lstrip(path_separators),
+ filename,
+ )
+ return _path_join(head, _PYCACHE, filename)
+
+
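A behavioural sketch of the pycache_prefix branch (paths and cache tag are illustrative):

    import sys
    from importlib.util import cache_from_source

    cache_from_source('/src/pkg/mod.py')
    # -> '/src/pkg/__pycache__/mod.cpython-39.pyc'    (no prefix: classic layout)

    sys.pycache_prefix = '/tmp/bytecode'              # hypothetical prefix
    cache_from_source('/src/pkg/mod.py')
    # -> '/tmp/bytecode/src/pkg/mod.cpython-39.pyc'   (root-relative source path appended)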
def source_from_cache(path):
"""Given the path to a .pyc. file, return the path to its .py file.
@@ -442,29 +442,29 @@ def source_from_cache(path):
raise NotImplementedError('sys.implementation.cache_tag is None')
path = _os.fspath(path)
head, pycache_filename = _path_split(path)
- found_in_pycache_prefix = False
- if sys.pycache_prefix is not None:
- stripped_path = sys.pycache_prefix.rstrip(path_separators)
- if head.startswith(stripped_path + path_sep):
- head = head[len(stripped_path):]
- found_in_pycache_prefix = True
- if not found_in_pycache_prefix:
- head, pycache = _path_split(head)
- if pycache != _PYCACHE:
- raise ValueError(f'{_PYCACHE} not bottom-level directory in '
- f'{path!r}')
+ found_in_pycache_prefix = False
+ if sys.pycache_prefix is not None:
+ stripped_path = sys.pycache_prefix.rstrip(path_separators)
+ if head.startswith(stripped_path + path_sep):
+ head = head[len(stripped_path):]
+ found_in_pycache_prefix = True
+ if not found_in_pycache_prefix:
+ head, pycache = _path_split(head)
+ if pycache != _PYCACHE:
+ raise ValueError(f'{_PYCACHE} not bottom-level directory in '
+ f'{path!r}')
dot_count = pycache_filename.count('.')
if dot_count not in {2, 3}:
- raise ValueError(f'expected only 2 or 3 dots in {pycache_filename!r}')
+ raise ValueError(f'expected only 2 or 3 dots in {pycache_filename!r}')
elif dot_count == 3:
optimization = pycache_filename.rsplit('.', 2)[-2]
if not optimization.startswith(_OPT):
raise ValueError("optimization portion of filename does not start "
- f"with {_OPT!r}")
+ f"with {_OPT!r}")
opt_level = optimization[len(_OPT):]
if not opt_level.isalnum():
- raise ValueError(f"optimization level {optimization!r} is not an "
- "alphanumeric value")
+ raise ValueError(f"optimization level {optimization!r} is not an "
+ "alphanumeric value")
base_filename = pycache_filename.partition('.')[0]
return _path_join(head, base_filename + SOURCE_SUFFIXES[0])
@@ -582,7 +582,7 @@ def _classify_pyc(data, name, exc_details):
message = f'reached EOF while reading pyc header of {name!r}'
_bootstrap._verbose_message('{}', message)
raise EOFError(message)
- flags = _unpack_uint32(data[4:8])
+ flags = _unpack_uint32(data[4:8])
# Only the first two flags are defined.
if flags & ~0b11:
message = f'invalid flags {flags!r} in {name!r}'
@@ -609,12 +609,12 @@ def _validate_timestamp_pyc(data, source_mtime, source_size, name,
An ImportError is raised if the bytecode is stale.
"""
- if _unpack_uint32(data[8:12]) != (source_mtime & 0xFFFFFFFF):
+ if _unpack_uint32(data[8:12]) != (source_mtime & 0xFFFFFFFF):
message = f'bytecode is stale for {name!r}'
_bootstrap._verbose_message('{}', message)
raise ImportError(message, **exc_details)
if (source_size is not None and
- _unpack_uint32(data[12:16]) != (source_size & 0xFFFFFFFF)):
+ _unpack_uint32(data[12:16]) != (source_size & 0xFFFFFFFF)):
raise ImportError(f'bytecode is stale for {name!r}', **exc_details)
@@ -658,9 +658,9 @@ def _compile_bytecode(data, name=None, bytecode_path=None, source_path=None):
def _code_to_timestamp_pyc(code, mtime=0, source_size=0):
"Produce the data for a timestamp-based pyc."
data = bytearray(MAGIC_NUMBER)
- data.extend(_pack_uint32(0))
- data.extend(_pack_uint32(mtime))
- data.extend(_pack_uint32(source_size))
+ data.extend(_pack_uint32(0))
+ data.extend(_pack_uint32(mtime))
+ data.extend(_pack_uint32(source_size))
data.extend(marshal.dumps(code))
return data
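Both builders emit the same 16-byte header and differ only in the flag word and what fills bytes 8-15; sketched byte-for-byte:

    #  offset  content
    #   0-3    MAGIC_NUMBER                 (b'a\r\r\n' for magic 3425)
    #   4-7    flags: 0 = timestamp pyc; 0b01/0b11 = unchecked/checked hash pyc
    #   8-11   source mtime (timestamp pyc) or first half of the 8-byte source hash
    #  12-15   source size  (timestamp pyc) or second half of the source hash
    data = _code_to_timestamp_pyc(compile('x = 1', '<sketch>', 'exec'),
                                  mtime=0, source_size=5)
    assert data[:4] == MAGIC_NUMBER and _unpack_uint32(data[4:8]) == 0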
@@ -669,7 +669,7 @@ def _code_to_hash_pyc(code, source_hash, checked=True):
"Produce the data for a hash-based pyc."
data = bytearray(MAGIC_NUMBER)
flags = 0b1 | checked << 1
- data.extend(_pack_uint32(flags))
+ data.extend(_pack_uint32(flags))
assert len(source_hash) == 8
data.extend(source_hash)
data.extend(marshal.dumps(code))
@@ -776,9 +776,9 @@ class WindowsRegistryFinder:
@classmethod
def _open_registry(cls, key):
try:
- return winreg.OpenKey(winreg.HKEY_CURRENT_USER, key)
+ return winreg.OpenKey(winreg.HKEY_CURRENT_USER, key)
except OSError:
- return winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, key)
+ return winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, key)
@classmethod
def _search_registry(cls, fullname):
@@ -790,7 +790,7 @@ class WindowsRegistryFinder:
sys_version='%d.%d' % sys.version_info[:2])
try:
with cls._open_registry(key) as hkey:
- filepath = winreg.QueryValue(hkey, '')
+ filepath = winreg.QueryValue(hkey, '')
except OSError:
return None
return filepath
@@ -858,16 +858,16 @@ class SourceLoader(_LoaderBasics):
def path_mtime(self, path):
"""Optional method that returns the modification time (an int) for the
- specified path (a str).
+ specified path (a str).
Raises OSError when the path cannot be handled.
"""
raise OSError
def path_stats(self, path):
- """Optional method returning a metadata dict for the specified
- path (a str).
-
+ """Optional method returning a metadata dict for the specified
+ path (a str).
+
Possible keys:
- 'mtime' (mandatory) is the numeric timestamp of last source
code modification;
@@ -1035,12 +1035,12 @@ class FileLoader:
def get_data(self, path):
"""Return the data from path as raw bytes."""
- if isinstance(self, (SourceLoader, ExtensionFileLoader)):
- with _io.open_code(str(path)) as file:
- return file.read()
- else:
- with _io.FileIO(path, 'r') as file:
- return file.read()
+ if isinstance(self, (SourceLoader, ExtensionFileLoader)):
+ with _io.open_code(str(path)) as file:
+ return file.read()
+ else:
+ with _io.FileIO(path, 'r') as file:
+ return file.read()
# ResourceReader ABC API.
@@ -1154,11 +1154,11 @@ class ExtensionFileLoader(FileLoader, _LoaderBasics):
def __init__(self, name, path):
self.name = name
- if not _path_isabs(path):
- try:
- path = _path_join(_os.getcwd(), path)
- except OSError:
- pass
+ if not _path_isabs(path):
+ try:
+ path = _path_join(_os.getcwd(), path)
+ except OSError:
+ pass
self.path = path
def __eq__(self, other):
@@ -1245,9 +1245,9 @@ class _NamespacePath:
def __iter__(self):
return iter(self._recalculate())
- def __getitem__(self, index):
- return self._recalculate()[index]
-
+ def __getitem__(self, index):
+ return self._recalculate()[index]
+
def __setitem__(self, index, path):
self._path[index] = path
@@ -1440,20 +1440,20 @@ class PathFinder:
return None
return spec.loader
- @classmethod
- def find_distributions(cls, *args, **kwargs):
- """
- Find distributions.
-
- Return an iterable of all Distribution instances capable of
- loading the metadata for packages matching ``context.name``
- (or all names if ``None`` indicated) along the paths in the list
- of directories ``context.path``.
- """
- from importlib.metadata import MetadataPathFinder
- return MetadataPathFinder.find_distributions(*args, **kwargs)
-
-
+ @classmethod
+ def find_distributions(cls, *args, **kwargs):
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching ``context.name``
+ (or all names if ``None`` indicated) along the paths in the list
+ of directories ``context.path``.
+ """
+ from importlib.metadata import MetadataPathFinder
+ return MetadataPathFinder.find_distributions(*args, **kwargs)
+
+
class FileFinder:
"""File-based finder.
@@ -1473,8 +1473,8 @@ class FileFinder:
self._loaders = loaders
# Base (directory) path
self.path = path or '.'
- if not _path_isabs(self.path):
- self.path = _path_join(_os.getcwd(), self.path)
+ if not _path_isabs(self.path):
+ self.path = _path_join(_os.getcwd(), self.path)
self._path_mtime = -1
self._path_cache = set()
self._relaxed_path_cache = set()
@@ -1537,10 +1537,10 @@ class FileFinder:
is_namespace = _path_isdir(base_path)
# Check for a file w/ a proper suffix exists.
for suffix, loader_class in self._loaders:
- try:
- full_path = _path_join(self.path, tail_module + suffix)
- except ValueError:
- return None
+ try:
+ full_path = _path_join(self.path, tail_module + suffix)
+ except ValueError:
+ return None
_bootstrap._verbose_message('trying {}', full_path, verbosity=2)
if cache_module + suffix in cache:
if _path_isfile(full_path):
@@ -1673,22 +1673,22 @@ def _setup(_bootstrap_module):
continue
else:
raise ImportError('importlib requires posix or nt')
-
+
setattr(self_module, '_os', os_module)
setattr(self_module, 'path_sep', path_sep)
setattr(self_module, 'path_separators', ''.join(path_separators))
- setattr(self_module, '_pathseps_with_colon', {f':{s}' for s in path_separators})
+ setattr(self_module, '_pathseps_with_colon', {f':{s}' for s in path_separators})
- # Directly load built-in modules needed during bootstrap.
- builtin_names = ['_io', '_warnings', 'marshal']
+ # Directly load built-in modules needed during bootstrap.
+ builtin_names = ['_io', '_warnings', 'marshal']
if builtin_os == 'nt':
- builtin_names.append('winreg')
- for builtin_name in builtin_names:
- if builtin_name not in sys.modules:
- builtin_module = _bootstrap._builtin_from_name(builtin_name)
- else:
- builtin_module = sys.modules[builtin_name]
- setattr(self_module, builtin_name, builtin_module)
+ builtin_names.append('winreg')
+ for builtin_name in builtin_names:
+ if builtin_name not in sys.modules:
+ builtin_module = _bootstrap._builtin_from_name(builtin_name)
+ else:
+ builtin_module = sys.modules[builtin_name]
+ setattr(self_module, builtin_name, builtin_module)
# Constants
setattr(self_module, '_relax_case', _make_relax_case())
diff --git a/contrib/tools/python3/src/Lib/importlib/_common.py b/contrib/tools/python3/src/Lib/importlib/_common.py
index c1204f0b8f..82b02c7458 100644
--- a/contrib/tools/python3/src/Lib/importlib/_common.py
+++ b/contrib/tools/python3/src/Lib/importlib/_common.py
@@ -1,62 +1,62 @@
-import os
-import pathlib
-import zipfile
-import tempfile
-import functools
-import contextlib
-
-
-def from_package(package):
- """
- Return a Traversable object for the given package.
-
- """
- return fallback_resources(package.__spec__)
-
-
-def fallback_resources(spec):
- package_directory = pathlib.Path(spec.origin).parent
- try:
- archive_path = spec.loader.archive
- rel_path = package_directory.relative_to(archive_path)
- return zipfile.Path(archive_path, str(rel_path) + '/')
- except Exception:
- pass
- return package_directory
-
-
-@contextlib.contextmanager
-def _tempfile(reader, suffix=''):
- # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
- # blocks due to the need to close the temporary file to work on Windows
- # properly.
- fd, raw_path = tempfile.mkstemp(suffix=suffix)
- try:
- os.write(fd, reader())
- os.close(fd)
- yield pathlib.Path(raw_path)
- finally:
- try:
- os.remove(raw_path)
- except FileNotFoundError:
- pass
-
-
-@functools.singledispatch
-@contextlib.contextmanager
-def as_file(path):
- """
- Given a Traversable object, return that object as a
- path on the local file system in a context manager.
- """
- with _tempfile(path.read_bytes, suffix=path.name) as local:
- yield local
-
-
-@as_file.register(pathlib.Path)
-@contextlib.contextmanager
-def _(path):
- """
- Degenerate behavior for pathlib.Path objects.
- """
- yield path
+import os
+import pathlib
+import zipfile
+import tempfile
+import functools
+import contextlib
+
+
+def from_package(package):
+ """
+ Return a Traversable object for the given package.
+
+ """
+ return fallback_resources(package.__spec__)
+
+
+def fallback_resources(spec):
+ package_directory = pathlib.Path(spec.origin).parent
+ try:
+ archive_path = spec.loader.archive
+ rel_path = package_directory.relative_to(archive_path)
+ return zipfile.Path(archive_path, str(rel_path) + '/')
+ except Exception:
+ pass
+ return package_directory
+
+
+@contextlib.contextmanager
+def _tempfile(reader, suffix=''):
+ # Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
+ # blocks due to the need to close the temporary file to work on Windows
+ # properly.
+ fd, raw_path = tempfile.mkstemp(suffix=suffix)
+ try:
+ os.write(fd, reader())
+ os.close(fd)
+ yield pathlib.Path(raw_path)
+ finally:
+ try:
+ os.remove(raw_path)
+ except FileNotFoundError:
+ pass
+
+
+@functools.singledispatch
+@contextlib.contextmanager
+def as_file(path):
+ """
+ Given a Traversable object, return that object as a
+ path on the local file system in a context manager.
+ """
+ with _tempfile(path.read_bytes, suffix=path.name) as local:
+ yield local
+
+
+@as_file.register(pathlib.Path)
+@contextlib.contextmanager
+def _(path):
+ """
+ Degenerate behavior for pathlib.Path objects.
+ """
+ yield path
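A usage sketch for as_file() as surfaced through importlib.resources in this release (package and resource names are placeholders):

    from importlib.resources import files, as_file

    ref = files('mypkg') / 'data.txt'     # hypothetical package and resource
    with as_file(ref) as path:
        print(path.read_bytes())
    # Zip-hosted packages are extracted to a tempfile that is removed on exit;
    # the pathlib.Path registration above makes on-disk packages a no-op pass-through.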
diff --git a/contrib/tools/python3/src/Lib/importlib/abc.py b/contrib/tools/python3/src/Lib/importlib/abc.py
index b8a9bb1a21..def2e342dc 100644
--- a/contrib/tools/python3/src/Lib/importlib/abc.py
+++ b/contrib/tools/python3/src/Lib/importlib/abc.py
@@ -10,11 +10,11 @@ except ImportError as exc:
_frozen_importlib = None
try:
import _frozen_importlib_external
-except ImportError:
+except ImportError:
_frozen_importlib_external = _bootstrap_external
import abc
import warnings
-from typing import Protocol, runtime_checkable
+from typing import Protocol, runtime_checkable
def _register(abstract_cls, *classes):
@@ -387,88 +387,88 @@ class ResourceReader(metaclass=abc.ABCMeta):
_register(ResourceReader, machinery.SourceFileLoader)
-
-
-@runtime_checkable
-class Traversable(Protocol):
- """
- An object with a subset of pathlib.Path methods suitable for
- traversing directories and opening files.
- """
-
- @abc.abstractmethod
- def iterdir(self):
- """
- Yield Traversable objects in self
- """
-
- @abc.abstractmethod
- def read_bytes(self):
- """
- Read contents of self as bytes
- """
-
- @abc.abstractmethod
- def read_text(self, encoding=None):
- """
- Read contents of self as text
- """
-
- @abc.abstractmethod
- def is_dir(self):
- """
- Return True if self is a dir
- """
-
- @abc.abstractmethod
- def is_file(self):
- """
- Return True if self is a file
- """
-
- @abc.abstractmethod
- def joinpath(self, child):
- """
- Return Traversable child in self
- """
-
- @abc.abstractmethod
- def __truediv__(self, child):
- """
- Return Traversable child in self
- """
-
- @abc.abstractmethod
- def open(self, mode='r', *args, **kwargs):
- """
- mode may be 'r' or 'rb' to open as text or binary. Return a handle
- suitable for reading (same as pathlib.Path.open).
-
- When opening as text, accepts encoding parameters such as those
- accepted by io.TextIOWrapper.
- """
-
- @abc.abstractproperty
- def name(self):
- # type: () -> str
- """
- The base name of this object without any parent references.
- """
-
-
-class TraversableResources(ResourceReader):
- @abc.abstractmethod
- def files(self):
- """Return a Traversable object for the loaded package."""
-
- def open_resource(self, resource):
- return self.files().joinpath(resource).open('rb')
-
- def resource_path(self, resource):
- raise FileNotFoundError(resource)
-
- def is_resource(self, path):
- return self.files().joinpath(path).is_file()
-
- def contents(self):
- return (item.name for item in self.files().iterdir())
+
+
+@runtime_checkable
+class Traversable(Protocol):
+ """
+ An object with a subset of pathlib.Path methods suitable for
+ traversing directories and opening files.
+ """
+
+ @abc.abstractmethod
+ def iterdir(self):
+ """
+ Yield Traversable objects in self
+ """
+
+ @abc.abstractmethod
+ def read_bytes(self):
+ """
+ Read contents of self as bytes
+ """
+
+ @abc.abstractmethod
+ def read_text(self, encoding=None):
+ """
+ Read contents of self as text
+ """
+
+ @abc.abstractmethod
+ def is_dir(self):
+ """
+ Return True if self is a dir
+ """
+
+ @abc.abstractmethod
+ def is_file(self):
+ """
+ Return True if self is a file
+ """
+
+ @abc.abstractmethod
+ def joinpath(self, child):
+ """
+ Return Traversable child in self
+ """
+
+ @abc.abstractmethod
+ def __truediv__(self, child):
+ """
+ Return Traversable child in self
+ """
+
+ @abc.abstractmethod
+ def open(self, mode='r', *args, **kwargs):
+ """
+ mode may be 'r' or 'rb' to open as text or binary. Return a handle
+ suitable for reading (same as pathlib.Path.open).
+
+ When opening as text, accepts encoding parameters such as those
+ accepted by io.TextIOWrapper.
+ """
+
+ @abc.abstractproperty
+ def name(self):
+ # type: () -> str
+ """
+ The base name of this object without any parent references.
+ """
+
+
+class TraversableResources(ResourceReader):
+ @abc.abstractmethod
+ def files(self):
+ """Return a Traversable object for the loaded package."""
+
+ def open_resource(self, resource):
+ return self.files().joinpath(resource).open('rb')
+
+ def resource_path(self, resource):
+ raise FileNotFoundError(resource)
+
+ def is_resource(self, path):
+ return self.files().joinpath(path).is_file()
+
+ def contents(self):
+ return (item.name for item in self.files().iterdir())
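Because Traversable is a runtime_checkable Protocol, isinstance() checks it structurally: pathlib.Path already provides every required method, so it passes without any registration. A sketch, including a minimal concrete TraversableResources over an ordinary directory:

    import pathlib
    from importlib.abc import Traversable, TraversableResources

    # Structural check: Path has iterdir, read_bytes, read_text, ... and name.
    print(isinstance(pathlib.Path('.'), Traversable))  # True

    class DirResources(TraversableResources):
        """Serve resources from a plain directory."""
        def __init__(self, root):
            self.root = pathlib.Path(root)

        def files(self):
            return self.root

    reader = DirResources('.')
    print(sorted(reader.contents())[:3])  # first few entries in the directory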
diff --git a/contrib/tools/python3/src/Lib/importlib/metadata.py b/contrib/tools/python3/src/Lib/importlib/metadata.py
index 594986ce23..c48aa9e765 100644
--- a/contrib/tools/python3/src/Lib/importlib/metadata.py
+++ b/contrib/tools/python3/src/Lib/importlib/metadata.py
@@ -1,595 +1,595 @@
-import io
-import os
-import re
-import abc
-import csv
-import sys
-import email
-import pathlib
-import zipfile
-import operator
-import functools
-import itertools
-import posixpath
-import collections
-
-from configparser import ConfigParser
-from contextlib import suppress
-from importlib import import_module
-from importlib.abc import MetaPathFinder
-from itertools import starmap
-
-
-__all__ = [
- 'Distribution',
- 'DistributionFinder',
- 'PackageNotFoundError',
- 'distribution',
- 'distributions',
- 'entry_points',
- 'files',
- 'metadata',
- 'requires',
- 'version',
- ]
-
-
-class PackageNotFoundError(ModuleNotFoundError):
- """The package was not found."""
-
-
-class EntryPoint(
- collections.namedtuple('EntryPointBase', 'name value group')):
- """An entry point as defined by Python packaging conventions.
-
- See `the packaging docs on entry points
- <https://packaging.python.org/specifications/entry-points/>`_
- for more information.
- """
-
- pattern = re.compile(
- r'(?P<module>[\w.]+)\s*'
- r'(:\s*(?P<attr>[\w.]+))?\s*'
- r'(?P<extras>\[.*\])?\s*$'
- )
- """
- A regular expression describing the syntax for an entry point,
- which might look like:
-
- - module
- - package.module
- - package.module:attribute
- - package.module:object.attribute
- - package.module:attr [extra1, extra2]
-
- Other combinations are possible as well.
-
- The expression is lenient about whitespace around the ':',
- following the attr, and following any extras.
- """
-
- def load(self):
- """Load the entry point from its definition. If only a module
- is indicated by the value, return that module. Otherwise,
- return the named object.
- """
- match = self.pattern.match(self.value)
- module = import_module(match.group('module'))
- attrs = filter(None, (match.group('attr') or '').split('.'))
- return functools.reduce(getattr, attrs, module)
-
- @property
- def module(self):
- match = self.pattern.match(self.value)
- return match.group('module')
-
- @property
- def attr(self):
- match = self.pattern.match(self.value)
- return match.group('attr')
-
- @property
- def extras(self):
- match = self.pattern.match(self.value)
- return list(re.finditer(r'\w+', match.group('extras') or ''))
-
- @classmethod
- def _from_config(cls, config):
- return [
- cls(name, value, group)
- for group in config.sections()
- for name, value in config.items(group)
- ]
-
- @classmethod
- def _from_text(cls, text):
- config = ConfigParser(delimiters='=')
- # case sensitive: https://stackoverflow.com/q/1611799/812183
- config.optionxform = str
- try:
- config.read_string(text)
- except AttributeError: # pragma: nocover
- # Python 2 has no read_string
- config.readfp(io.StringIO(text))
- return EntryPoint._from_config(config)
-
- def __iter__(self):
- """
- Supply iter so one may construct dicts of EntryPoints easily.
- """
- return iter((self.name, self))
-
- def __reduce__(self):
- return (
- self.__class__,
- (self.name, self.value, self.group),
- )
-
-
-class PackagePath(pathlib.PurePosixPath):
- """A reference to a path in a package"""
-
- def read_text(self, encoding='utf-8'):
- with self.locate().open(encoding=encoding) as stream:
- return stream.read()
-
- def read_binary(self):
- with self.locate().open('rb') as stream:
- return stream.read()
-
- def locate(self):
- """Return a path-like object for this path"""
- return self.dist.locate_file(self)
-
-
-class FileHash:
- def __init__(self, spec):
- self.mode, _, self.value = spec.partition('=')
-
- def __repr__(self):
- return '<FileHash mode: {} value: {}>'.format(self.mode, self.value)
-
-
-class Distribution:
- """A Python distribution package."""
-
- @abc.abstractmethod
- def read_text(self, filename):
- """Attempt to load metadata file given by the name.
-
- :param filename: The name of the file in the distribution info.
- :return: The text if found, otherwise None.
- """
-
- @abc.abstractmethod
- def locate_file(self, path):
- """
- Given a path to a file in this distribution, return a path
- to it.
- """
-
- @classmethod
- def from_name(cls, name):
- """Return the Distribution for the given package name.
-
- :param name: The name of the distribution package to search for.
- :return: The Distribution instance (or subclass thereof) for the named
- package, if found.
- :raises PackageNotFoundError: When the named package's distribution
- metadata cannot be found.
- """
- for resolver in cls._discover_resolvers():
- dists = resolver(DistributionFinder.Context(name=name))
- dist = next(iter(dists), None)
- if dist is not None:
- return dist
- else:
- raise PackageNotFoundError(name)
-
- @classmethod
- def discover(cls, **kwargs):
- """Return an iterable of Distribution objects for all packages.
-
- Pass a ``context`` or pass keyword arguments for constructing
- a context.
-
- :context: A ``DistributionFinder.Context`` object.
- :return: Iterable of Distribution objects for all packages.
- """
- context = kwargs.pop('context', None)
- if context and kwargs:
- raise ValueError("cannot accept context and kwargs")
- context = context or DistributionFinder.Context(**kwargs)
- return itertools.chain.from_iterable(
- resolver(context)
- for resolver in cls._discover_resolvers()
- )
-
- @staticmethod
- def at(path):
- """Return a Distribution for the indicated metadata path
-
- :param path: a string or path-like object
- :return: a concrete Distribution instance for the path
- """
- return PathDistribution(pathlib.Path(path))
-
- @staticmethod
- def _discover_resolvers():
- """Search the meta_path for resolvers."""
- declared = (
- getattr(finder, 'find_distributions', None)
- for finder in sys.meta_path
- )
- return filter(None, declared)
-
- @classmethod
- def _local(cls, root='.'):
- from pep517 import build, meta
- system = build.compat_system(root)
- builder = functools.partial(
- meta.build,
- source_dir=root,
- system=system,
- )
- return PathDistribution(zipfile.Path(meta.build_as_zip(builder)))
-
- @property
- def metadata(self):
- """Return the parsed metadata for this Distribution.
-
- The returned object will have keys that name the various bits of
- metadata. See PEP 566 for details.
- """
- text = (
- self.read_text('METADATA')
- or self.read_text('PKG-INFO')
- # This last clause is here to support old egg-info files. Its
- # effect is to just end up using the PathDistribution's self._path
- # (which points to the egg-info file) attribute unchanged.
- or self.read_text('')
- )
- return email.message_from_string(text)
-
- @property
- def version(self):
- """Return the 'Version' metadata for the distribution package."""
- return self.metadata['Version']
-
- @property
- def entry_points(self):
- return EntryPoint._from_text(self.read_text('entry_points.txt'))
-
- @property
- def files(self):
- """Files in this distribution.
-
- :return: List of PackagePath for this distribution or None
-
- Result is `None` if the metadata file that enumerates files
- (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is
- missing.
- Result may be empty if the metadata exists but is empty.
- """
- file_lines = self._read_files_distinfo() or self._read_files_egginfo()
-
- def make_file(name, hash=None, size_str=None):
- result = PackagePath(name)
- result.hash = FileHash(hash) if hash else None
- result.size = int(size_str) if size_str else None
- result.dist = self
- return result
-
- return file_lines and list(starmap(make_file, csv.reader(file_lines)))
-
- def _read_files_distinfo(self):
- """
- Read the lines of RECORD
- """
- text = self.read_text('RECORD')
- return text and text.splitlines()
-
- def _read_files_egginfo(self):
- """
- SOURCES.txt might contain literal commas, so wrap each line
- in quotes.
- """
- text = self.read_text('SOURCES.txt')
- return text and map('"{}"'.format, text.splitlines())
-
- @property
- def requires(self):
- """Generated requirements specified for this Distribution"""
- reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs()
- return reqs and list(reqs)
-
- def _read_dist_info_reqs(self):
- return self.metadata.get_all('Requires-Dist')
-
- def _read_egg_info_reqs(self):
- source = self.read_text('requires.txt')
- return source and self._deps_from_requires_text(source)
-
- @classmethod
- def _deps_from_requires_text(cls, source):
- section_pairs = cls._read_sections(source.splitlines())
- sections = {
- section: list(map(operator.itemgetter('line'), results))
- for section, results in
- itertools.groupby(section_pairs, operator.itemgetter('section'))
- }
- return cls._convert_egg_info_reqs_to_simple_reqs(sections)
-
- @staticmethod
- def _read_sections(lines):
- section = None
- for line in filter(None, lines):
- section_match = re.match(r'\[(.*)\]$', line)
- if section_match:
- section = section_match.group(1)
- continue
- yield locals()
-
- @staticmethod
- def _convert_egg_info_reqs_to_simple_reqs(sections):
- """
- Historically, setuptools would solicit and store 'extra'
- requirements, including those with environment markers,
- in separate sections. More modern tools expect each
- dependency to be defined separately, with any relevant
- extras and environment markers attached directly to that
- requirement. This method converts the former to the
- latter. See _test_deps_from_requires_text for an example.
- """
- def make_condition(name):
- return name and 'extra == "{name}"'.format(name=name)
-
- def quoted_marker(section):
- section = section or ''
- extra, sep, markers = section.partition(':')
- if extra and markers:
- markers = f'({markers})'
- conditions = list(filter(None, [markers, make_condition(extra)]))
- return '; ' + ' and '.join(conditions) if conditions else ''
-
- def url_req_space(req):
- """
- PEP 508 requires a space between the url_spec and the quoted_marker.
- Ref python/importlib_metadata#357.
- """
- # '@' is uniquely indicative of a url_req.
- return ' ' * ('@' in req)
-
- for section, deps in sections.items():
- for dep in deps:
- space = url_req_space(dep)
- yield dep + space + quoted_marker(section)
-
-
-class DistributionFinder(MetaPathFinder):
- """
- A MetaPathFinder capable of discovering installed distributions.
- """
-
- class Context:
- """
- Keyword arguments presented by the caller to
- ``distributions()`` or ``Distribution.discover()``
- to narrow the scope of a search for distributions
- in all DistributionFinders.
-
- Each DistributionFinder may expect any parameters
- and should attempt to honor the canonical
- parameters defined below when appropriate.
- """
-
- name = None
- """
- Specific name for which a distribution finder should match.
- A name of ``None`` matches all distributions.
- """
-
- def __init__(self, **kwargs):
- vars(self).update(kwargs)
-
- @property
- def path(self):
- """
- The path that a distribution finder should search.
-
- Typically refers to Python package paths and defaults
- to ``sys.path``.
- """
- return vars(self).get('path', sys.path)
-
- @abc.abstractmethod
- def find_distributions(self, context=Context()):
- """
- Find distributions.
-
- Return an iterable of all Distribution instances capable of
- loading the metadata for packages matching the ``context``,
- a DistributionFinder.Context instance.
- """
-
-
-class FastPath:
- """
- Micro-optimized class for searching a path for
- children.
- """
-
- def __init__(self, root):
- self.root = root
- self.base = os.path.basename(self.root).lower()
-
- def joinpath(self, child):
- return pathlib.Path(self.root, child)
-
- def children(self):
- with suppress(Exception):
- return os.listdir(self.root or '.')
- with suppress(Exception):
- return self.zip_children()
- return []
-
- def zip_children(self):
- zip_path = zipfile.Path(self.root)
- names = zip_path.root.namelist()
- self.joinpath = zip_path.joinpath
-
- return dict.fromkeys(
- child.split(posixpath.sep, 1)[0]
- for child in names
- )
-
- def is_egg(self, search):
- base = self.base
- return (
- base == search.versionless_egg_name
- or base.startswith(search.prefix)
- and base.endswith('.egg'))
-
- def search(self, name):
- for child in self.children():
- n_low = child.lower()
- if (n_low in name.exact_matches
- or n_low.startswith(name.prefix)
- and n_low.endswith(name.suffixes)
- # legacy case:
- or self.is_egg(name) and n_low == 'egg-info'):
- yield self.joinpath(child)
-
-
-class Prepared:
- """
- A prepared search for metadata on a possibly-named package.
- """
- normalized = ''
- prefix = ''
- suffixes = '.dist-info', '.egg-info'
- exact_matches = [''][:0]
- versionless_egg_name = ''
-
- def __init__(self, name):
- self.name = name
- if name is None:
- return
- self.normalized = name.lower().replace('-', '_')
- self.prefix = self.normalized + '-'
- self.exact_matches = [
- self.normalized + suffix for suffix in self.suffixes]
- self.versionless_egg_name = self.normalized + '.egg'
-
-
-class MetadataPathFinder(DistributionFinder):
- @classmethod
- def find_distributions(cls, context=DistributionFinder.Context()):
- """
- Find distributions.
-
- Return an iterable of all Distribution instances capable of
- loading the metadata for packages matching ``context.name``
- (or all names if ``None`` indicated) along the paths in the list
- of directories ``context.path``.
- """
- found = cls._search_paths(context.name, context.path)
- return map(PathDistribution, found)
-
- @classmethod
- def _search_paths(cls, name, paths):
- """Find metadata directories in paths heuristically."""
- return itertools.chain.from_iterable(
- path.search(Prepared(name))
- for path in map(FastPath, paths)
- )
-
-
-class PathDistribution(Distribution):
- def __init__(self, path):
- """Construct a distribution from a path to the metadata directory.
-
- :param path: A pathlib.Path or similar object supporting
- .joinpath(), __truediv__, .parent, and .read_text().
- """
- self._path = path
-
- def read_text(self, filename):
- with suppress(FileNotFoundError, IsADirectoryError, KeyError,
- NotADirectoryError, PermissionError):
- return self._path.joinpath(filename).read_text(encoding='utf-8')
- read_text.__doc__ = Distribution.read_text.__doc__
-
- def locate_file(self, path):
- return self._path.parent / path
-
-
-def distribution(distribution_name):
- """Get the ``Distribution`` instance for the named package.
-
- :param distribution_name: The name of the distribution package as a string.
- :return: A ``Distribution`` instance (or subclass thereof).
- """
- return Distribution.from_name(distribution_name)
-
-
-def distributions(**kwargs):
- """Get all ``Distribution`` instances in the current environment.
-
- :return: An iterable of ``Distribution`` instances.
- """
- return Distribution.discover(**kwargs)
-
-
-def metadata(distribution_name):
- """Get the metadata for the named package.
-
- :param distribution_name: The name of the distribution package to query.
- :return: An email.Message containing the parsed metadata.
- """
- return Distribution.from_name(distribution_name).metadata
-
-
-def version(distribution_name):
- """Get the version string for the named package.
-
- :param distribution_name: The name of the distribution package to query.
- :return: The version string for the package as defined in the package's
- "Version" metadata key.
- """
- return distribution(distribution_name).version
-
-
-def entry_points():
- """Return EntryPoint objects for all installed packages.
-
- :return: A dict mapping group names to tuples of EntryPoint objects.
- """
- eps = itertools.chain.from_iterable(
- dist.entry_points for dist in distributions())
- by_group = operator.attrgetter('group')
- ordered = sorted(eps, key=by_group)
- grouped = itertools.groupby(ordered, by_group)
- return {
- group: tuple(eps)
- for group, eps in grouped
- }
-
-
-def files(distribution_name):
- """Return a list of files for the named package.
-
- :param distribution_name: The name of the distribution package to query.
- :return: List of files composing the distribution.
- """
- return distribution(distribution_name).files
-
-
-def requires(distribution_name):
- """
- Return a list of requirements for the named package.
-
- :return: A list of requirement strings, suitable for
- packaging.requirement.Requirement.
- """
- return distribution(distribution_name).requires
+import io
+import os
+import re
+import abc
+import csv
+import sys
+import email
+import pathlib
+import zipfile
+import operator
+import functools
+import itertools
+import posixpath
+import collections
+
+from configparser import ConfigParser
+from contextlib import suppress
+from importlib import import_module
+from importlib.abc import MetaPathFinder
+from itertools import starmap
+
+
+__all__ = [
+ 'Distribution',
+ 'DistributionFinder',
+ 'PackageNotFoundError',
+ 'distribution',
+ 'distributions',
+ 'entry_points',
+ 'files',
+ 'metadata',
+ 'requires',
+ 'version',
+ ]
+
+
+class PackageNotFoundError(ModuleNotFoundError):
+ """The package was not found."""
+
+
+class EntryPoint(
+ collections.namedtuple('EntryPointBase', 'name value group')):
+ """An entry point as defined by Python packaging conventions.
+
+ See `the packaging docs on entry points
+ <https://packaging.python.org/specifications/entry-points/>`_
+ for more information.
+ """
+
+ pattern = re.compile(
+ r'(?P<module>[\w.]+)\s*'
+ r'(:\s*(?P<attr>[\w.]+))?\s*'
+ r'(?P<extras>\[.*\])?\s*$'
+ )
+ """
+ A regular expression describing the syntax for an entry point,
+ which might look like:
+
+ - module
+ - package.module
+ - package.module:attribute
+ - package.module:object.attribute
+ - package.module:attr [extra1, extra2]
+
+ Other combinations are possible as well.
+
+ The expression is lenient about whitespace around the ':',
+ following the attr, and following any extras.
+ """
+
+ def load(self):
+ """Load the entry point from its definition. If only a module
+ is indicated by the value, return that module. Otherwise,
+ return the named object.
+ """
+ match = self.pattern.match(self.value)
+ module = import_module(match.group('module'))
+ attrs = filter(None, (match.group('attr') or '').split('.'))
+ return functools.reduce(getattr, attrs, module)
+
+ @property
+ def module(self):
+ match = self.pattern.match(self.value)
+ return match.group('module')
+
+ @property
+ def attr(self):
+ match = self.pattern.match(self.value)
+ return match.group('attr')
+
+ @property
+ def extras(self):
+ match = self.pattern.match(self.value)
+ return list(re.finditer(r'\w+', match.group('extras') or ''))
+
+ @classmethod
+ def _from_config(cls, config):
+ return [
+ cls(name, value, group)
+ for group in config.sections()
+ for name, value in config.items(group)
+ ]
+
+ @classmethod
+ def _from_text(cls, text):
+ config = ConfigParser(delimiters='=')
+ # case sensitive: https://stackoverflow.com/q/1611799/812183
+ config.optionxform = str
+ try:
+ config.read_string(text)
+ except AttributeError: # pragma: nocover
+ # Python 2 has no read_string
+ config.readfp(io.StringIO(text))
+ return EntryPoint._from_config(config)
+
+ def __iter__(self):
+ """
+ Supply iter so one may construct dicts of EntryPoints easily.
+ """
+ return iter((self.name, self))
+
+ def __reduce__(self):
+ return (
+ self.__class__,
+ (self.name, self.value, self.group),
+ )
+
+
+class PackagePath(pathlib.PurePosixPath):
+ """A reference to a path in a package"""
+
+ def read_text(self, encoding='utf-8'):
+ with self.locate().open(encoding=encoding) as stream:
+ return stream.read()
+
+ def read_binary(self):
+ with self.locate().open('rb') as stream:
+ return stream.read()
+
+ def locate(self):
+ """Return a path-like object for this path"""
+ return self.dist.locate_file(self)
+
+
+class FileHash:
+ def __init__(self, spec):
+ self.mode, _, self.value = spec.partition('=')
+
+ def __repr__(self):
+ return '<FileHash mode: {} value: {}>'.format(self.mode, self.value)
+
+
+class Distribution:
+ """A Python distribution package."""
+
+ @abc.abstractmethod
+ def read_text(self, filename):
+ """Attempt to load metadata file given by the name.
+
+ :param filename: The name of the file in the distribution info.
+ :return: The text if found, otherwise None.
+ """
+
+ @abc.abstractmethod
+ def locate_file(self, path):
+ """
+ Given a path to a file in this distribution, return a path
+ to it.
+ """
+
+ @classmethod
+ def from_name(cls, name):
+ """Return the Distribution for the given package name.
+
+ :param name: The name of the distribution package to search for.
+ :return: The Distribution instance (or subclass thereof) for the named
+ package, if found.
+ :raises PackageNotFoundError: When the named package's distribution
+ metadata cannot be found.
+ """
+ for resolver in cls._discover_resolvers():
+ dists = resolver(DistributionFinder.Context(name=name))
+ dist = next(iter(dists), None)
+ if dist is not None:
+ return dist
+ else:
+ raise PackageNotFoundError(name)
+
+ @classmethod
+ def discover(cls, **kwargs):
+ """Return an iterable of Distribution objects for all packages.
+
+ Pass a ``context`` or pass keyword arguments for constructing
+ a context.
+
+ :context: A ``DistributionFinder.Context`` object.
+ :return: Iterable of Distribution objects for all packages.
+ """
+ context = kwargs.pop('context', None)
+ if context and kwargs:
+ raise ValueError("cannot accept context and kwargs")
+ context = context or DistributionFinder.Context(**kwargs)
+ return itertools.chain.from_iterable(
+ resolver(context)
+ for resolver in cls._discover_resolvers()
+ )
+
+ @staticmethod
+ def at(path):
+ """Return a Distribution for the indicated metadata path
+
+ :param path: a string or path-like object
+ :return: a concrete Distribution instance for the path
+ """
+ return PathDistribution(pathlib.Path(path))
+
+ @staticmethod
+ def _discover_resolvers():
+ """Search the meta_path for resolvers."""
+ declared = (
+ getattr(finder, 'find_distributions', None)
+ for finder in sys.meta_path
+ )
+ return filter(None, declared)
+
+ @classmethod
+ def _local(cls, root='.'):
+ from pep517 import build, meta
+ system = build.compat_system(root)
+ builder = functools.partial(
+ meta.build,
+ source_dir=root,
+ system=system,
+ )
+ return PathDistribution(zipfile.Path(meta.build_as_zip(builder)))
+
+ @property
+ def metadata(self):
+ """Return the parsed metadata for this Distribution.
+
+ The returned object will have keys that name the various bits of
+ metadata. See PEP 566 for details.
+ """
+ text = (
+ self.read_text('METADATA')
+ or self.read_text('PKG-INFO')
+ # This last clause is here to support old egg-info files. Its
+ # effect is to just end up using the PathDistribution's self._path
+ # (which points to the egg-info file) attribute unchanged.
+ or self.read_text('')
+ )
+ return email.message_from_string(text)
+
+ @property
+ def version(self):
+ """Return the 'Version' metadata for the distribution package."""
+ return self.metadata['Version']
+
+ @property
+ def entry_points(self):
+ return EntryPoint._from_text(self.read_text('entry_points.txt'))
+
+ @property
+ def files(self):
+ """Files in this distribution.
+
+ :return: List of PackagePath for this distribution or None
+
+ Result is `None` if the metadata file that enumerates files
+ (i.e. RECORD for dist-info or SOURCES.txt for egg-info) is
+ missing.
+ Result may be empty if the metadata exists but is empty.
+ """
+ file_lines = self._read_files_distinfo() or self._read_files_egginfo()
+
+ def make_file(name, hash=None, size_str=None):
+ result = PackagePath(name)
+ result.hash = FileHash(hash) if hash else None
+ result.size = int(size_str) if size_str else None
+ result.dist = self
+ return result
+
+ return file_lines and list(starmap(make_file, csv.reader(file_lines)))
+
+ def _read_files_distinfo(self):
+ """
+ Read the lines of RECORD
+ """
+ text = self.read_text('RECORD')
+ return text and text.splitlines()
+
+ def _read_files_egginfo(self):
+ """
+ SOURCES.txt might contain literal commas, so wrap each line
+ in quotes.
+ """
+ text = self.read_text('SOURCES.txt')
+ return text and map('"{}"'.format, text.splitlines())
+
+ @property
+ def requires(self):
+ """Generated requirements specified for this Distribution"""
+ reqs = self._read_dist_info_reqs() or self._read_egg_info_reqs()
+ return reqs and list(reqs)
+
+ def _read_dist_info_reqs(self):
+ return self.metadata.get_all('Requires-Dist')
+
+ def _read_egg_info_reqs(self):
+ source = self.read_text('requires.txt')
+ return source and self._deps_from_requires_text(source)
+
+ @classmethod
+ def _deps_from_requires_text(cls, source):
+ section_pairs = cls._read_sections(source.splitlines())
+ sections = {
+ section: list(map(operator.itemgetter('line'), results))
+ for section, results in
+ itertools.groupby(section_pairs, operator.itemgetter('section'))
+ }
+ return cls._convert_egg_info_reqs_to_simple_reqs(sections)
+
+ @staticmethod
+ def _read_sections(lines):
+ section = None
+ for line in filter(None, lines):
+ section_match = re.match(r'\[(.*)\]$', line)
+ if section_match:
+ section = section_match.group(1)
+ continue
+ yield locals()
+
+ @staticmethod
+ def _convert_egg_info_reqs_to_simple_reqs(sections):
+ """
+ Historically, setuptools would solicit and store 'extra'
+ requirements, including those with environment markers,
+ in separate sections. More modern tools expect each
+ dependency to be defined separately, with any relevant
+ extras and environment markers attached directly to that
+ requirement. This method converts the former to the
+ latter. See _test_deps_from_requires_text for an example.
+ """
+ def make_condition(name):
+ return name and 'extra == "{name}"'.format(name=name)
+
+ def quoted_marker(section):
+ section = section or ''
+ extra, sep, markers = section.partition(':')
+ if extra and markers:
+ markers = f'({markers})'
+ conditions = list(filter(None, [markers, make_condition(extra)]))
+ return '; ' + ' and '.join(conditions) if conditions else ''
+
+ def url_req_space(req):
+ """
+ PEP 508 requires a space between the url_spec and the quoted_marker.
+ Ref python/importlib_metadata#357.
+ """
+ # '@' is uniquely indicative of a url_req.
+ return ' ' * ('@' in req)
+
+ for section, deps in sections.items():
+ for dep in deps:
+ space = url_req_space(dep)
+ yield dep + space + quoted_marker(section)
+
+
+class DistributionFinder(MetaPathFinder):
+ """
+ A MetaPathFinder capable of discovering installed distributions.
+ """
+
+ class Context:
+ """
+ Keyword arguments presented by the caller to
+ ``distributions()`` or ``Distribution.discover()``
+ to narrow the scope of a search for distributions
+ in all DistributionFinders.
+
+ Each DistributionFinder may expect any parameters
+ and should attempt to honor the canonical
+ parameters defined below when appropriate.
+ """
+
+ name = None
+ """
+ Specific name for which a distribution finder should match.
+ A name of ``None`` matches all distributions.
+ """
+
+ def __init__(self, **kwargs):
+ vars(self).update(kwargs)
+
+ @property
+ def path(self):
+ """
+ The path that a distribution finder should search.
+
+ Typically refers to Python package paths and defaults
+ to ``sys.path``.
+ """
+ return vars(self).get('path', sys.path)
+
+ @abc.abstractmethod
+ def find_distributions(self, context=Context()):
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching the ``context``,
+ a DistributionFinder.Context instance.
+ """
+
+
+class FastPath:
+ """
+ Micro-optimized class for searching a path for
+ children.
+ """
+
+ def __init__(self, root):
+ self.root = root
+ self.base = os.path.basename(self.root).lower()
+
+ def joinpath(self, child):
+ return pathlib.Path(self.root, child)
+
+ def children(self):
+ with suppress(Exception):
+ return os.listdir(self.root or '.')
+ with suppress(Exception):
+ return self.zip_children()
+ return []
+
+ def zip_children(self):
+ zip_path = zipfile.Path(self.root)
+ names = zip_path.root.namelist()
+ self.joinpath = zip_path.joinpath
+
+ return dict.fromkeys(
+ child.split(posixpath.sep, 1)[0]
+ for child in names
+ )
+
+ def is_egg(self, search):
+ base = self.base
+ return (
+ base == search.versionless_egg_name
+ or base.startswith(search.prefix)
+ and base.endswith('.egg'))
+
+ def search(self, name):
+ for child in self.children():
+ n_low = child.lower()
+ if (n_low in name.exact_matches
+ or n_low.startswith(name.prefix)
+ and n_low.endswith(name.suffixes)
+ # legacy case:
+ or self.is_egg(name) and n_low == 'egg-info'):
+ yield self.joinpath(child)
+
+
+class Prepared:
+ """
+ A prepared search for metadata on a possibly-named package.
+ """
+ normalized = ''
+ prefix = ''
+ suffixes = '.dist-info', '.egg-info'
+ exact_matches = [''][:0]
+ versionless_egg_name = ''
+
+ def __init__(self, name):
+ self.name = name
+ if name is None:
+ return
+ self.normalized = name.lower().replace('-', '_')
+ self.prefix = self.normalized + '-'
+ self.exact_matches = [
+ self.normalized + suffix for suffix in self.suffixes]
+ self.versionless_egg_name = self.normalized + '.egg'
+
+
+class MetadataPathFinder(DistributionFinder):
+ @classmethod
+ def find_distributions(cls, context=DistributionFinder.Context()):
+ """
+ Find distributions.
+
+ Return an iterable of all Distribution instances capable of
+ loading the metadata for packages matching ``context.name``
+ (or all names if ``None`` indicated) along the paths in the list
+ of directories ``context.path``.
+ """
+ found = cls._search_paths(context.name, context.path)
+ return map(PathDistribution, found)
+
+ @classmethod
+ def _search_paths(cls, name, paths):
+ """Find metadata directories in paths heuristically."""
+ return itertools.chain.from_iterable(
+ path.search(Prepared(name))
+ for path in map(FastPath, paths)
+ )
+
+
+class PathDistribution(Distribution):
+ def __init__(self, path):
+ """Construct a distribution from a path to the metadata directory.
+
+ :param path: A pathlib.Path or similar object supporting
+ .joinpath(), __truediv__, .parent, and .read_text().
+ """
+ self._path = path
+
+ def read_text(self, filename):
+ with suppress(FileNotFoundError, IsADirectoryError, KeyError,
+ NotADirectoryError, PermissionError):
+ return self._path.joinpath(filename).read_text(encoding='utf-8')
+ read_text.__doc__ = Distribution.read_text.__doc__
+
+ def locate_file(self, path):
+ return self._path.parent / path
+
+
+def distribution(distribution_name):
+ """Get the ``Distribution`` instance for the named package.
+
+ :param distribution_name: The name of the distribution package as a string.
+ :return: A ``Distribution`` instance (or subclass thereof).
+ """
+ return Distribution.from_name(distribution_name)
+
+
+def distributions(**kwargs):
+ """Get all ``Distribution`` instances in the current environment.
+
+ :return: An iterable of ``Distribution`` instances.
+ """
+ return Distribution.discover(**kwargs)
+
+
+def metadata(distribution_name):
+ """Get the metadata for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: An email.Message containing the parsed metadata.
+ """
+ return Distribution.from_name(distribution_name).metadata
+
+
+def version(distribution_name):
+ """Get the version string for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: The version string for the package as defined in the package's
+ "Version" metadata key.
+ """
+ return distribution(distribution_name).version
+
+
+def entry_points():
+ """Return EntryPoint objects for all installed packages.
+
+ :return: A dict mapping group names to tuples of EntryPoint objects.
+ """
+ eps = itertools.chain.from_iterable(
+ dist.entry_points for dist in distributions())
+ by_group = operator.attrgetter('group')
+ ordered = sorted(eps, key=by_group)
+ grouped = itertools.groupby(ordered, by_group)
+ return {
+ group: tuple(eps)
+ for group, eps in grouped
+ }
+
+
+def files(distribution_name):
+ """Return a list of files for the named package.
+
+ :param distribution_name: The name of the distribution package to query.
+ :return: List of files composing the distribution.
+ """
+ return distribution(distribution_name).files
+
+
+def requires(distribution_name):
+ """
+ Return a list of requirements for the named package.
+
+ :return: A list of requirement strings, suitable for
+ packaging.requirement.Requirement.
+ """
+ return distribution(distribution_name).requires
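The module-level helpers at the end of the file are thin wrappers over Distribution and its resolvers. A quick usage sketch, assuming a distribution such as setuptools is installed in the environment:

    from importlib import metadata

    dist_name = 'setuptools'  # assumption: installed in this environment
    print(metadata.version(dist_name))     # the 'Version' metadata field
    md = metadata.metadata(dist_name)      # an email.Message of parsed metadata
    print(md['Summary'])

    # entry_points() here returns a dict mapping group name to a tuple
    # of EntryPoint objects, aggregated across all distributions.
    eps = metadata.entry_points().get('console_scripts', ())
    print([ep.name for ep in eps][:5])

    try:
        metadata.distribution('definitely-not-installed')
    except metadata.PackageNotFoundError as exc:
        print('missing:', exc)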
diff --git a/contrib/tools/python3/src/Lib/importlib/resources.py b/contrib/tools/python3/src/Lib/importlib/resources.py
index b803a01c91..7d03311cf4 100644
--- a/contrib/tools/python3/src/Lib/importlib/resources.py
+++ b/contrib/tools/python3/src/Lib/importlib/resources.py
@@ -1,15 +1,15 @@
import os
from . import abc as resources_abc
-from . import _common
-from ._common import as_file
+from . import _common
+from ._common import as_file
from contextlib import contextmanager, suppress
from importlib import import_module
from importlib.abc import ResourceLoader
from io import BytesIO, TextIOWrapper
from pathlib import Path
from types import ModuleType
-from typing import ContextManager, Iterable, Optional, Union
+from typing import ContextManager, Iterable, Optional, Union
from typing import cast
from typing.io import BinaryIO, TextIO
@@ -17,9 +17,9 @@ from typing.io import BinaryIO, TextIO
__all__ = [
'Package',
'Resource',
- 'as_file',
+ 'as_file',
'contents',
- 'files',
+ 'files',
'is_resource',
'open_binary',
'open_text',
@@ -33,23 +33,23 @@ Package = Union[str, ModuleType]
Resource = Union[str, os.PathLike]
-def _resolve(name) -> ModuleType:
- """If name is a string, resolve to a module."""
- if hasattr(name, '__spec__'):
- return name
- return import_module(name)
-
-
+def _resolve(name) -> ModuleType:
+ """If name is a string, resolve to a module."""
+ if hasattr(name, '__spec__'):
+ return name
+ return import_module(name)
+
+
def _get_package(package) -> ModuleType:
"""Take a package name or module object and return the module.
- If a name, the module is imported. If the resolved module
+ If a name, the module is imported. If the resolved module
object is not a package, raise an exception.
"""
- module = _resolve(package)
- if module.__spec__.submodule_search_locations is None:
- raise TypeError('{!r} is not a package'.format(package))
- return module
+ module = _resolve(package)
+ if module.__spec__.submodule_search_locations is None:
+ raise TypeError('{!r} is not a package'.format(package))
+ return module
def _normalize_path(path) -> str:
@@ -60,7 +60,7 @@ def _normalize_path(path) -> str:
parent, file_name = os.path.split(path)
if parent:
raise ValueError('{!r} must be only a file name'.format(path))
- return file_name
+ return file_name
def _get_resource_reader(
@@ -89,8 +89,8 @@ def open_binary(package: Package, resource: Resource) -> BinaryIO:
reader = _get_resource_reader(package)
if reader is not None:
return reader.open_resource(resource)
- absolute_package_path = os.path.abspath(
- package.__spec__.origin or 'non-existent file')
+ absolute_package_path = os.path.abspath(
+ package.__spec__.origin or 'non-existent file')
package_path = os.path.dirname(absolute_package_path)
full_path = os.path.join(package_path, resource)
try:
@@ -109,7 +109,7 @@ def open_binary(package: Package, resource: Resource) -> BinaryIO:
message = '{!r} resource not found in {!r}'.format(
resource, package_name)
raise FileNotFoundError(message)
- return BytesIO(data)
+ return BytesIO(data)
def open_text(package: Package,
@@ -117,8 +117,8 @@ def open_text(package: Package,
encoding: str = 'utf-8',
errors: str = 'strict') -> TextIO:
"""Return a file-like object opened for text reading of the resource."""
- return TextIOWrapper(
- open_binary(package, resource), encoding=encoding, errors=errors)
+ return TextIOWrapper(
+ open_binary(package, resource), encoding=encoding, errors=errors)
def read_binary(package: Package, resource: Resource) -> bytes:
@@ -140,16 +140,16 @@ def read_text(package: Package,
return fp.read()
-def files(package: Package) -> resources_abc.Traversable:
- """
- Get a Traversable resource from a package
- """
- return _common.from_package(_get_package(package))
-
-
-def path(
- package: Package, resource: Resource,
- ) -> 'ContextManager[Path]':
+def files(package: Package) -> resources_abc.Traversable:
+ """
+ Get a Traversable resource from a package
+ """
+ return _common.from_package(_get_package(package))
+
+
+def path(
+ package: Package, resource: Resource,
+ ) -> 'ContextManager[Path]':
"""A context manager providing a file path object to the resource.
If the resource does not already exist on its own on the file system,
@@ -158,25 +158,25 @@ def path(
raised if the file was deleted prior to the context manager
exiting).
"""
- reader = _get_resource_reader(_get_package(package))
- return (
- _path_from_reader(reader, resource)
- if reader else
- _common.as_file(files(package).joinpath(_normalize_path(resource)))
- )
-
-
-@contextmanager
-def _path_from_reader(reader, resource):
- norm_resource = _normalize_path(resource)
- with suppress(FileNotFoundError):
- yield Path(reader.resource_path(norm_resource))
- return
- opener_reader = reader.open_resource(norm_resource)
- with _common._tempfile(opener_reader.read, suffix=norm_resource) as res:
- yield res
-
-
+ reader = _get_resource_reader(_get_package(package))
+ return (
+ _path_from_reader(reader, resource)
+ if reader else
+ _common.as_file(files(package).joinpath(_normalize_path(resource)))
+ )
+
+
+@contextmanager
+def _path_from_reader(reader, resource):
+ norm_resource = _normalize_path(resource)
+ with suppress(FileNotFoundError):
+ yield Path(reader.resource_path(norm_resource))
+ return
+ opener_reader = reader.open_resource(norm_resource)
+ with _common._tempfile(opener_reader.read, suffix=norm_resource) as res:
+ yield res
+
+
def is_resource(package: Package, name: str) -> bool:
"""True if 'name' is a resource inside 'package'.
@@ -187,10 +187,10 @@ def is_resource(package: Package, name: str) -> bool:
reader = _get_resource_reader(package)
if reader is not None:
return reader.is_resource(name)
- package_contents = set(contents(package))
+ package_contents = set(contents(package))
if name not in package_contents:
return False
- return (_common.from_package(package) / name).is_file()
+ return (_common.from_package(package) / name).is_file()
def contents(package: Package) -> Iterable[str]:
@@ -205,11 +205,11 @@ def contents(package: Package) -> Iterable[str]:
if reader is not None:
return reader.contents()
# Is the package a namespace package? By definition, namespace packages
- # cannot have resources.
- namespace = (
- package.__spec__.origin is None or
- package.__spec__.origin == 'namespace'
- )
- if namespace or not package.__spec__.has_location:
+ # cannot have resources.
+ namespace = (
+ package.__spec__.origin is None or
+ package.__spec__.origin == 'namespace'
+ )
+ if namespace or not package.__spec__.has_location:
return ()
- return list(item.name for item in _common.from_package(package).iterdir())
+ return list(item.name for item in _common.from_package(package).iterdir())
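With files() wired through _common, a package's resources can be addressed as Traversable objects even when the package lives in a zip archive, and path() falls back to as_file() when no resource reader supplies a concrete filesystem path. A sketch against a hypothetical installed package mypkg that ships a data.txt resource:

    from importlib import resources

    # Assumption: 'mypkg' is importable and contains 'data.txt'.
    print(resources.is_resource('mypkg', 'data.txt'))
    print(resources.read_text('mypkg', 'data.txt'))

    # files() returns a Traversable; joinpath / '/' navigates within it.
    data = (resources.files('mypkg') / 'data.txt').read_bytes()

    # path() guarantees a real filesystem path only inside the context;
    # for zipped packages the file is a temporary extraction.
    with resources.path('mypkg', 'data.txt') as p:
        print(p)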
diff --git a/contrib/tools/python3/src/Lib/importlib/util.py b/contrib/tools/python3/src/Lib/importlib/util.py
index 269a6fa930..07a0f2db28 100644
--- a/contrib/tools/python3/src/Lib/importlib/util.py
+++ b/contrib/tools/python3/src/Lib/importlib/util.py
@@ -29,8 +29,8 @@ def resolve_name(name, package):
if not name.startswith('.'):
return name
elif not package:
- raise ImportError(f'no package specified for {repr(name)} '
- '(required for relative module names)')
+ raise ImportError(f'no package specified for {repr(name)} '
+ '(required for relative module names)')
level = 0
for character in name:
if character != '.':
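resolve_name() leaves absolute names untouched and rejects relative names when no package is given; otherwise the number of leading dots selects how many trailing components of the package are stripped. A short sketch:

    from importlib.util import resolve_name

    print(resolve_name('mod', None))         # absolute name passes through: 'mod'
    print(resolve_name('.mod', 'pkg'))       # 'pkg.mod'
    print(resolve_name('..mod', 'pkg.sub'))  # two dots go up one level: 'pkg.mod'

    try:
        resolve_name('.mod', '')             # relative name, but no package
    except ImportError as exc:
        print(exc)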
diff --git a/contrib/tools/python3/src/Lib/inspect.py b/contrib/tools/python3/src/Lib/inspect.py
index ffe0a781f7..626e408e9f 100644
--- a/contrib/tools/python3/src/Lib/inspect.py
+++ b/contrib/tools/python3/src/Lib/inspect.py
@@ -32,7 +32,7 @@ __author__ = ('Ka-Ping Yee <ping@lfw.org>',
'Yury Selivanov <yselivanov@sprymix.com>')
import abc
-import ast
+import ast
import dis
import collections.abc
import enum
@@ -111,7 +111,7 @@ def ismethoddescriptor(object):
def isdatadescriptor(object):
"""Return true if the object is a data descriptor.
- Data descriptors have a __set__ or a __delete__ attribute. Examples are
+ Data descriptors have a __set__ or a __delete__ attribute. Examples are
properties (defined in Python) and getsets and members (defined in C).
Typically, data descriptors will also have __name__ and __doc__ attributes
(properties, getsets, and members have both of these attributes), but this
@@ -120,7 +120,7 @@ def isdatadescriptor(object):
# mutual exclusion
return False
tp = type(object)
- return hasattr(tp, "__set__") or hasattr(tp, "__delete__")
+ return hasattr(tp, "__set__") or hasattr(tp, "__delete__")
if hasattr(types, 'MemberDescriptorType'):
# CPython and equivalent
@@ -169,38 +169,38 @@ def isfunction(object):
__kwdefaults__ dict of keyword only parameters with defaults"""
return isinstance(object, types.FunctionType)
-def _has_code_flag(f, flag):
- """Return true if ``f`` is a function (or a method or functools.partial
- wrapper wrapping a function) whose code object has the given ``flag``
- set in its flags."""
- while ismethod(f):
- f = f.__func__
- f = functools._unwrap_partial(f)
- if not isfunction(f):
- return False
- return bool(f.__code__.co_flags & flag)
-
-def isgeneratorfunction(obj):
+def _has_code_flag(f, flag):
+ """Return true if ``f`` is a function (or a method or functools.partial
+ wrapper wrapping a function) whose code object has the given ``flag``
+ set in its flags."""
+ while ismethod(f):
+ f = f.__func__
+ f = functools._unwrap_partial(f)
+ if not isfunction(f):
+ return False
+ return bool(f.__code__.co_flags & flag)
+
+def isgeneratorfunction(obj):
"""Return true if the object is a user-defined generator function.
Generator function objects provide the same attributes as functions.
See help(isfunction) for a list of attributes."""
- return _has_code_flag(obj, CO_GENERATOR)
+ return _has_code_flag(obj, CO_GENERATOR)
-def iscoroutinefunction(obj):
+def iscoroutinefunction(obj):
"""Return true if the object is a coroutine function.
Coroutine functions are defined with "async def" syntax.
"""
- return _has_code_flag(obj, CO_COROUTINE)
+ return _has_code_flag(obj, CO_COROUTINE)
-def isasyncgenfunction(obj):
+def isasyncgenfunction(obj):
"""Return true if the object is an asynchronous generator function.
Asynchronous generator functions are defined with "async def"
syntax and have "yield" expressions in their body.
"""
- return _has_code_flag(obj, CO_ASYNC_GENERATOR)
+ return _has_code_flag(obj, CO_ASYNC_GENERATOR)
def isasyncgen(object):
"""Return true if the object is an asynchronous generator."""
@@ -273,11 +273,11 @@ def iscode(object):
| 16=nested | 32=generator | 64=nofree | 128=coroutine
| 256=iterable_coroutine | 512=async_generator
co_freevars tuple of names of free variables
- co_posonlyargcount number of positional only arguments
+ co_posonlyargcount number of positional only arguments
co_kwonlyargcount number of keyword only arguments (not including ** arg)
co_lnotab encoded mapping of line numbers to bytecode indices
co_name name with which this code object was defined
- co_names tuple of names other than arguments and function locals
+ co_names tuple of names other than arguments and function locals
co_nlocals number of local variables
co_stacksize virtual machine stack space required
co_varnames tuple of names of arguments and local variables"""
@@ -589,10 +589,10 @@ def _finddoc(obj):
cls = obj.__objclass__
if getattr(cls, name) is not obj:
return None
- if ismemberdescriptor(obj):
- slots = getattr(cls, '__slots__', None)
- if isinstance(slots, dict) and name in slots:
- return slots[name]
+ if ismemberdescriptor(obj):
+ slots = getattr(cls, '__slots__', None)
+ if isinstance(slots, dict) and name in slots:
+ return slots[name]
else:
return None
for base in cls.__mro__:
@@ -660,9 +660,9 @@ def getfile(object):
raise TypeError('{!r} is a built-in module'.format(object))
if isclass(object):
if hasattr(object, '__module__'):
- module = sys.modules.get(object.__module__)
- if getattr(module, '__file__', None) is not None:
- return module.__file__
+ module = sys.modules.get(object.__module__)
+ if getattr(module, '__file__', None) is not None:
+ return module.__file__
raise TypeError('{!r} is a built-in class'.format(object))
if ismethod(object):
object = object.__func__
@@ -736,13 +736,13 @@ def getmodule(object, _filename=None):
# Try the cache again with the absolute file name
try:
file = getabsfile(object, _filename)
- except (TypeError, FileNotFoundError):
+ except (TypeError, FileNotFoundError):
return None
if file in modulesbyfile:
return sys.modules.get(modulesbyfile[file])
# Update the filename to module name cache and check yet again
# Copy sys.modules in order to cope with changes while iterating
- for modname, module in sys.modules.copy().items():
+ for modname, module in sys.modules.copy().items():
if ismodule(module) and hasattr(module, '__file__'):
f = module.__file__
if f == _filesbymodname.get(modname, None):
@@ -770,42 +770,42 @@ def getmodule(object, _filename=None):
if builtinobject is object:
return builtin
-
-class ClassFoundException(Exception):
- pass
-
-
-class _ClassFinder(ast.NodeVisitor):
-
- def __init__(self, qualname):
- self.stack = []
- self.qualname = qualname
-
- def visit_FunctionDef(self, node):
- self.stack.append(node.name)
- self.stack.append('<locals>')
- self.generic_visit(node)
- self.stack.pop()
- self.stack.pop()
-
- visit_AsyncFunctionDef = visit_FunctionDef
-
- def visit_ClassDef(self, node):
- self.stack.append(node.name)
- if self.qualname == '.'.join(self.stack):
- # Return the decorator for the class if present
- if node.decorator_list:
- line_number = node.decorator_list[0].lineno
- else:
- line_number = node.lineno
-
- # decrement by one since line numbers are zero-indexed
- line_number -= 1
- raise ClassFoundException(line_number)
- self.generic_visit(node)
- self.stack.pop()
-
-
+
+class ClassFoundException(Exception):
+ pass
+
+
+class _ClassFinder(ast.NodeVisitor):
+
+ def __init__(self, qualname):
+ self.stack = []
+ self.qualname = qualname
+
+ def visit_FunctionDef(self, node):
+ self.stack.append(node.name)
+ self.stack.append('<locals>')
+ self.generic_visit(node)
+ self.stack.pop()
+ self.stack.pop()
+
+ visit_AsyncFunctionDef = visit_FunctionDef
+
+ def visit_ClassDef(self, node):
+ self.stack.append(node.name)
+ if self.qualname == '.'.join(self.stack):
+ # Return the decorator for the class if present
+ if node.decorator_list:
+ line_number = node.decorator_list[0].lineno
+ else:
+ line_number = node.lineno
+
+ # decrement by one since lines starts with indexing by zero
+ line_number -= 1
+ raise ClassFoundException(line_number)
+ self.generic_visit(node)
+ self.stack.pop()
+
+
def findsource(object):
"""Return the entire source file and starting line number for an object.
@@ -838,15 +838,15 @@ def findsource(object):
return lines, 0
if isclass(object):
- qualname = object.__qualname__
- source = ''.join(lines)
- tree = ast.parse(source)
- class_finder = _ClassFinder(qualname)
- try:
- class_finder.visit(tree)
- except ClassFoundException as e:
- line_number = e.args[0]
- return lines, line_number
+ qualname = object.__qualname__
+ source = ''.join(lines)
+ tree = ast.parse(source)
+ class_finder = _ClassFinder(qualname)
+ try:
+ class_finder.visit(tree)
+ except ClassFoundException as e:
+ line_number = e.args[0]
+ return lines, line_number
else:
raise OSError('could not find class definition')
@@ -864,12 +864,12 @@ def findsource(object):
lnum = object.co_firstlineno - 1
pat = re.compile(r'^(\s*def\s)|(\s*async\s+def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
while lnum > 0:
- try:
- line = lines[lnum]
- except IndexError:
- raise OSError('lineno is out of bounds')
- if pat.match(line):
- break
+ try:
+ line = lines[lnum]
+ except IndexError:
+ raise OSError('lineno is out of bounds')
+ if pat.match(line):
+ break
lnum = lnum - 1
return lines, lnum
raise OSError('could not find code object')
@@ -931,7 +931,7 @@ class BlockFinder:
self.indecorator = False
self.decoratorhasargs = False
self.last = 1
- self.body_col0 = None
+ self.body_col0 = None
def tokeneater(self, type, token, srowcol, erowcol, line):
if not self.started and not self.indecorator:
@@ -963,8 +963,8 @@ class BlockFinder:
elif self.passline:
pass
elif type == tokenize.INDENT:
- if self.body_col0 is None and self.started:
- self.body_col0 = erowcol[1]
+ if self.body_col0 is None and self.started:
+ self.body_col0 = erowcol[1]
self.indent = self.indent + 1
self.passline = True
elif type == tokenize.DEDENT:
@@ -974,10 +974,10 @@ class BlockFinder:
# not e.g. for "if: else:" or "try: finally:" blocks)
if self.indent <= 0:
raise EndOfBlock
- elif type == tokenize.COMMENT:
- if self.body_col0 is not None and srowcol[1] >= self.body_col0:
- # Include comments if indented at least as much as the block
- self.last = srowcol[0]
+ elif type == tokenize.COMMENT:
+ if self.body_col0 is not None and srowcol[1] >= self.body_col0:
+ # Include comments if indented at least as much as the block
+ self.last = srowcol[0]
elif self.indent == 0 and type not in (tokenize.COMMENT, tokenize.NL):
# any other token on the same indentation level end the previous
# block as well, except the pseudo-tokens COMMENT and NL.
@@ -1049,7 +1049,7 @@ def getclasstree(classes, unique=False):
for c in classes:
if c.__bases__:
for parent in c.__bases__:
- if parent not in children:
+ if parent not in children:
children[parent] = []
if c not in children[parent]:
children[parent].append(c)
@@ -1074,7 +1074,7 @@ def getargs(co):
if not iscode(co):
raise TypeError('{!r} is not a code object'.format(co))
- names = co.co_varnames
+ names = co.co_varnames
nargs = co.co_argcount
nkwargs = co.co_kwonlyargcount
args = list(names[:nargs])
@@ -1089,7 +1089,7 @@ def getargs(co):
varkw = None
if co.co_flags & CO_VARKEYWORDS:
varkw = co.co_varnames[nargs]
- return Arguments(args + kwonlyargs, varargs, varkw)
+ return Arguments(args + kwonlyargs, varargs, varkw)
ArgSpec = namedtuple('ArgSpec', 'args varargs keywords defaults')
@@ -1120,7 +1120,7 @@ def getargspec(func):
getfullargspec(func)
if kwonlyargs or ann:
raise ValueError("Function has keyword-only parameters or annotations"
- ", use inspect.signature() API which can support them")
+ ", use inspect.signature() API which can support them")
return ArgSpec(args, varargs, varkw, defaults)
FullArgSpec = namedtuple('FullArgSpec',
@@ -1173,7 +1173,7 @@ def getfullargspec(func):
args = []
varargs = None
varkw = None
- posonlyargs = []
+ posonlyargs = []
kwonlyargs = []
annotations = {}
defaults = ()
@@ -1187,9 +1187,9 @@ def getfullargspec(func):
name = param.name
if kind is _POSITIONAL_ONLY:
- posonlyargs.append(name)
- if param.default is not param.empty:
- defaults += (param.default,)
+ posonlyargs.append(name)
+ if param.default is not param.empty:
+ defaults += (param.default,)
elif kind is _POSITIONAL_OR_KEYWORD:
args.append(name)
if param.default is not param.empty:
@@ -1214,7 +1214,7 @@ def getfullargspec(func):
# compatibility with 'func.__defaults__'
defaults = None
- return FullArgSpec(posonlyargs + args, varargs, varkw, defaults,
+ return FullArgSpec(posonlyargs + args, varargs, varkw, defaults,
kwonlyargs, kwdefaults, annotations)
@@ -1233,8 +1233,8 @@ def getargvalues(frame):
def formatannotation(annotation, base_module=None):
if getattr(annotation, '__module__', None) == 'typing':
return repr(annotation).replace('typing.', '')
- if isinstance(annotation, types.GenericAlias):
- return str(annotation)
+ if isinstance(annotation, types.GenericAlias):
+ return str(annotation)
if isinstance(annotation, type):
if annotation.__module__ in ('builtins', base_module):
return annotation.__qualname__
@@ -1365,7 +1365,7 @@ def _too_many(f_name, args, kwonly, varargs, defcount, given, values):
(f_name, sig, "s" if plural else "", given, kwonly_sig,
"was" if given == 1 and not kwonly_given else "were"))
-def getcallargs(func, /, *positional, **named):
+def getcallargs(func, /, *positional, **named):
"""Get the mapping of arguments to values.
A dict is returned, with keys the function argument names (including the
@@ -2035,7 +2035,7 @@ def _signature_fromstr(cls, obj, s, skip_bound_arg=True):
def parse_name(node):
assert isinstance(node, ast.arg)
- if node.annotation is not None:
+ if node.annotation is not None:
raise ValueError("Annotations are not currently supported")
return node.arg
@@ -2048,8 +2048,8 @@ def _signature_fromstr(cls, obj, s, skip_bound_arg=True):
except NameError:
raise RuntimeError()
- if isinstance(value, (str, int, float, bytes, bool, type(None))):
- return ast.Constant(value)
+ if isinstance(value, (str, int, float, bytes, bool, type(None))):
+ return ast.Constant(value)
raise RuntimeError()
class RewriteSymbolics(ast.NodeTransformer):
@@ -2149,7 +2149,7 @@ def _signature_from_builtin(cls, func, skip_bound_arg=True):
return _signature_fromstr(cls, func, s, skip_bound_arg)
-def _signature_from_function(cls, func, skip_bound_arg=True):
+def _signature_from_function(cls, func, skip_bound_arg=True):
"""Private helper: constructs Signature for the given python function."""
is_duck_function = False
@@ -2161,20 +2161,20 @@ def _signature_from_function(cls, func, skip_bound_arg=True):
# of pure function:
raise TypeError('{!r} is not a Python function'.format(func))
- s = getattr(func, "__text_signature__", None)
- if s:
- return _signature_fromstr(cls, func, s, skip_bound_arg)
-
+ s = getattr(func, "__text_signature__", None)
+ if s:
+ return _signature_fromstr(cls, func, s, skip_bound_arg)
+
Parameter = cls._parameter_cls
# Parameter information.
func_code = func.__code__
pos_count = func_code.co_argcount
arg_names = func_code.co_varnames
- posonly_count = func_code.co_posonlyargcount
- positional = arg_names[:pos_count]
+ posonly_count = func_code.co_posonlyargcount
+ positional = arg_names[:pos_count]
keyword_only_count = func_code.co_kwonlyargcount
- keyword_only = arg_names[pos_count:pos_count + keyword_only_count]
+ keyword_only = arg_names[pos_count:pos_count + keyword_only_count]
annotations = func.__annotations__
defaults = func.__defaults__
kwdefaults = func.__kwdefaults__
@@ -2186,27 +2186,27 @@ def _signature_from_function(cls, func, skip_bound_arg=True):
parameters = []
- non_default_count = pos_count - pos_default_count
- posonly_left = posonly_count
-
+ non_default_count = pos_count - pos_default_count
+ posonly_left = posonly_count
+
# Non-keyword-only parameters w/o defaults.
for name in positional[:non_default_count]:
- kind = _POSITIONAL_ONLY if posonly_left else _POSITIONAL_OR_KEYWORD
+ kind = _POSITIONAL_ONLY if posonly_left else _POSITIONAL_OR_KEYWORD
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
- kind=kind))
- if posonly_left:
- posonly_left -= 1
+ kind=kind))
+ if posonly_left:
+ posonly_left -= 1
# ... w/ defaults.
for offset, name in enumerate(positional[non_default_count:]):
- kind = _POSITIONAL_ONLY if posonly_left else _POSITIONAL_OR_KEYWORD
+ kind = _POSITIONAL_ONLY if posonly_left else _POSITIONAL_OR_KEYWORD
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
- kind=kind,
+ kind=kind,
default=defaults[offset]))
- if posonly_left:
- posonly_left -= 1
+ if posonly_left:
+ posonly_left -= 1
# *args
if func_code.co_flags & CO_VARARGS:
@@ -2252,18 +2252,18 @@ def _signature_from_callable(obj, *,
callable objects.
"""
- _get_signature_of = functools.partial(_signature_from_callable,
- follow_wrapper_chains=follow_wrapper_chains,
- skip_bound_arg=skip_bound_arg,
- sigcls=sigcls)
-
+ _get_signature_of = functools.partial(_signature_from_callable,
+ follow_wrapper_chains=follow_wrapper_chains,
+ skip_bound_arg=skip_bound_arg,
+ sigcls=sigcls)
+
if not callable(obj):
raise TypeError('{!r} is not a callable object'.format(obj))
if isinstance(obj, types.MethodType):
# In this case we skip the first parameter of the underlying
# function (usually `self` or `cls`).
- sig = _get_signature_of(obj.__func__)
+ sig = _get_signature_of(obj.__func__)
if skip_bound_arg:
return _signature_bound_method(sig)
@@ -2277,7 +2277,7 @@ def _signature_from_callable(obj, *,
# If the unwrapped object is a *method*, we might want to
# skip its first parameter (self).
# See test_signature_wrapped_bound_method for details.
- return _get_signature_of(obj)
+ return _get_signature_of(obj)
try:
sig = obj.__signature__
@@ -2304,7 +2304,7 @@ def _signature_from_callable(obj, *,
# (usually `self`, or `cls`) will not be passed
# automatically (as for boundmethods)
- wrapped_sig = _get_signature_of(partialmethod.func)
+ wrapped_sig = _get_signature_of(partialmethod.func)
sig = _signature_get_partial(wrapped_sig, partialmethod, (None,))
first_wrapped_param = tuple(wrapped_sig.parameters.values())[0]
@@ -2322,15 +2322,15 @@ def _signature_from_callable(obj, *,
if isfunction(obj) or _signature_is_functionlike(obj):
# If it's a pure Python function, or an object that is duck type
# of a Python function (Cython functions, for instance), then:
- return _signature_from_function(sigcls, obj,
- skip_bound_arg=skip_bound_arg)
+ return _signature_from_function(sigcls, obj,
+ skip_bound_arg=skip_bound_arg)
if _signature_is_builtin(obj):
return _signature_from_builtin(sigcls, obj,
skip_bound_arg=skip_bound_arg)
if isinstance(obj, functools.partial):
- wrapped_sig = _get_signature_of(obj.func)
+ wrapped_sig = _get_signature_of(obj.func)
return _signature_get_partial(wrapped_sig, obj)
sig = None
@@ -2341,26 +2341,26 @@ def _signature_from_callable(obj, *,
# in its metaclass
call = _signature_get_user_defined_method(type(obj), '__call__')
if call is not None:
- sig = _get_signature_of(call)
+ sig = _get_signature_of(call)
else:
- factory_method = None
+ factory_method = None
new = _signature_get_user_defined_method(obj, '__new__')
- init = _signature_get_user_defined_method(obj, '__init__')
- # Now we check if the 'obj' class has an own '__new__' method
- if '__new__' in obj.__dict__:
- factory_method = new
- # or an own '__init__' method
- elif '__init__' in obj.__dict__:
- factory_method = init
- # If not, we take inherited '__new__' or '__init__', if present
- elif new is not None:
- factory_method = new
- elif init is not None:
- factory_method = init
-
- if factory_method is not None:
- sig = _get_signature_of(factory_method)
-
+ init = _signature_get_user_defined_method(obj, '__init__')
+ # Now we check if the 'obj' class has an own '__new__' method
+ if '__new__' in obj.__dict__:
+ factory_method = new
+ # or an own '__init__' method
+ elif '__init__' in obj.__dict__:
+ factory_method = init
+ # If not, we take inherited '__new__' or '__init__', if present
+ elif new is not None:
+ factory_method = new
+ elif init is not None:
+ factory_method = init
+
+ if factory_method is not None:
+ sig = _get_signature_of(factory_method)
+
if sig is None:
# At this point we know that `obj` is a class, with no user-
# defined '__init__', '__new__', or class-level '__call__'
@@ -2392,7 +2392,7 @@ def _signature_from_callable(obj, *,
if (obj.__init__ is object.__init__ and
obj.__new__ is object.__new__):
# Return a signature of 'object' builtin.
- return sigcls.from_callable(object)
+ return sigcls.from_callable(object)
else:
raise ValueError(
'no signature found for builtin type {!r}'.format(obj))
@@ -2405,7 +2405,7 @@ def _signature_from_callable(obj, *,
call = _signature_get_user_defined_method(type(obj), '__call__')
if call is not None:
try:
- sig = _get_signature_of(call)
+ sig = _get_signature_of(call)
except ValueError as ex:
msg = 'no signature found for {!r}'.format(obj)
raise ValueError(msg) from ex
@@ -2444,9 +2444,9 @@ class _ParameterKind(enum.IntEnum):
def __str__(self):
return self._name_
- @property
- def description(self):
- return _PARAM_NAME_MAPPING[self]
+ @property
+ def description(self):
+ return _PARAM_NAME_MAPPING[self]
_POSITIONAL_ONLY = _ParameterKind.POSITIONAL_ONLY
_POSITIONAL_OR_KEYWORD = _ParameterKind.POSITIONAL_OR_KEYWORD
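Sketch of the new description property (assuming the module-level
_PARAM_NAME_MAPPING defined elsewhere in this file):

    import inspect

    kind = inspect.Parameter.POSITIONAL_ONLY
    str(kind)         # 'POSITIONAL_ONLY'  (the enum member name)
    kind.description  # 'positional-only'  (the form used in error messages below)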
@@ -2503,7 +2503,7 @@ class Parameter:
if default is not _empty:
if self._kind in (_VAR_POSITIONAL, _VAR_KEYWORD):
msg = '{} parameters cannot have default values'
- msg = msg.format(self._kind.description)
+ msg = msg.format(self._kind.description)
raise ValueError(msg)
self._default = default
self._annotation = annotation
@@ -2525,7 +2525,7 @@ class Parameter:
'implicit arguments must be passed as '
'positional or keyword arguments, not {}'
)
- msg = msg.format(self._kind.description)
+ msg = msg.format(self._kind.description)
raise ValueError(msg)
self._kind = _POSITIONAL_ONLY
name = 'implicit{}'.format(name[1:])
@@ -2624,7 +2624,7 @@ class BoundArguments:
Has the following public attributes:
- * arguments : dict
+ * arguments : dict
An ordered mutable mapping of parameters' names to arguments' values.
Does not contain arguments' default values.
* signature : Signature
@@ -2724,7 +2724,7 @@ class BoundArguments:
# Signature.bind_partial().
continue
new_arguments.append((name, val))
- self.arguments = dict(new_arguments)
+ self.arguments = dict(new_arguments)
def __eq__(self, other):
if self is other:
@@ -2792,7 +2792,7 @@ class Signature:
top_kind = _POSITIONAL_ONLY
kind_defaults = False
- for param in parameters:
+ for param in parameters:
kind = param.kind
name = param.name
@@ -2801,8 +2801,8 @@ class Signature:
'wrong parameter order: {} parameter before {} '
'parameter'
)
- msg = msg.format(top_kind.description,
- kind.description)
+ msg = msg.format(top_kind.description,
+ kind.description)
raise ValueError(msg)
elif kind > top_kind:
kind_defaults = False
@@ -2827,7 +2827,7 @@ class Signature:
params[name] = param
else:
- params = OrderedDict((param.name, param) for param in parameters)
+ params = OrderedDict((param.name, param) for param in parameters)
self._parameters = types.MappingProxyType(params)
self._return_annotation = return_annotation
@@ -2909,7 +2909,7 @@ class Signature:
def _bind(self, args, kwargs, *, partial=False):
"""Private method. Don't use directly."""
- arguments = {}
+ arguments = {}
parameters = iter(self.parameters.values())
parameters_ex = ()
@@ -2980,7 +2980,7 @@ class Signature:
arguments[param.name] = tuple(values)
break
- if param.name in kwargs and param.kind != _POSITIONAL_ONLY:
+ if param.name in kwargs and param.kind != _POSITIONAL_ONLY:
raise TypeError(
'multiple values for argument {arg!r}'.format(
arg=param.name)) from None
@@ -3037,19 +3037,19 @@ class Signature:
return self._bound_arguments_cls(self, arguments)
- def bind(self, /, *args, **kwargs):
+ def bind(self, /, *args, **kwargs):
"""Get a BoundArguments object, that maps the passed `args`
and `kwargs` to the function's signature. Raises `TypeError`
if the passed arguments can not be bound.
"""
- return self._bind(args, kwargs)
+ return self._bind(args, kwargs)
- def bind_partial(self, /, *args, **kwargs):
+ def bind_partial(self, /, *args, **kwargs):
"""Get a BoundArguments object, that partially maps the
passed `args` and `kwargs` to the function's signature.
Raises `TypeError` if the passed arguments can not be bound.
"""
- return self._bind(args, kwargs, partial=True)
+ return self._bind(args, kwargs, partial=True)
def __reduce__(self):
return (type(self),
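Usage sketch for the positional-only bind()/bind_partial() signatures above
(illustrative, not part of the patch):

    import inspect

    def f(a, b=10, *, c):
        return a + b + c

    sig = inspect.signature(f)
    bound = sig.bind(1, c=100)      # raises TypeError if it cannot bind
    bound.apply_defaults()
    bound.arguments                 # {'a': 1, 'b': 10, 'c': 100} -- now a plain dict
    f(*bound.args, **bound.kwargs)  # 111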
@@ -3138,7 +3138,7 @@ def _main():
type(exc).__name__,
exc)
print(msg, file=sys.stderr)
- sys.exit(2)
+ sys.exit(2)
if has_attrs:
parts = attrs.split(".")
@@ -3148,7 +3148,7 @@ def _main():
if module.__name__ in sys.builtin_module_names:
print("Can't get info for builtin modules.", file=sys.stderr)
- sys.exit(1)
+ sys.exit(1)
if args.details:
print('Target: {}'.format(target))
diff --git a/contrib/tools/python3/src/Lib/io.py b/contrib/tools/python3/src/Lib/io.py
index fbce6efc01..47afaeb664 100644
--- a/contrib/tools/python3/src/Lib/io.py
+++ b/contrib/tools/python3/src/Lib/io.py
@@ -41,8 +41,8 @@ __author__ = ("Guido van Rossum <guido@python.org>, "
"Amaury Forgeot d'Arc <amauryfa@gmail.com>, "
"Benjamin Peterson <benjamin@python.org>")
-__all__ = ["BlockingIOError", "open", "open_code", "IOBase", "RawIOBase",
- "FileIO", "BytesIO", "StringIO", "BufferedIOBase",
+__all__ = ["BlockingIOError", "open", "open_code", "IOBase", "RawIOBase",
+ "FileIO", "BytesIO", "StringIO", "BufferedIOBase",
"BufferedReader", "BufferedWriter", "BufferedRWPair",
"BufferedRandom", "TextIOBase", "TextIOWrapper",
"UnsupportedOperation", "SEEK_SET", "SEEK_CUR", "SEEK_END"]
@@ -52,7 +52,7 @@ import _io
import abc
from _io import (DEFAULT_BUFFER_SIZE, BlockingIOError, UnsupportedOperation,
- open, open_code, FileIO, BytesIO, StringIO, BufferedReader,
+ open, open_code, FileIO, BytesIO, StringIO, BufferedReader,
BufferedWriter, BufferedRWPair, BufferedRandom,
IncrementalNewlineDecoder, TextIOWrapper)
diff --git a/contrib/tools/python3/src/Lib/ipaddress.py b/contrib/tools/python3/src/Lib/ipaddress.py
index 6cb92ed552..d61cc08a67 100644
--- a/contrib/tools/python3/src/Lib/ipaddress.py
+++ b/contrib/tools/python3/src/Lib/ipaddress.py
@@ -488,7 +488,7 @@ class _IPAddressBase:
"""
# int allows a leading +/- as well as surrounding whitespace,
# so we ensure that isn't the case
- if not (prefixlen_str.isascii() and prefixlen_str.isdigit()):
+ if not (prefixlen_str.isascii() and prefixlen_str.isdigit()):
cls._report_invalid_netmask(prefixlen_str)
try:
prefixlen = int(prefixlen_str)
@@ -532,36 +532,36 @@ class _IPAddressBase:
except ValueError:
cls._report_invalid_netmask(ip_str)
- @classmethod
- def _split_addr_prefix(cls, address):
- """Helper function to parse address of Network/Interface.
-
- Arg:
- address: Argument of Network/Interface.
-
- Returns:
- (addr, prefix) tuple.
- """
- # a packed address or integer
- if isinstance(address, (bytes, int)):
- return address, cls._max_prefixlen
-
- if not isinstance(address, tuple):
- # Assume input argument to be string or any object representation
- # which converts into a formatted IP prefix string.
- address = _split_optional_netmask(address)
-
- # Constructing from a tuple (addr, [mask])
- if len(address) > 1:
- return address
- return address[0], cls._max_prefixlen
-
+ @classmethod
+ def _split_addr_prefix(cls, address):
+ """Helper function to parse address of Network/Interface.
+
+ Arg:
+ address: Argument of Network/Interface.
+
+ Returns:
+ (addr, prefix) tuple.
+ """
+ # a packed address or integer
+ if isinstance(address, (bytes, int)):
+ return address, cls._max_prefixlen
+
+ if not isinstance(address, tuple):
+ # Assume input argument to be string or any object representation
+ # which converts into a formatted IP prefix string.
+ address = _split_optional_netmask(address)
+
+ # Constructing from a tuple (addr, [mask])
+ if len(address) > 1:
+ return address
+ return address[0], cls._max_prefixlen
+
def __reduce__(self):
return self.__class__, (str(self),)
-_address_fmt_re = None
-
+_address_fmt_re = None
+
@functools.total_ordering
class _BaseAddress(_IPAddressBase):
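For context (not in the diff), the new _split_addr_prefix() helper is what
lets the Network/Interface constructors accept all of these spellings:

    import ipaddress

    ipaddress.ip_network('192.0.2.0/24')     # string with mask
    ipaddress.ip_network(('192.0.2.0', 24))  # (addr, prefix) tuple
    ipaddress.ip_network(3221225984)         # bare int, implies the max prefix (/32)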
@@ -620,56 +620,56 @@ class _BaseAddress(_IPAddressBase):
def __reduce__(self):
return self.__class__, (self._ip,)
- def __format__(self, fmt):
- """Returns an IP address as a formatted string.
-
- Supported presentation types are:
- 's': returns the IP address as a string (default)
- 'b': converts to binary and returns a zero-padded string
- 'X' or 'x': converts to upper- or lower-case hex and returns a zero-padded string
- 'n': the same as 'b' for IPv4 and 'x' for IPv6
-
- For binary and hex presentation types, the alternate form specifier
- '#' and the grouping option '_' are supported.
- """
-
- # Support string formatting
- if not fmt or fmt[-1] == 's':
- return format(str(self), fmt)
-
- # From here on down, support for 'bnXx'
- global _address_fmt_re
- if _address_fmt_re is None:
- import re
- _address_fmt_re = re.compile('(#?)(_?)([xbnX])')
-
- m = _address_fmt_re.fullmatch(fmt)
- if not m:
- return super().__format__(fmt)
-
- alternate, grouping, fmt_base = m.groups()
-
- # Set some defaults
- if fmt_base == 'n':
- if self._version == 4:
- fmt_base = 'b' # Binary is default for ipv4
- else:
- fmt_base = 'x' # Hex is default for ipv6
-
- if fmt_base == 'b':
- padlen = self._max_prefixlen
- else:
- padlen = self._max_prefixlen // 4
-
- if grouping:
- padlen += padlen // 4 - 1
-
- if alternate:
- padlen += 2 # 0b or 0x
-
- return format(int(self), f'{alternate}0{padlen}{grouping}{fmt_base}')
-
-
+ def __format__(self, fmt):
+ """Returns an IP address as a formatted string.
+
+ Supported presentation types are:
+ 's': returns the IP address as a string (default)
+ 'b': converts to binary and returns a zero-padded string
+ 'X' or 'x': converts to upper- or lower-case hex and returns a zero-padded string
+ 'n': the same as 'b' for IPv4 and 'x' for IPv6
+
+ For binary and hex presentation types, the alternate form specifier
+ '#' and the grouping option '_' are supported.
+ """
+
+ # Support string formatting
+ if not fmt or fmt[-1] == 's':
+ return format(str(self), fmt)
+
+ # From here on down, support for 'bnXx'
+ global _address_fmt_re
+ if _address_fmt_re is None:
+ import re
+ _address_fmt_re = re.compile('(#?)(_?)([xbnX])')
+
+ m = _address_fmt_re.fullmatch(fmt)
+ if not m:
+ return super().__format__(fmt)
+
+ alternate, grouping, fmt_base = m.groups()
+
+ # Set some defaults
+ if fmt_base == 'n':
+ if self._version == 4:
+ fmt_base = 'b' # Binary is default for ipv4
+ else:
+ fmt_base = 'x' # Hex is default for ipv6
+
+ if fmt_base == 'b':
+ padlen = self._max_prefixlen
+ else:
+ padlen = self._max_prefixlen // 4
+
+ if grouping:
+ padlen += padlen // 4 - 1
+
+ if alternate:
+ padlen += 2 # 0b or 0x
+
+ return format(int(self), f'{alternate}0{padlen}{grouping}{fmt_base}')
+
+
@functools.total_ordering
class _BaseNetwork(_IPAddressBase):
"""A generic IP network object.
@@ -748,7 +748,7 @@ class _BaseNetwork(_IPAddressBase):
# dealing with another address
else:
# address
- return other._ip & self.netmask._ip == self.network_address._ip
+ return other._ip & self.netmask._ip == self.network_address._ip
def overlaps(self, other):
"""Tell if self is partly contained in other."""
@@ -757,14 +757,14 @@ class _BaseNetwork(_IPAddressBase):
other.network_address in self or (
other.broadcast_address in self)))
- @functools.cached_property
+ @functools.cached_property
def broadcast_address(self):
- return self._address_class(int(self.network_address) |
- int(self.hostmask))
+ return self._address_class(int(self.network_address) |
+ int(self.hostmask))
- @functools.cached_property
+ @functools.cached_property
def hostmask(self):
- return self._address_class(int(self.netmask) ^ self._ALL_ONES)
+ return self._address_class(int(self.netmask) ^ self._ALL_ONES)
@property
def with_prefixlen(self):
@@ -1158,8 +1158,8 @@ class _BaseV4:
if arg not in cls._netmask_cache:
if isinstance(arg, int):
prefixlen = arg
- if not (0 <= prefixlen <= cls._max_prefixlen):
- cls._report_invalid_netmask(prefixlen)
+ if not (0 <= prefixlen <= cls._max_prefixlen):
+ cls._report_invalid_netmask(prefixlen)
else:
try:
# Check for a netmask in prefix length form
@@ -1215,7 +1215,7 @@ class _BaseV4:
if not octet_str:
raise ValueError("Empty octet not permitted")
# Whitelist the characters, since int() allows a lot of bizarre stuff.
- if not (octet_str.isascii() and octet_str.isdigit()):
+ if not (octet_str.isascii() and octet_str.isdigit()):
msg = "Only decimal digits permitted in %r"
raise ValueError(msg % octet_str)
# We do the length check second, since the invalid character error
@@ -1223,11 +1223,11 @@ class _BaseV4:
if len(octet_str) > 3:
msg = "At most 3 characters permitted in %r"
raise ValueError(msg % octet_str)
- # Handle leading zeros as strict as glibc's inet_pton()
- # See security bug bpo-36384
- if octet_str != '0' and octet_str[0] == '0':
- msg = "Leading zeros are not permitted in %r"
- raise ValueError(msg % octet_str)
+ # Handle leading zeros as strict as glibc's inet_pton()
+ # See security bug bpo-36384
+ if octet_str != '0' and octet_str[0] == '0':
+ msg = "Leading zeros are not permitted in %r"
+ raise ValueError(msg % octet_str)
# Convert to integer (we know digits are legal)
octet_int = int(octet_str, 10)
if octet_int > 255:
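Behavioral note on the bpo-36384 check above (example, not part of the diff):
octets with leading zeros are now rejected instead of being parsed as decimal:

    import ipaddress

    ipaddress.ip_address('192.0.2.1')   # OK
    ipaddress.ip_address('192.0.02.1')  # raises AddressValueError (a ValueError)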
@@ -1385,24 +1385,24 @@ class IPv4Address(_BaseV4, _BaseAddress):
class IPv4Interface(IPv4Address):
def __init__(self, address):
- addr, mask = self._split_addr_prefix(address)
+ addr, mask = self._split_addr_prefix(address)
- IPv4Address.__init__(self, addr)
- self.network = IPv4Network((addr, mask), strict=False)
+ IPv4Address.__init__(self, addr)
+ self.network = IPv4Network((addr, mask), strict=False)
self.netmask = self.network.netmask
- self._prefixlen = self.network._prefixlen
-
- @functools.cached_property
- def hostmask(self):
- return self.network.hostmask
+ self._prefixlen = self.network._prefixlen
+ @functools.cached_property
+ def hostmask(self):
+ return self.network.hostmask
+
def __str__(self):
return '%s/%d' % (self._string_from_ip_int(self._ip),
- self._prefixlen)
+ self._prefixlen)
def __eq__(self, other):
address_equal = IPv4Address.__eq__(self, other)
- if address_equal is NotImplemented or not address_equal:
+ if address_equal is NotImplemented or not address_equal:
return address_equal
try:
return self.network == other.network
@@ -1425,7 +1425,7 @@ class IPv4Interface(IPv4Address):
return False
def __hash__(self):
- return hash((self._ip, self._prefixlen, int(self.network.network_address)))
+ return hash((self._ip, self._prefixlen, int(self.network.network_address)))
__reduce__ = _IPAddressBase.__reduce__
@@ -1471,7 +1471,7 @@ class IPv4Network(_BaseV4, _BaseNetwork):
address: A string or integer representing the IP [& network].
'192.0.2.0/24'
'192.0.2.0/255.255.255.0'
- '192.0.2.0/0.0.0.255'
+ '192.0.2.0/0.0.0.255'
are all functionally the same in IPv4. Similarly,
'192.0.2.1'
'192.0.2.1/255.255.255.255'
@@ -1499,7 +1499,7 @@ class IPv4Network(_BaseV4, _BaseNetwork):
ValueError: If strict is True and a network address is not
supplied.
"""
- addr, mask = self._split_addr_prefix(address)
+ addr, mask = self._split_addr_prefix(address)
self.network_address = IPv4Address(addr)
self.netmask, self._prefixlen = self._make_netmask(mask)
@@ -1513,8 +1513,8 @@ class IPv4Network(_BaseV4, _BaseNetwork):
if self._prefixlen == (self._max_prefixlen - 1):
self.hosts = self.__iter__
- elif self._prefixlen == (self._max_prefixlen):
- self.hosts = lambda: [IPv4Address(addr)]
+ elif self._prefixlen == (self._max_prefixlen):
+ self.hosts = lambda: [IPv4Address(addr)]
@property
@functools.lru_cache()
@@ -1597,8 +1597,8 @@ class _BaseV6:
if arg not in cls._netmask_cache:
if isinstance(arg, int):
prefixlen = arg
- if not (0 <= prefixlen <= cls._max_prefixlen):
- cls._report_invalid_netmask(prefixlen)
+ if not (0 <= prefixlen <= cls._max_prefixlen):
+ cls._report_invalid_netmask(prefixlen)
else:
prefixlen = cls._prefix_from_prefix_string(arg)
netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen))
@@ -1842,26 +1842,26 @@ class _BaseV6:
reverse_chars = self.exploded[::-1].replace(':', '')
return '.'.join(reverse_chars) + '.ip6.arpa'
- @staticmethod
- def _split_scope_id(ip_str):
- """Helper function to parse IPv6 string address with scope id.
-
- See RFC 4007 for details.
-
- Args:
- ip_str: A string, the IPv6 address.
-
- Returns:
- (addr, scope_id) tuple.
-
- """
- addr, sep, scope_id = ip_str.partition('%')
- if not sep:
- scope_id = None
- elif not scope_id or '%' in scope_id:
- raise AddressValueError('Invalid IPv6 address: "%r"' % ip_str)
- return addr, scope_id
-
+ @staticmethod
+ def _split_scope_id(ip_str):
+ """Helper function to parse IPv6 string address with scope id.
+
+ See RFC 4007 for details.
+
+ Args:
+ ip_str: A string, the IPv6 address.
+
+ Returns:
+ (addr, scope_id) tuple.
+
+ """
+ addr, sep, scope_id = ip_str.partition('%')
+ if not sep:
+ scope_id = None
+ elif not scope_id or '%' in scope_id:
+ raise AddressValueError('Invalid IPv6 address: "%r"' % ip_str)
+ return addr, scope_id
+
@property
def max_prefixlen(self):
return self._max_prefixlen
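Illustration of the scope-id support added here (not part of the diff):

    import ipaddress

    addr = ipaddress.ip_address('fe80::1%eth0')
    addr.scope_id                                 # 'eth0'
    str(addr)                                     # 'fe80::1%eth0'
    addr == ipaddress.ip_address('fe80::1%eth1')  # False: scope ids differ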
@@ -1875,7 +1875,7 @@ class IPv6Address(_BaseV6, _BaseAddress):
"""Represent and manipulate single IPv6 Addresses."""
- __slots__ = ('_ip', '_scope_id', '__weakref__')
+ __slots__ = ('_ip', '_scope_id', '__weakref__')
def __init__(self, address):
"""Instantiate a new IPv6 address object.
@@ -1898,14 +1898,14 @@ class IPv6Address(_BaseV6, _BaseAddress):
if isinstance(address, int):
self._check_int_address(address)
self._ip = address
- self._scope_id = None
+ self._scope_id = None
return
# Constructing from a packed address
if isinstance(address, bytes):
self._check_packed_address(address, 16)
self._ip = int.from_bytes(address, 'big')
- self._scope_id = None
+ self._scope_id = None
return
# Assume input argument to be string or any object representation
@@ -1913,38 +1913,38 @@ class IPv6Address(_BaseV6, _BaseAddress):
addr_str = str(address)
if '/' in addr_str:
raise AddressValueError("Unexpected '/' in %r" % address)
- addr_str, self._scope_id = self._split_scope_id(addr_str)
-
+ addr_str, self._scope_id = self._split_scope_id(addr_str)
+
self._ip = self._ip_int_from_string(addr_str)
- def __str__(self):
- ip_str = super().__str__()
- return ip_str + '%' + self._scope_id if self._scope_id else ip_str
-
- def __hash__(self):
- return hash((self._ip, self._scope_id))
-
- def __eq__(self, other):
- address_equal = super().__eq__(other)
- if address_equal is NotImplemented:
- return NotImplemented
- if not address_equal:
- return False
- return self._scope_id == getattr(other, '_scope_id', None)
-
- @property
- def scope_id(self):
- """Identifier of a particular zone of the address's scope.
-
- See RFC 4007 for details.
-
- Returns:
- A string identifying the zone of the address if specified, else None.
-
- """
- return self._scope_id
-
+ def __str__(self):
+ ip_str = super().__str__()
+ return ip_str + '%' + self._scope_id if self._scope_id else ip_str
+
+ def __hash__(self):
+ return hash((self._ip, self._scope_id))
+
+ def __eq__(self, other):
+ address_equal = super().__eq__(other)
+ if address_equal is NotImplemented:
+ return NotImplemented
+ if not address_equal:
+ return False
+ return self._scope_id == getattr(other, '_scope_id', None)
+
@property
+ def scope_id(self):
+ """Identifier of a particular zone of the address's scope.
+
+ See RFC 4007 for details.
+
+ Returns:
+ A string identifying the zone of the address if specified, else None.
+
+ """
+ return self._scope_id
+
+ @property
def packed(self):
"""The binary representation of this address."""
return v6_int_to_packed(self._ip)
@@ -2085,24 +2085,24 @@ class IPv6Address(_BaseV6, _BaseAddress):
class IPv6Interface(IPv6Address):
def __init__(self, address):
- addr, mask = self._split_addr_prefix(address)
+ addr, mask = self._split_addr_prefix(address)
- IPv6Address.__init__(self, addr)
- self.network = IPv6Network((addr, mask), strict=False)
+ IPv6Address.__init__(self, addr)
+ self.network = IPv6Network((addr, mask), strict=False)
self.netmask = self.network.netmask
self._prefixlen = self.network._prefixlen
- @functools.cached_property
- def hostmask(self):
- return self.network.hostmask
-
+ @functools.cached_property
+ def hostmask(self):
+ return self.network.hostmask
+
def __str__(self):
- return '%s/%d' % (super().__str__(),
- self._prefixlen)
+ return '%s/%d' % (super().__str__(),
+ self._prefixlen)
def __eq__(self, other):
address_equal = IPv6Address.__eq__(self, other)
- if address_equal is NotImplemented or not address_equal:
+ if address_equal is NotImplemented or not address_equal:
return address_equal
try:
return self.network == other.network
@@ -2115,7 +2115,7 @@ class IPv6Interface(IPv6Address):
def __lt__(self, other):
address_less = IPv6Address.__lt__(self, other)
if address_less is NotImplemented:
- return address_less
+ return address_less
try:
return (self.network < other.network or
self.network == other.network and address_less)
@@ -2125,7 +2125,7 @@ class IPv6Interface(IPv6Address):
return False
def __hash__(self):
- return hash((self._ip, self._prefixlen, int(self.network.network_address)))
+ return hash((self._ip, self._prefixlen, int(self.network.network_address)))
__reduce__ = _IPAddressBase.__reduce__
@@ -2204,7 +2204,7 @@ class IPv6Network(_BaseV6, _BaseNetwork):
ValueError: If strict was True and a network address was not
supplied.
"""
- addr, mask = self._split_addr_prefix(address)
+ addr, mask = self._split_addr_prefix(address)
self.network_address = IPv6Address(addr)
self.netmask, self._prefixlen = self._make_netmask(mask)
@@ -2218,8 +2218,8 @@ class IPv6Network(_BaseV6, _BaseNetwork):
if self._prefixlen == (self._max_prefixlen - 1):
self.hosts = self.__iter__
- elif self._prefixlen == self._max_prefixlen:
- self.hosts = lambda: [IPv6Address(addr)]
+ elif self._prefixlen == self._max_prefixlen:
+ self.hosts = lambda: [IPv6Address(addr)]
def hosts(self):
"""Generate Iterator over usable hosts in a network.
diff --git a/contrib/tools/python3/src/Lib/json/__init__.py b/contrib/tools/python3/src/Lib/json/__init__.py
index e4c21daaf3..f7e69c9da4 100644
--- a/contrib/tools/python3/src/Lib/json/__init__.py
+++ b/contrib/tools/python3/src/Lib/json/__init__.py
@@ -133,7 +133,7 @@ def dump(obj, fp, *, skipkeys=False, ensure_ascii=True, check_circular=True,
If ``check_circular`` is false, then the circular reference check
for container types will be skipped and a circular reference will
- result in a ``RecursionError`` (or worse).
+ result in a ``RecursionError`` (or worse).
If ``allow_nan`` is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
@@ -195,7 +195,7 @@ def dumps(obj, *, skipkeys=False, ensure_ascii=True, check_circular=True,
If ``check_circular`` is false, then the circular reference check
for container types will be skipped and a circular reference will
- result in a ``RecursionError`` (or worse).
+ result in a ``RecursionError`` (or worse).
If ``allow_nan`` is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
@@ -296,7 +296,7 @@ def load(fp, *, cls=None, object_hook=None, parse_float=None,
parse_constant=parse_constant, object_pairs_hook=object_pairs_hook, **kw)
-def loads(s, *, cls=None, object_hook=None, parse_float=None,
+def loads(s, *, cls=None, object_hook=None, parse_float=None,
parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
"""Deserialize ``s`` (a ``str``, ``bytes`` or ``bytearray`` instance
containing a JSON document) to a Python object.
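Usage reminder for the loads() signature above (illustrative, not from the
patch):

    import json

    json.loads('{"a": 1, "b": 2.5}')    # {'a': 1, 'b': 2.5}
    json.loads('1.5', parse_float=str)  # '1.5' -- the hooks replace number parsing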
diff --git a/contrib/tools/python3/src/Lib/json/encoder.py b/contrib/tools/python3/src/Lib/json/encoder.py
index 21bff2c1a1..e6b844f161 100644
--- a/contrib/tools/python3/src/Lib/json/encoder.py
+++ b/contrib/tools/python3/src/Lib/json/encoder.py
@@ -116,7 +116,7 @@ class JSONEncoder(object):
If check_circular is true, then lists, dicts, and custom encoded
objects will be checked for circular references during encoding to
- prevent an infinite recursion (which would cause a RecursionError).
+ prevent an infinite recursion (which would cause a RecursionError).
Otherwise, no such check takes place.
If allow_nan is true, then NaN, Infinity, and -Infinity will be
@@ -268,7 +268,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
list=list,
str=str,
tuple=tuple,
- _intstr=int.__repr__,
+ _intstr=int.__repr__,
):
if _indent is not None and not isinstance(_indent, str):
@@ -307,7 +307,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
elif value is False:
yield buf + 'false'
elif isinstance(value, int):
- # Subclasses of int/float may override __repr__, but we still
+ # Subclasses of int/float may override __repr__, but we still
# want to encode them as integers/floats in JSON. One example
# within the standard library is IntEnum.
yield buf + _intstr(value)
@@ -350,7 +350,7 @@ def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
item_separator = _item_separator
first = True
if _sort_keys:
- items = sorted(dct.items())
+ items = sorted(dct.items())
else:
items = dct.items()
for key, value in items:
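The int/float comment above is easy to demonstrate (example, not part of the
diff):

    import enum, json

    class Color(enum.IntEnum):
        RED = 1

    # Color.RED overrides __repr__, but the captured int.__repr__
    # (_intstr) still serializes it as a plain integer:
    json.dumps({'color': Color.RED}, sort_keys=True)  # '{"color": 1}'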
diff --git a/contrib/tools/python3/src/Lib/json/tool.py b/contrib/tools/python3/src/Lib/json/tool.py
index 0490b8c0be..d204f960f5 100644
--- a/contrib/tools/python3/src/Lib/json/tool.py
+++ b/contrib/tools/python3/src/Lib/json/tool.py
@@ -13,7 +13,7 @@ Usage::
import argparse
import json
import sys
-from pathlib import Path
+from pathlib import Path
def main():
@@ -21,65 +21,65 @@ def main():
description = ('A simple command line interface for json module '
'to validate and pretty-print JSON objects.')
parser = argparse.ArgumentParser(prog=prog, description=description)
- parser.add_argument('infile', nargs='?',
- type=argparse.FileType(encoding="utf-8"),
- help='a JSON file to be validated or pretty-printed',
- default=sys.stdin)
- parser.add_argument('outfile', nargs='?',
- type=Path,
- help='write the output of infile to outfile',
- default=None)
+ parser.add_argument('infile', nargs='?',
+ type=argparse.FileType(encoding="utf-8"),
+ help='a JSON file to be validated or pretty-printed',
+ default=sys.stdin)
+ parser.add_argument('outfile', nargs='?',
+ type=Path,
+ help='write the output of infile to outfile',
+ default=None)
parser.add_argument('--sort-keys', action='store_true', default=False,
help='sort the output of dictionaries alphabetically by key')
- parser.add_argument('--no-ensure-ascii', dest='ensure_ascii', action='store_false',
- help='disable escaping of non-ASCII characters')
- parser.add_argument('--json-lines', action='store_true', default=False,
- help='parse input using the JSON Lines format. '
- 'Use with --no-indent or --compact to produce valid JSON Lines output.')
- group = parser.add_mutually_exclusive_group()
- group.add_argument('--indent', default=4, type=int,
- help='separate items with newlines and use this number '
- 'of spaces for indentation')
- group.add_argument('--tab', action='store_const', dest='indent',
- const='\t', help='separate items with newlines and use '
- 'tabs for indentation')
- group.add_argument('--no-indent', action='store_const', dest='indent',
- const=None,
- help='separate items with spaces rather than newlines')
- group.add_argument('--compact', action='store_true',
- help='suppress all whitespace separation (most compact)')
+ parser.add_argument('--no-ensure-ascii', dest='ensure_ascii', action='store_false',
+ help='disable escaping of non-ASCII characters')
+ parser.add_argument('--json-lines', action='store_true', default=False,
+ help='parse input using the JSON Lines format. '
+ 'Use with --no-indent or --compact to produce valid JSON Lines output.')
+ group = parser.add_mutually_exclusive_group()
+ group.add_argument('--indent', default=4, type=int,
+ help='separate items with newlines and use this number '
+ 'of spaces for indentation')
+ group.add_argument('--tab', action='store_const', dest='indent',
+ const='\t', help='separate items with newlines and use '
+ 'tabs for indentation')
+ group.add_argument('--no-indent', action='store_const', dest='indent',
+ const=None,
+ help='separate items with spaces rather than newlines')
+ group.add_argument('--compact', action='store_true',
+ help='suppress all whitespace separation (most compact)')
options = parser.parse_args()
- dump_args = {
- 'sort_keys': options.sort_keys,
- 'indent': options.indent,
- 'ensure_ascii': options.ensure_ascii,
- }
- if options.compact:
- dump_args['indent'] = None
- dump_args['separators'] = ',', ':'
-
- with options.infile as infile:
+ dump_args = {
+ 'sort_keys': options.sort_keys,
+ 'indent': options.indent,
+ 'ensure_ascii': options.ensure_ascii,
+ }
+ if options.compact:
+ dump_args['indent'] = None
+ dump_args['separators'] = ',', ':'
+
+ with options.infile as infile:
try:
- if options.json_lines:
- objs = (json.loads(line) for line in infile)
- else:
- objs = (json.load(infile),)
-
- if options.outfile is None:
- out = sys.stdout
- else:
- out = options.outfile.open('w', encoding='utf-8')
- with out as outfile:
- for obj in objs:
- json.dump(obj, outfile, **dump_args)
- outfile.write('\n')
+ if options.json_lines:
+ objs = (json.loads(line) for line in infile)
+ else:
+ objs = (json.load(infile),)
+
+ if options.outfile is None:
+ out = sys.stdout
+ else:
+ out = options.outfile.open('w', encoding='utf-8')
+ with out as outfile:
+ for obj in objs:
+ json.dump(obj, outfile, **dump_args)
+ outfile.write('\n')
except ValueError as e:
raise SystemExit(e)
if __name__ == '__main__':
- try:
- main()
- except BrokenPipeError as exc:
- sys.exit(exc.errno)
+ try:
+ main()
+ except BrokenPipeError as exc:
+ sys.exit(exc.errno)
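A sketch of the options added above, driven from Python for the sake of a
single-language example (illustrative, not part of the diff):

    import subprocess, sys

    # --compact overrides the default indent and tightens separators:
    out = subprocess.run(
        [sys.executable, '-m', 'json.tool', '--sort-keys', '--compact'],
        input='{"b": 1, "a": 2}', capture_output=True, text=True)
    out.stdout  # '{"a":2,"b":1}\n'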
diff --git a/contrib/tools/python3/src/Lib/keyword.py b/contrib/tools/python3/src/Lib/keyword.py
index 59fcfb0f0d..e82484d941 100644
--- a/contrib/tools/python3/src/Lib/keyword.py
+++ b/contrib/tools/python3/src/Lib/keyword.py
@@ -1,62 +1,62 @@
-"""Keywords (from "Grammar/python.gram")
+"""Keywords (from "Grammar/python.gram")
This file is automatically generated; please don't muck it up!
To update the symbols in this file, 'cd' to the top directory of
-the python source tree and run:
-
- PYTHONPATH=Tools/peg_generator python3 -m pegen.keywordgen \
- Grammar/Grammar \
- Grammar/Tokens \
- Lib/keyword.py
-
-Alternatively, you can run 'make regen-keyword'.
+the python source tree and run:
+
+ PYTHONPATH=Tools/peg_generator python3 -m pegen.keywordgen \
+ Grammar/Grammar \
+ Grammar/Tokens \
+ Lib/keyword.py
+
+Alternatively, you can run 'make regen-keyword'.
"""
-__all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"]
+__all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"]
kwlist = [
- 'False',
- 'None',
- 'True',
- '__peg_parser__',
- 'and',
- 'as',
- 'assert',
- 'async',
- 'await',
- 'break',
- 'class',
- 'continue',
- 'def',
- 'del',
- 'elif',
- 'else',
- 'except',
- 'finally',
- 'for',
- 'from',
- 'global',
- 'if',
- 'import',
- 'in',
- 'is',
- 'lambda',
- 'nonlocal',
- 'not',
- 'or',
- 'pass',
- 'raise',
- 'return',
- 'try',
- 'while',
- 'with',
- 'yield'
-]
-
-softkwlist = [
-
-]
-
+ 'False',
+ 'None',
+ 'True',
+ '__peg_parser__',
+ 'and',
+ 'as',
+ 'assert',
+ 'async',
+ 'await',
+ 'break',
+ 'class',
+ 'continue',
+ 'def',
+ 'del',
+ 'elif',
+ 'else',
+ 'except',
+ 'finally',
+ 'for',
+ 'from',
+ 'global',
+ 'if',
+ 'import',
+ 'in',
+ 'is',
+ 'lambda',
+ 'nonlocal',
+ 'not',
+ 'or',
+ 'pass',
+ 'raise',
+ 'return',
+ 'try',
+ 'while',
+ 'with',
+ 'yield'
+]
+
+softkwlist = [
+
+]
+
iskeyword = frozenset(kwlist).__contains__
-issoftkeyword = frozenset(softkwlist).__contains__
+issoftkeyword = frozenset(softkwlist).__contains__
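Usage sketch for the regenerated module (note softkwlist is empty in this
version, so issoftkeyword() always returns False here):

    import keyword

    keyword.iskeyword('yield')      # True
    keyword.iskeyword('print')      # False -- a builtin, not a keyword
    keyword.issoftkeyword('match')  # False, given the empty softkwlist above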
diff --git a/contrib/tools/python3/src/Lib/lib2to3/__init__.py b/contrib/tools/python3/src/Lib/lib2to3/__init__.py
index 4224dffef4..659060d232 100644
--- a/contrib/tools/python3/src/Lib/lib2to3/__init__.py
+++ b/contrib/tools/python3/src/Lib/lib2to3/__init__.py
@@ -1,8 +1,8 @@
-import warnings
-
-
-warnings.warn(
- "lib2to3 package is deprecated and may not be able to parse Python 3.10+",
- PendingDeprecationWarning,
- stacklevel=2,
-)
+import warnings
+
+
+warnings.warn(
+ "lib2to3 package is deprecated and may not be able to parse Python 3.10+",
+ PendingDeprecationWarning,
+ stacklevel=2,
+)
diff --git a/contrib/tools/python3/src/Lib/lib2to3/fixer_util.py b/contrib/tools/python3/src/Lib/lib2to3/fixer_util.py
index c2a3a47f50..71c6a47ffc 100644
--- a/contrib/tools/python3/src/Lib/lib2to3/fixer_util.py
+++ b/contrib/tools/python3/src/Lib/lib2to3/fixer_util.py
@@ -412,7 +412,7 @@ def _find(name, node):
return None
def _is_import_binding(node, name, package=None):
- """ Will return node if node will import name, or node
+ """ Will return node if node will import name, or node
will import * from package. None is returned otherwise.
See test cases for examples. """
diff --git a/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_apply.py b/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_apply.py
index 6408582c42..a3db0f7e45 100644
--- a/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_apply.py
+++ b/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_apply.py
@@ -38,7 +38,7 @@ class FixApply(fixer_base.BaseFix):
# PATTERN above but I don't know how to do it so...
if args:
if (args.type == self.syms.argument and
- args.children[0].value in {'**', '*'}):
+ args.children[0].value in {'**', '*'}):
return # Make no change.
if kwds and (kwds.type == self.syms.argument and
kwds.children[0].value == '**'):
diff --git a/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_filter.py b/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_filter.py
index 38e9078f11..df43ead490 100644
--- a/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_filter.py
+++ b/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_filter.py
@@ -17,7 +17,7 @@ Python 2.6 figure it out.
from .. import fixer_base
from ..pytree import Node
from ..pygram import python_symbols as syms
-from ..fixer_util import Name, ArgList, ListComp, in_special_context, parenthesize
+from ..fixer_util import Name, ArgList, ListComp, in_special_context, parenthesize
class FixFilter(fixer_base.ConditionalFix):
@@ -65,14 +65,14 @@ class FixFilter(fixer_base.ConditionalFix):
trailers.append(t.clone())
if "filter_lambda" in results:
- xp = results.get("xp").clone()
- if xp.type == syms.test:
- xp.prefix = ""
- xp = parenthesize(xp)
-
+ xp = results.get("xp").clone()
+ if xp.type == syms.test:
+ xp.prefix = ""
+ xp = parenthesize(xp)
+
new = ListComp(results.get("fp").clone(),
results.get("fp").clone(),
- results.get("it").clone(), xp)
+ results.get("it").clone(), xp)
new = Node(syms.power, [new] + trailers, prefix="")
elif "none" in results:
diff --git a/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_intern.py b/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_intern.py
index d752843092..2e6a78b911 100644
--- a/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_intern.py
+++ b/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_intern.py
@@ -31,7 +31,7 @@ class FixIntern(fixer_base.BaseFix):
obj = results['obj']
if obj:
if (obj.type == self.syms.argument and
- obj.children[0].value in {'**', '*'}):
+ obj.children[0].value in {'**', '*'}):
return # Make no change.
names = ('sys', 'intern')
new = ImportAndCall(node, results, names)
diff --git a/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_metaclass.py b/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_metaclass.py
index fe547b2228..d2253ac79d 100644
--- a/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_metaclass.py
+++ b/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_metaclass.py
@@ -1,6 +1,6 @@
"""Fixer for __metaclass__ = X -> (metaclass=X) methods.
- The various forms of classdef (inherits nothing, inherits once, inherits
+ The various forms of classdef (inherits nothing, inherits once, inherits
many) don't parse the same in the CST so we look at ALL classes for
a __metaclass__ and if we find one normalize the inherits to all be
an arglist.
@@ -51,7 +51,7 @@ def fixup_parse_tree(cls_node):
# already in the preferred format, do nothing
return
- # !%@#! one-liners have no suite node, we have to fake one up
+ # !%@#! one-liners have no suite node, we have to fake one up
for i, node in enumerate(cls_node.children):
if node.type == token.COLON:
break
diff --git a/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_paren.py b/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_paren.py
index df3da5f523..be5129e67d 100644
--- a/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_paren.py
+++ b/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_paren.py
@@ -1,4 +1,4 @@
-"""Fixer that adds parentheses where they are required
+"""Fixer that adds parentheses where they are required
This converts ``[x for x in 1, 2]`` to ``[x for x in (1, 2)]``."""
diff --git a/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_reload.py b/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_reload.py
index b30841131c..c1649aedcc 100644
--- a/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_reload.py
+++ b/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_reload.py
@@ -28,7 +28,7 @@ class FixReload(fixer_base.BaseFix):
obj = results['obj']
if obj:
if (obj.type == self.syms.argument and
- obj.children[0].value in {'**', '*'}):
+ obj.children[0].value in {'**', '*'}):
return # Make no change.
names = ('importlib', 'reload')
new = ImportAndCall(node, results, names)
diff --git a/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_urllib.py b/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_urllib.py
index ab892bc524..1c754df047 100644
--- a/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_urllib.py
+++ b/contrib/tools/python3/src/Lib/lib2to3/fixes/fix_urllib.py
@@ -13,7 +13,7 @@ MAPPING = {"urllib": [
("urllib.request",
["URLopener", "FancyURLopener", "urlretrieve",
"_urlopener", "urlopen", "urlcleanup",
- "pathname2url", "url2pathname", "getproxies"]),
+ "pathname2url", "url2pathname", "getproxies"]),
("urllib.parse",
["quote", "quote_plus", "unquote", "unquote_plus",
"urlencode", "splitattr", "splithost", "splitnport",
diff --git a/contrib/tools/python3/src/Lib/lib2to3/main.py b/contrib/tools/python3/src/Lib/lib2to3/main.py
index f2849fd6be..473ab6c841 100644
--- a/contrib/tools/python3/src/Lib/lib2to3/main.py
+++ b/contrib/tools/python3/src/Lib/lib2to3/main.py
@@ -90,11 +90,11 @@ class StdoutRefactoringTool(refactor.MultiprocessRefactoringTool):
if os.path.lexists(backup):
try:
os.remove(backup)
- except OSError:
+ except OSError:
self.log_message("Can't remove backup %s", backup)
try:
os.rename(filename, backup)
- except OSError:
+ except OSError:
self.log_message("Can't rename %s to %s", filename, backup)
# Actually write the new file
write = super(StdoutRefactoringTool, self).write_file
@@ -154,8 +154,8 @@ def main(fixer_pkg, args=None):
help="List available transformations")
parser.add_option("-p", "--print-function", action="store_true",
help="Modify the grammar so that print() is a function")
- parser.add_option("-e", "--exec-function", action="store_true",
- help="Modify the grammar so that exec() is a function")
+ parser.add_option("-e", "--exec-function", action="store_true",
+ help="Modify the grammar so that exec() is a function")
parser.add_option("-v", "--verbose", action="store_true",
help="More verbose logging")
parser.add_option("--no-diffs", action="store_true",
@@ -213,9 +213,9 @@ def main(fixer_pkg, args=None):
if options.print_function:
flags["print_function"] = True
- if options.exec_function:
- flags["exec_function"] = True
-
+ if options.exec_function:
+ flags["exec_function"] = True
+
# Set up logging handler
level = logging.DEBUG if options.verbose else logging.INFO
logging.basicConfig(format='%(name)s: %(message)s', level=level)
diff --git a/contrib/tools/python3/src/Lib/lib2to3/pgen2/grammar.py b/contrib/tools/python3/src/Lib/lib2to3/pgen2/grammar.py
index 6a4d575ac2..bddf785dbb 100644
--- a/contrib/tools/python3/src/Lib/lib2to3/pgen2/grammar.py
+++ b/contrib/tools/python3/src/Lib/lib2to3/pgen2/grammar.py
@@ -85,9 +85,9 @@ class Grammar(object):
self.start = 256
def dump(self, filename):
- """Dump the grammar tables to a pickle file."""
+ """Dump the grammar tables to a pickle file."""
with open(filename, "wb") as f:
- pickle.dump(self.__dict__, f, pickle.HIGHEST_PROTOCOL)
+ pickle.dump(self.__dict__, f, pickle.HIGHEST_PROTOCOL)
def load(self, filename):
"""Load the grammar tables from a pickle file."""
@@ -178,7 +178,7 @@ opmap_raw = """
// DOUBLESLASH
//= DOUBLESLASHEQUAL
-> RARROW
-:= COLONEQUAL
+:= COLONEQUAL
"""
opmap = {}
diff --git a/contrib/tools/python3/src/Lib/lib2to3/pgen2/parse.py b/contrib/tools/python3/src/Lib/lib2to3/pgen2/parse.py
index cf3fcf7e99..1b6117174e 100644
--- a/contrib/tools/python3/src/Lib/lib2to3/pgen2/parse.py
+++ b/contrib/tools/python3/src/Lib/lib2to3/pgen2/parse.py
@@ -24,9 +24,9 @@ class ParseError(Exception):
self.value = value
self.context = context
- def __reduce__(self):
- return type(self), (self.msg, self.type, self.value, self.context)
-
+ def __reduce__(self):
+ return type(self), (self.msg, self.type, self.value, self.context)
+
class Parser(object):
"""Parser engine.
diff --git a/contrib/tools/python3/src/Lib/lib2to3/pgen2/token.py b/contrib/tools/python3/src/Lib/lib2to3/pgen2/token.py
index 5f6612f5b3..1ab43f697d 100644
--- a/contrib/tools/python3/src/Lib/lib2to3/pgen2/token.py
+++ b/contrib/tools/python3/src/Lib/lib2to3/pgen2/token.py
@@ -65,8 +65,8 @@ RARROW = 55
AWAIT = 56
ASYNC = 57
ERRORTOKEN = 58
-COLONEQUAL = 59
-N_TOKENS = 60
+COLONEQUAL = 59
+N_TOKENS = 60
NT_OFFSET = 256
#--end constants--
diff --git a/contrib/tools/python3/src/Lib/lib2to3/pgen2/tokenize.py b/contrib/tools/python3/src/Lib/lib2to3/pgen2/tokenize.py
index 099dfa7798..19db93a72d 100644
--- a/contrib/tools/python3/src/Lib/lib2to3/pgen2/tokenize.py
+++ b/contrib/tools/python3/src/Lib/lib2to3/pgen2/tokenize.py
@@ -93,7 +93,7 @@ Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
r"~")
Bracket = '[][(){}]'
-Special = group(r'\r?\n', r':=', r'[:;.,`@]')
+Special = group(r'\r?\n', r':=', r'[:;.,`@]')
Funny = group(Operator, Bracket, Special)
PlainToken = group(Number, Funny, String, Name)
@@ -321,7 +321,7 @@ def untokenize(iterable):
Round-trip invariant for full input:
Untokenized source will match input source exactly
- Round-trip invariant for limited input:
+ Round-trip invariant for limited input:
# Output text will tokenize the back to the input
t1 = [tok[:2] for tok in generate_tokens(f.readline)]
newcode = untokenize(t1)
@@ -346,7 +346,7 @@ def generate_tokens(readline):
column where the token begins in the source; a 2-tuple (erow, ecol) of
ints specifying the row and column where the token ends in the source;
and the line on which the token was found. The line passed is the
- physical line.
+ physical line.
"""
lnum = parenlev = continued = 0
contstr, needcont = '', 0
@@ -512,14 +512,14 @@ def generate_tokens(readline):
stashed = tok
continue
- if token in ('def', 'for'):
+ if token in ('def', 'for'):
if (stashed
and stashed[0] == NAME
and stashed[1] == 'async'):
- if token == 'def':
- async_def = True
- async_def_indent = indents[-1]
+ if token == 'def':
+ async_def = True
+ async_def_indent = indents[-1]
yield (ASYNC, stashed[1],
stashed[2], stashed[3],
diff --git a/contrib/tools/python3/src/Lib/lib2to3/pygram.py b/contrib/tools/python3/src/Lib/lib2to3/pygram.py
index 24d9db9217..20f6cc01f1 100644
--- a/contrib/tools/python3/src/Lib/lib2to3/pygram.py
+++ b/contrib/tools/python3/src/Lib/lib2to3/pygram.py
@@ -36,8 +36,8 @@ python_symbols = Symbols(python_grammar)
python_grammar_no_print_statement = python_grammar.copy()
del python_grammar_no_print_statement.keywords["print"]
-python_grammar_no_print_and_exec_statement = python_grammar_no_print_statement.copy()
-del python_grammar_no_print_and_exec_statement.keywords["exec"]
-
+python_grammar_no_print_and_exec_statement = python_grammar_no_print_statement.copy()
+del python_grammar_no_print_and_exec_statement.keywords["exec"]
+
pattern_grammar = driver.load_packaged_grammar("lib2to3", _PATTERN_GRAMMAR_FILE)
pattern_symbols = Symbols(pattern_grammar)
diff --git a/contrib/tools/python3/src/Lib/lib2to3/refactor.py b/contrib/tools/python3/src/Lib/lib2to3/refactor.py
index 3a5aafffc6..07273d2c42 100644
--- a/contrib/tools/python3/src/Lib/lib2to3/refactor.py
+++ b/contrib/tools/python3/src/Lib/lib2to3/refactor.py
@@ -14,7 +14,7 @@ __author__ = "Guido van Rossum <guido@python.org>"
# Python imports
import io
import os
-import pkgutil
+import pkgutil
import sys
import logging
import operator
@@ -32,11 +32,11 @@ def get_all_fix_names(fixer_pkg, remove_prefix=True):
"""Return a sorted list of all available fix names in the given package."""
pkg = __import__(fixer_pkg, [], [], ["*"])
fix_names = []
- for finder, name, ispkg in pkgutil.iter_modules(pkg.__path__):
- if name.startswith("fix_"):
+ for finder, name, ispkg in pkgutil.iter_modules(pkg.__path__):
+ if name.startswith("fix_"):
if remove_prefix:
name = name[4:]
- fix_names.append(name)
+ fix_names.append(name)
return fix_names
@@ -155,7 +155,7 @@ class FixerError(Exception):
class RefactoringTool(object):
_default_options = {"print_function" : False,
- "exec_function": False,
+ "exec_function": False,
"write_unchanged_files" : False}
CLASS_PREFIX = "Fix" # The prefix for fixer classes
@@ -174,13 +174,13 @@ class RefactoringTool(object):
self.options = self._default_options.copy()
if options is not None:
self.options.update(options)
- self.grammar = pygram.python_grammar.copy()
-
- if self.options['print_function']:
- del self.grammar.keywords["print"]
- elif self.options['exec_function']:
- del self.grammar.keywords["exec"]
-
+ self.grammar = pygram.python_grammar.copy()
+
+ if self.options['print_function']:
+ del self.grammar.keywords["print"]
+ elif self.options['exec_function']:
+ del self.grammar.keywords["exec"]
+
# When this is True, the refactor*() methods will call write_file() for
# files processed even if they were not changed during refactoring. If
# and only if the refactor method's write parameter was True.
diff --git a/contrib/tools/python3/src/Lib/linecache.py b/contrib/tools/python3/src/Lib/linecache.py
index 6adce80e8f..958dceb6ca 100644
--- a/contrib/tools/python3/src/Lib/linecache.py
+++ b/contrib/tools/python3/src/Lib/linecache.py
@@ -10,7 +10,7 @@ import sys
import os
import tokenize
-__all__ = ["getline", "clearcache", "checkcache", "lazycache"]
+__all__ = ["getline", "clearcache", "checkcache", "lazycache"]
# The cache. Maps filenames to either a thunk which will provide source code,
@@ -20,19 +20,19 @@ cache = {}
def clearcache():
"""Clear the cache entirely."""
- cache.clear()
+ cache.clear()
-def getline(filename, lineno, module_globals=None):
- """Get a line for a Python source file from the cache.
- Update the cache if it doesn't contain an entry for this file already."""
-
- lines = getlines(filename, module_globals)
- if 1 <= lineno <= len(lines):
- return lines[lineno - 1]
- return ''
-
+def getline(filename, lineno, module_globals=None):
+ """Get a line for a Python source file from the cache.
+ Update the cache if it doesn't contain an entry for this file already."""
+ lines = getlines(filename, module_globals)
+ if 1 <= lineno <= len(lines):
+ return lines[lineno - 1]
+ return ''
+
+
def getlines(filename, module_globals=None):
"""Get the lines for a Python source file from the cache.
Update the cache if it doesn't contain an entry for this file already."""
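Behavior sketch for the rewritten getline() (not part of the diff):

    import linecache

    # Line numbers are 1-based; out-of-range requests return '' rather
    # than raising, per the bounds check above.
    linecache.getline(linecache.__file__, 1)      # first source line
    linecache.getline(linecache.__file__, 10**9)  # ''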
@@ -55,10 +55,10 @@ def checkcache(filename=None):
if filename is None:
filenames = list(cache.keys())
- elif filename in cache:
- filenames = [filename]
+ elif filename in cache:
+ filenames = [filename]
else:
- return
+ return
for filename in filenames:
entry = cache[filename]
@@ -71,10 +71,10 @@ def checkcache(filename=None):
try:
stat = os.stat(fullname)
except OSError:
- cache.pop(filename, None)
+ cache.pop(filename, None)
continue
if size != stat.st_size or mtime != stat.st_mtime:
- cache.pop(filename, None)
+ cache.pop(filename, None)
def updatecache(filename, module_globals=None):
@@ -84,7 +84,7 @@ def updatecache(filename, module_globals=None):
if filename in cache:
if len(cache[filename]) != 1:
- cache.pop(filename, None)
+ cache.pop(filename, None)
if not filename or (filename.startswith('<') and filename.endswith('>')):
return []
@@ -119,10 +119,10 @@ def updatecache(filename, module_globals=None):
# for this module.
return []
cache[filename] = (
- len(data),
- None,
- [line + '\n' for line in data.splitlines()],
- fullname
+ len(data),
+ None,
+ [line + '\n' for line in data.splitlines()],
+ fullname
)
return cache[filename][2]
@@ -166,7 +166,7 @@ def lazycache(filename, module_globals):
:return: True if a lazy load is registered in the cache,
otherwise False. To register such a load a module loader with a
- get_source method must be found, the filename must be a cacheable
+ get_source method must be found, the filename must be a cacheable
filename, and the filename must not be already cached.
"""
if filename in cache:
diff --git a/contrib/tools/python3/src/Lib/locale.py b/contrib/tools/python3/src/Lib/locale.py
index 1a4e9f694f..a6bcaa3bc1 100644
--- a/contrib/tools/python3/src/Lib/locale.py
+++ b/contrib/tools/python3/src/Lib/locale.py
@@ -279,8 +279,8 @@ def currency(val, symbol=True, grouping=False, international=False):
if precedes:
s = smb + (separated and ' ' or '') + s
else:
- if international and smb[-1] == ' ':
- smb = smb[:-1]
+ if international and smb[-1] == ' ':
+ smb = smb[:-1]
s = s + (separated and ' ' or '') + smb
sign_pos = conv[val<0 and 'n_sign_posn' or 'p_sign_posn']
@@ -494,10 +494,10 @@ def _parse_localename(localename):
return tuple(code.split('.')[:2])
elif code == 'C':
return None, None
- elif code == 'UTF-8':
- # On macOS "LC_CTYPE=UTF-8" is a valid locale setting
- # for getting UTF-8 handling for text.
- return None, 'UTF-8'
+ elif code == 'UTF-8':
+ # On macOS "LC_CTYPE=UTF-8" is a valid locale setting
+ # for getting UTF-8 handling for text.
+ return None, 'UTF-8'
raise ValueError('unknown locale: %s' % localename)
def _build_localename(localetuple):
diff --git a/contrib/tools/python3/src/Lib/logging/__init__.py b/contrib/tools/python3/src/Lib/logging/__init__.py
index 1ab35a8c21..92cb8f08f9 100644
--- a/contrib/tools/python3/src/Lib/logging/__init__.py
+++ b/contrib/tools/python3/src/Lib/logging/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2001-2019 by Vinay Sajip. All Rights Reserved.
+# Copyright 2001-2019 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
@@ -18,17 +18,17 @@
Logging package for Python. Based on PEP 282 and comments thereto in
comp.lang.python.
-Copyright (C) 2001-2019 Vinay Sajip. All Rights Reserved.
+Copyright (C) 2001-2019 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
-import sys, os, time, io, re, traceback, warnings, weakref, collections.abc
+import sys, os, time, io, re, traceback, warnings, weakref, collections.abc
from string import Template
-from string import Formatter as StrFormatter
-
+from string import Formatter as StrFormatter
+
__all__ = ['BASIC_FORMAT', 'BufferingFormatter', 'CRITICAL', 'DEBUG', 'ERROR',
'FATAL', 'FileHandler', 'Filter', 'Formatter', 'Handler', 'INFO',
'LogRecord', 'Logger', 'LoggerAdapter', 'NOTSET', 'NullHandler',
@@ -118,7 +118,7 @@ _nameToLevel = {
def getLevelName(level):
"""
- Return the textual or numeric representation of logging level 'level'.
+ Return the textual or numeric representation of logging level 'level'.
If the level is one of the predefined levels (CRITICAL, ERROR, WARNING,
INFO, DEBUG) then you get the corresponding string. If you have
@@ -128,11 +128,11 @@ def getLevelName(level):
If a numeric value corresponding to one of the defined levels is passed
in, the corresponding string representation is returned.
- If a string representation of the level is passed in, the corresponding
- numeric value is returned.
-
- If no matching numeric or string value is passed in, the string
- 'Level %s' % level is returned.
+ If a string representation of the level is passed in, the corresponding
+ numeric value is returned.
+
+ If no matching numeric or string value is passed in, the string
+ 'Level %s' % level is returned.
"""
# See Issues #22386, #27937 and #29220 for why it's this way
result = _levelToName.get(level)
@@ -235,32 +235,32 @@ def _releaseLock():
# Prevent a held logging lock from blocking a child from logging.
if not hasattr(os, 'register_at_fork'): # Windows and friends.
- def _register_at_fork_reinit_lock(instance):
+ def _register_at_fork_reinit_lock(instance):
pass # no-op when os.register_at_fork does not exist.
-else:
- # A collection of instances with a _at_fork_reinit method (logging.Handler)
- # to be called in the child after forking. The weakref avoids us keeping
- # discarded Handler instances alive.
- _at_fork_reinit_lock_weakset = weakref.WeakSet()
+else:
+ # A collection of instances with a _at_fork_reinit method (logging.Handler)
+ # to be called in the child after forking. The weakref avoids us keeping
+ # discarded Handler instances alive.
+ _at_fork_reinit_lock_weakset = weakref.WeakSet()
- def _register_at_fork_reinit_lock(instance):
- _acquireLock()
- try:
- _at_fork_reinit_lock_weakset.add(instance)
- finally:
- _releaseLock()
+ def _register_at_fork_reinit_lock(instance):
+ _acquireLock()
+ try:
+ _at_fork_reinit_lock_weakset.add(instance)
+ finally:
+ _releaseLock()
- def _after_at_fork_child_reinit_locks():
- for handler in _at_fork_reinit_lock_weakset:
- handler._at_fork_reinit()
+ def _after_at_fork_child_reinit_locks():
+ for handler in _at_fork_reinit_lock_weakset:
+ handler._at_fork_reinit()
- # _acquireLock() was called in the parent before forking.
- # The lock is reinitialized to unlocked state.
- _lock._at_fork_reinit()
+ # _acquireLock() was called in the parent before forking.
+ # The lock is reinitialized to unlocked state.
+ _lock._at_fork_reinit()
- os.register_at_fork(before=_acquireLock,
- after_in_child=_after_at_fork_child_reinit_locks,
- after_in_parent=_releaseLock)
+ os.register_at_fork(before=_acquireLock,
+ after_in_child=_after_at_fork_child_reinit_locks,
+ after_in_parent=_releaseLock)
#---------------------------------------------------------------------------
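
The block above re-registers logging's fork handling: the module-level lock is acquired in the parent around fork(), and every Handler held in a WeakSet gets _at_fork_reinit() in the child, so no lock is inherited in a locked state. The same pattern, outside logging, looks roughly like this (note Lock._at_fork_reinit is a CPython-internal API):

    import os
    import threading

    lock = threading.Lock()

    if hasattr(os, 'register_at_fork'):          # absent on Windows
        os.register_at_fork(before=lock.acquire,
                            after_in_parent=lock.release,
                            after_in_child=lock._at_fork_reinit)
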
@@ -351,7 +351,7 @@ class LogRecord(object):
else:
self.process = None
- def __repr__(self):
+ def __repr__(self):
return '<LogRecord: %s, %s, %s, %s, "%s">'%(self.name, self.levelno,
self.pathname, self.lineno, self.msg)
@@ -400,20 +400,20 @@ def makeLogRecord(dict):
rv.__dict__.update(dict)
return rv
-
+
#---------------------------------------------------------------------------
# Formatter classes and functions
#---------------------------------------------------------------------------
-_str_formatter = StrFormatter()
-del StrFormatter
-
+_str_formatter = StrFormatter()
+del StrFormatter
+
class PercentStyle(object):
default_format = '%(message)s'
asctime_format = '%(asctime)s'
asctime_search = '%(asctime)'
- validation_pattern = re.compile(r'%\(\w+\)[#0+ -]*(\*|\d+)?(\.(\*|\d+))?[diouxefgcrsa%]', re.I)
+ validation_pattern = re.compile(r'%\(\w+\)[#0+ -]*(\*|\d+)?(\.(\*|\d+))?[diouxefgcrsa%]', re.I)
def __init__(self, fmt):
self._fmt = fmt or self.default_format
@@ -421,51 +421,51 @@ class PercentStyle(object):
def usesTime(self):
return self._fmt.find(self.asctime_search) >= 0
- def validate(self):
- """Validate the input format, ensure it matches the correct style"""
- if not self.validation_pattern.search(self._fmt):
- raise ValueError("Invalid format '%s' for '%s' style" % (self._fmt, self.default_format[0]))
-
- def _format(self, record):
+ def validate(self):
+ """Validate the input format, ensure it matches the correct style"""
+ if not self.validation_pattern.search(self._fmt):
+ raise ValueError("Invalid format '%s' for '%s' style" % (self._fmt, self.default_format[0]))
+
+ def _format(self, record):
return self._fmt % record.__dict__
- def format(self, record):
- try:
- return self._format(record)
- except KeyError as e:
- raise ValueError('Formatting field not found in record: %s' % e)
-
-
+ def format(self, record):
+ try:
+ return self._format(record)
+ except KeyError as e:
+ raise ValueError('Formatting field not found in record: %s' % e)
+
+
class StrFormatStyle(PercentStyle):
default_format = '{message}'
asctime_format = '{asctime}'
asctime_search = '{asctime'
- fmt_spec = re.compile(r'^(.?[<>=^])?[+ -]?#?0?(\d+|{\w+})?[,_]?(\.(\d+|{\w+}))?[bcdefgnosx%]?$', re.I)
- field_spec = re.compile(r'^(\d+|\w+)(\.\w+|\[[^]]+\])*$')
-
- def _format(self, record):
+ fmt_spec = re.compile(r'^(.?[<>=^])?[+ -]?#?0?(\d+|{\w+})?[,_]?(\.(\d+|{\w+}))?[bcdefgnosx%]?$', re.I)
+ field_spec = re.compile(r'^(\d+|\w+)(\.\w+|\[[^]]+\])*$')
+
+ def _format(self, record):
return self._fmt.format(**record.__dict__)
- def validate(self):
- """Validate the input format, ensure it is the correct string formatting style"""
- fields = set()
- try:
- for _, fieldname, spec, conversion in _str_formatter.parse(self._fmt):
- if fieldname:
- if not self.field_spec.match(fieldname):
- raise ValueError('invalid field name/expression: %r' % fieldname)
- fields.add(fieldname)
- if conversion and conversion not in 'rsa':
- raise ValueError('invalid conversion: %r' % conversion)
- if spec and not self.fmt_spec.match(spec):
- raise ValueError('bad specifier: %r' % spec)
- except ValueError as e:
- raise ValueError('invalid format: %s' % e)
- if not fields:
- raise ValueError('invalid format: no fields')
-
-
+ def validate(self):
+ """Validate the input format, ensure it is the correct string formatting style"""
+ fields = set()
+ try:
+ for _, fieldname, spec, conversion in _str_formatter.parse(self._fmt):
+ if fieldname:
+ if not self.field_spec.match(fieldname):
+ raise ValueError('invalid field name/expression: %r' % fieldname)
+ fields.add(fieldname)
+ if conversion and conversion not in 'rsa':
+ raise ValueError('invalid conversion: %r' % conversion)
+ if spec and not self.fmt_spec.match(spec):
+ raise ValueError('bad specifier: %r' % spec)
+ except ValueError as e:
+ raise ValueError('invalid format: %s' % e)
+ if not fields:
+ raise ValueError('invalid format: no fields')
+
+
class StringTemplateStyle(PercentStyle):
default_format = '${message}'
asctime_format = '${asctime}'
@@ -479,24 +479,24 @@ class StringTemplateStyle(PercentStyle):
fmt = self._fmt
return fmt.find('$asctime') >= 0 or fmt.find(self.asctime_format) >= 0
- def validate(self):
- pattern = Template.pattern
- fields = set()
- for m in pattern.finditer(self._fmt):
- d = m.groupdict()
- if d['named']:
- fields.add(d['named'])
- elif d['braced']:
- fields.add(d['braced'])
- elif m.group(0) == '$':
- raise ValueError('invalid format: bare \'$\' not allowed')
- if not fields:
- raise ValueError('invalid format: no fields')
-
- def _format(self, record):
+ def validate(self):
+ pattern = Template.pattern
+ fields = set()
+ for m in pattern.finditer(self._fmt):
+ d = m.groupdict()
+ if d['named']:
+ fields.add(d['named'])
+ elif d['braced']:
+ fields.add(d['braced'])
+ elif m.group(0) == '$':
+ raise ValueError('invalid format: bare \'$\' not allowed')
+ if not fields:
+ raise ValueError('invalid format: no fields')
+
+ def _format(self, record):
return self._tpl.substitute(**record.__dict__)
-
+
BASIC_FORMAT = "%(levelname)s:%(name)s:%(message)s"
_STYLES = {
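
Each style class above validates its own grammar: PercentStyle checks the format against a %-style regex, StrFormatStyle walks string.Formatter.parse() output checking field names, conversions and format specs, and StringTemplateStyle scans Template.pattern matches. Since Formatter calls validate() by default, a bad format now fails at construction time rather than at the first emit:

    import logging

    logging.Formatter('{levelname}:{message}', style='{')    # accepted

    try:
        logging.Formatter('{message!z}', style='{')           # bad conversion
    except ValueError as exc:
        print(exc)    # invalid format: ...
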
@@ -513,7 +513,7 @@ class Formatter(object):
responsible for converting a LogRecord to (usually) a string which can
be interpreted by either a human or an external system. The base Formatter
allows a formatting string to be specified. If none is supplied, the
- style-dependent default value, "%(message)s", "{message}", or
+ style-dependent default value, "%(message)s", "{message}", or
"${message}", is used.
The Formatter can be initialized with a format string which makes use of
@@ -550,7 +550,7 @@ class Formatter(object):
converter = time.localtime
- def __init__(self, fmt=None, datefmt=None, style='%', validate=True):
+ def __init__(self, fmt=None, datefmt=None, style='%', validate=True):
"""
Initialize the formatter with specified format strings.
@@ -570,9 +570,9 @@ class Formatter(object):
raise ValueError('Style must be one of: %s' % ','.join(
_STYLES.keys()))
self._style = _STYLES[style][0](fmt)
- if validate:
- self._style.validate()
-
+ if validate:
+ self._style.validate()
+
self._fmt = self._style._fmt
self.datefmt = datefmt
@@ -601,9 +601,9 @@ class Formatter(object):
if datefmt:
s = time.strftime(datefmt, ct)
else:
- s = time.strftime(self.default_time_format, ct)
- if self.default_msec_format:
- s = self.default_msec_format % (s, record.msecs)
+ s = time.strftime(self.default_time_format, ct)
+ if self.default_msec_format:
+ s = self.default_msec_format % (s, record.msecs)
return s
def formatException(self, ei):
@@ -752,8 +752,8 @@ class Filter(object):
"""
Determine if the specified record is to be logged.
- Returns True if the record should be logged, or False otherwise.
- If deemed appropriate, the record may be modified in-place.
+ Returns True if the record should be logged, or False otherwise.
+ If deemed appropriate, the record may be modified in-place.
"""
if self.nlen == 0:
return True
@@ -888,11 +888,11 @@ class Handler(Filterer):
Acquire a thread lock for serializing access to the underlying I/O.
"""
self.lock = threading.RLock()
- _register_at_fork_reinit_lock(self)
-
- def _at_fork_reinit(self):
- self.lock._at_fork_reinit()
+ _register_at_fork_reinit_lock(self)
+ def _at_fork_reinit(self):
+ self.lock._at_fork_reinit()
+
def acquire(self):
"""
Acquire the I/O thread lock.
@@ -1021,8 +1021,8 @@ class Handler(Filterer):
sys.stderr.write('Message: %r\n'
'Arguments: %s\n' % (record.msg,
record.args))
- except RecursionError: # See issue 36272
- raise
+ except RecursionError: # See issue 36272
+ raise
except Exception:
sys.stderr.write('Unable to print the message and arguments'
' - possible formatting error.\nUse the'
@@ -1085,8 +1085,8 @@ class StreamHandler(Handler):
# issue 35046: merged two stream.writes into one.
stream.write(msg + self.terminator)
self.flush()
- except RecursionError: # See issue 36272
- raise
+ except RecursionError: # See issue 36272
+ raise
except Exception:
self.handleError(record)
@@ -1113,8 +1113,8 @@ class StreamHandler(Handler):
def __repr__(self):
level = getLevelName(self.level)
name = getattr(self.stream, 'name', '')
- # bpo-36015: name can be an int
- name = str(name)
+ # bpo-36015: name can be an int
+ name = str(name)
if name:
name += ' '
return '<%s %s(%s)>' % (self.__class__.__name__, name, level)
@@ -1124,7 +1124,7 @@ class FileHandler(StreamHandler):
"""
A handler class which writes formatted logging records to disk files.
"""
- def __init__(self, filename, mode='a', encoding=None, delay=False, errors=None):
+ def __init__(self, filename, mode='a', encoding=None, delay=False, errors=None):
"""
Open the specified file and use it as the stream for logging.
"""
@@ -1135,7 +1135,7 @@ class FileHandler(StreamHandler):
self.baseFilename = os.path.abspath(filename)
self.mode = mode
self.encoding = encoding
- self.errors = errors
+ self.errors = errors
self.delay = delay
if delay:
#We don't open the stream, but we still need to call the
@@ -1172,8 +1172,8 @@ class FileHandler(StreamHandler):
Open the current base file with the (original) mode and encoding.
Return the resulting stream.
"""
- return open(self.baseFilename, self.mode, encoding=self.encoding,
- errors=self.errors)
+ return open(self.baseFilename, self.mode, encoding=self.encoding,
+ errors=self.errors)
def emit(self, record):
"""
@@ -1273,14 +1273,14 @@ class Manager(object):
self.loggerClass = None
self.logRecordFactory = None
- @property
- def disable(self):
- return self._disable
-
- @disable.setter
- def disable(self, value):
- self._disable = _checkLevel(value)
-
+ @property
+ def disable(self):
+ return self._disable
+
+ @disable.setter
+ def disable(self, value):
+ self._disable = _checkLevel(value)
+
def getLogger(self, name):
"""
Get a logger with the specified name (channel name), creating it
@@ -1511,7 +1511,7 @@ class Logger(Filterer):
if self.isEnabledFor(level):
self._log(level, msg, args, **kwargs)
- def findCaller(self, stack_info=False, stacklevel=1):
+ def findCaller(self, stack_info=False, stacklevel=1):
"""
Find the stack frame of the caller so that we can note the source
file name, line number and function name.
@@ -1521,12 +1521,12 @@ class Logger(Filterer):
#IronPython isn't run with -X:Frames.
if f is not None:
f = f.f_back
- orig_f = f
- while f and stacklevel > 1:
- f = f.f_back
- stacklevel -= 1
- if not f:
- f = orig_f
+ orig_f = f
+ while f and stacklevel > 1:
+ f = f.f_back
+ stacklevel -= 1
+ if not f:
+ f = orig_f
rv = "(unknown file)", 0, "(unknown function)", None
while hasattr(f, "f_code"):
co = f.f_code
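
findCaller() above walks stacklevel-1 extra frames back before locating the caller, which lets wrapper functions attribute log records to their own callers. For example:

    import logging

    logging.basicConfig(format='%(filename)s:%(lineno)d %(message)s')

    def warn_deprecated(msg):
        # stacklevel=2 skips this helper frame, so filename/lineno point
        # at whoever called warn_deprecated().
        logging.getLogger(__name__).warning(msg, stacklevel=2)

    warn_deprecated('old API')
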
@@ -1562,8 +1562,8 @@ class Logger(Filterer):
rv.__dict__[key] = extra[key]
return rv
- def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False,
- stacklevel=1):
+ def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False,
+ stacklevel=1):
"""
Low-level logging routine which creates a LogRecord and then calls
all the handlers of this logger to handle the record.
@@ -1574,7 +1574,7 @@ class Logger(Filterer):
#exception on some versions of IronPython. We trap it here so that
#IronPython can use logging.
try:
- fn, lno, func, sinfo = self.findCaller(stack_info, stacklevel)
+ fn, lno, func, sinfo = self.findCaller(stack_info, stacklevel)
except ValueError: # pragma: no cover
fn, lno, func = "(unknown file)", 0, "(unknown function)"
else: # pragma: no cover
@@ -1690,22 +1690,22 @@ class Logger(Filterer):
"""
Is this logger enabled for level 'level'?
"""
- if self.disabled:
- return False
-
+ if self.disabled:
+ return False
+
try:
return self._cache[level]
except KeyError:
_acquireLock()
- try:
- if self.manager.disable >= level:
- is_enabled = self._cache[level] = False
- else:
- is_enabled = self._cache[level] = (
- level >= self.getEffectiveLevel()
- )
- finally:
- _releaseLock()
+ try:
+ if self.manager.disable >= level:
+ is_enabled = self._cache[level] = False
+ else:
+ is_enabled = self._cache[level] = (
+ level >= self.getEffectiveLevel()
+ )
+ finally:
+ _releaseLock()
return is_enabled
def getChild(self, suffix):
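
The hunk above shows the two halves of the level-check fast path: Manager.disable is a property whose setter normalizes the value through _checkLevel(), and isEnabledFor() caches its verdict per level under the module lock. In practice:

    import logging

    log = logging.getLogger('demo')
    logging.disable(logging.INFO)            # sets Logger.manager.disable
    print(log.isEnabledFor(logging.INFO))    # False, and now cached
    logging.disable(logging.NOTSET)          # clearing also clears the cache
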
@@ -1910,8 +1910,8 @@ def basicConfig(**kwargs):
Do basic configuration for the logging system.
This function does nothing if the root logger already has handlers
- configured, unless the keyword argument *force* is set to ``True``.
- It is a convenience method intended for use by simple scripts
+ configured, unless the keyword argument *force* is set to ``True``.
+ It is a convenience method intended for use by simple scripts
to do one-shot configuration of the logging package.
The default behaviour is to create a StreamHandler which writes to
@@ -1939,18 +1939,18 @@ def basicConfig(**kwargs):
handlers, which will be added to the root handler. Any handler
in the list which does not have a formatter assigned will be
assigned the formatter created in this function.
- force If this keyword is specified as true, any existing handlers
- attached to the root logger are removed and closed, before
- carrying out the configuration as specified by the other
- arguments.
- encoding If specified together with a filename, this encoding is passed to
- the created FileHandler, causing it to be used when the file is
- opened.
- errors If specified together with a filename, this value is passed to the
- created FileHandler, causing it to be used when the file is
- opened in text mode. If not specified, the default value is
- `backslashreplace`.
-
+ force If this keyword is specified as true, any existing handlers
+ attached to the root logger are removed and closed, before
+ carrying out the configuration as specified by the other
+ arguments.
+ encoding If specified together with a filename, this encoding is passed to
+ the created FileHandler, causing it to be used when the file is
+ opened.
+ errors If specified together with a filename, this value is passed to the
+ created FileHandler, causing it to be used when the file is
+ opened in text mode. If not specified, the default value is
+ `backslashreplace`.
+
Note that you could specify a stream created using open(filename, mode)
rather than passing the filename and mode in. However, it should be
remembered that StreamHandler does not close its stream (since it may be
@@ -1966,24 +1966,24 @@ def basicConfig(**kwargs):
``filename``/``filemode``, or ``filename``/``filemode`` specified
together with ``stream``, or ``handlers`` specified together with
``stream``.
-
- .. versionchanged:: 3.8
- Added the ``force`` parameter.
-
- .. versionchanged:: 3.9
- Added the ``encoding`` and ``errors`` parameters.
+
+ .. versionchanged:: 3.8
+ Added the ``force`` parameter.
+
+ .. versionchanged:: 3.9
+ Added the ``encoding`` and ``errors`` parameters.
"""
# Add thread safety in case someone mistakenly calls
# basicConfig() from multiple threads
_acquireLock()
try:
- force = kwargs.pop('force', False)
- encoding = kwargs.pop('encoding', None)
- errors = kwargs.pop('errors', 'backslashreplace')
- if force:
- for h in root.handlers[:]:
- root.removeHandler(h)
- h.close()
+ force = kwargs.pop('force', False)
+ encoding = kwargs.pop('encoding', None)
+ errors = kwargs.pop('errors', 'backslashreplace')
+ if force:
+ for h in root.handlers[:]:
+ root.removeHandler(h)
+ h.close()
if len(root.handlers) == 0:
handlers = kwargs.pop("handlers", None)
if handlers is None:
@@ -1998,10 +1998,10 @@ def basicConfig(**kwargs):
filename = kwargs.pop("filename", None)
mode = kwargs.pop("filemode", 'a')
if filename:
-                if 'b' in mode:
- errors = None
- h = FileHandler(filename, mode,
- encoding=encoding, errors=errors)
+                if 'b' in mode:
+ errors = None
+ h = FileHandler(filename, mode,
+ encoding=encoding, errors=errors)
else:
stream = kwargs.pop("stream", None)
h = StreamHandler(stream)
@@ -2037,9 +2037,9 @@ def getLogger(name=None):
If no name is specified, return the root logger.
"""
- if not name or isinstance(name, str) and name == root.name:
+ if not name or isinstance(name, str) and name == root.name:
return root
- return Logger.manager.getLogger(name)
+ return Logger.manager.getLogger(name)
def critical(msg, *args, **kwargs):
"""
@@ -2178,9 +2178,9 @@ class NullHandler(Handler):
def createLock(self):
self.lock = None
- def _at_fork_reinit(self):
- pass
-
+ def _at_fork_reinit(self):
+ pass
+
# Warnings integration
_warnings_showwarning = None
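
basicConfig()'s restored documentation matches its behaviour: force=True closes and removes any existing root handlers first, and encoding/errors flow into the FileHandler created for filename (errors defaults to 'backslashreplace' and is reset to None for binary filemodes). A sketch, with a hypothetical log path:

    import logging

    logging.basicConfig(filename='app.log', filemode='a',
                        encoding='utf-8', errors='backslashreplace',
                        level=logging.INFO, force=True)
    logging.info('written through a FileHandler with explicit encoding')
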
diff --git a/contrib/tools/python3/src/Lib/logging/config.py b/contrib/tools/python3/src/Lib/logging/config.py
index fd3aded760..6cdcc022ad 100644
--- a/contrib/tools/python3/src/Lib/logging/config.py
+++ b/contrib/tools/python3/src/Lib/logging/config.py
@@ -1,4 +1,4 @@
-# Copyright 2001-2019 by Vinay Sajip. All Rights Reserved.
+# Copyright 2001-2019 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
@@ -19,7 +19,7 @@ Configuration functions for the logging package for Python. The core package
is based on PEP 282 and comments thereto in comp.lang.python, and influenced
by Apache's log4j system.
-Copyright (C) 2001-2019 Vinay Sajip. All Rights Reserved.
+Copyright (C) 2001-2019 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
@@ -143,7 +143,7 @@ def _install_handlers(cp, formatters):
kwargs = section.get("kwargs", '{}')
kwargs = eval(kwargs, vars(logging))
h = klass(*args, **kwargs)
- h.name = hand
+ h.name = hand
if "level" in section:
level = section["level"]
h.setLevel(level)
@@ -174,10 +174,10 @@ def _handle_existing_loggers(existing, child_loggers, disable_existing):
for log in existing:
logger = root.manager.loggerDict[log]
if log in child_loggers:
- if not isinstance(logger, logging.PlaceHolder):
- logger.setLevel(logging.NOTSET)
- logger.handlers = []
- logger.propagate = True
+ if not isinstance(logger, logging.PlaceHolder):
+ logger.setLevel(logging.NOTSET)
+ logger.handlers = []
+ logger.propagate = True
else:
logger.disabled = disable_existing
@@ -448,7 +448,7 @@ class BaseConfigurator(object):
value = ConvertingList(value)
value.configurator = self
elif not isinstance(value, ConvertingTuple) and\
- isinstance(value, tuple) and not hasattr(value, '_fields'):
+ isinstance(value, tuple) and not hasattr(value, '_fields'):
value = ConvertingTuple(value)
value.configurator = self
elif isinstance(value, str): # str for py3k
@@ -668,19 +668,19 @@ class DictConfigurator(BaseConfigurator):
dfmt = config.get('datefmt', None)
style = config.get('style', '%')
cname = config.get('class', None)
-
+
if not cname:
c = logging.Formatter
else:
c = _resolve(cname)
-
- # A TypeError would be raised if "validate" key is passed in with a formatter callable
- # that does not accept "validate" as a parameter
- if 'validate' in config: # if user hasn't mentioned it, the default will be fine
- result = c(fmt, dfmt, style, config['validate'])
- else:
- result = c(fmt, dfmt, style)
-
+
+ # A TypeError would be raised if "validate" key is passed in with a formatter callable
+ # that does not accept "validate" as a parameter
+ if 'validate' in config: # if user hasn't mentioned it, the default will be fine
+ result = c(fmt, dfmt, style, config['validate'])
+ else:
+ result = c(fmt, dfmt, style)
+
return result
def configure_filter(self, config):
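
configure_formatter() above only forwards the 'validate' key when the config dict actually contains it, so formatter factories that predate the parameter keep working. A minimal dictConfig exercising it:

    import logging.config

    logging.config.dictConfig({
        'version': 1,
        'formatters': {
            'strict': {'format': '%(levelname)s %(message)s',
                       'validate': True},
        },
        'handlers': {
            'console': {'class': 'logging.StreamHandler',
                        'formatter': 'strict'},
        },
        'root': {'level': 'INFO', 'handlers': ['console']},
    })
    logging.getLogger('demo').info('configured')
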
diff --git a/contrib/tools/python3/src/Lib/logging/handlers.py b/contrib/tools/python3/src/Lib/logging/handlers.py
index 572e370110..af0b49ee70 100644
--- a/contrib/tools/python3/src/Lib/logging/handlers.py
+++ b/contrib/tools/python3/src/Lib/logging/handlers.py
@@ -1,4 +1,4 @@
-# Copyright 2001-2021 by Vinay Sajip. All Rights Reserved.
+# Copyright 2001-2021 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
@@ -18,7 +18,7 @@
Additional handlers for the logging package for Python. The core package is
based on PEP 282 and comments thereto in comp.lang.python.
-Copyright (C) 2001-2021 Vinay Sajip. All Rights Reserved.
+Copyright (C) 2001-2021 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging.handlers' and log away!
"""
@@ -27,7 +27,7 @@ import logging, socket, os, pickle, struct, time, re
from stat import ST_DEV, ST_INO, ST_MTIME
import queue
import threading
-import copy
+import copy
#
# Some constants...
@@ -48,19 +48,19 @@ class BaseRotatingHandler(logging.FileHandler):
Not meant to be instantiated directly. Instead, use RotatingFileHandler
or TimedRotatingFileHandler.
"""
- namer = None
- rotator = None
-
- def __init__(self, filename, mode, encoding=None, delay=False, errors=None):
+ namer = None
+ rotator = None
+
+ def __init__(self, filename, mode, encoding=None, delay=False, errors=None):
"""
Use the specified filename for streamed logging
"""
- logging.FileHandler.__init__(self, filename, mode=mode,
- encoding=encoding, delay=delay,
- errors=errors)
+ logging.FileHandler.__init__(self, filename, mode=mode,
+ encoding=encoding, delay=delay,
+ errors=errors)
self.mode = mode
self.encoding = encoding
- self.errors = errors
+ self.errors = errors
def emit(self, record):
"""
@@ -121,8 +121,8 @@ class RotatingFileHandler(BaseRotatingHandler):
Handler for logging to a set of files, which switches from one file
to the next when the current file reaches a certain size.
"""
- def __init__(self, filename, mode='a', maxBytes=0, backupCount=0,
- encoding=None, delay=False, errors=None):
+ def __init__(self, filename, mode='a', maxBytes=0, backupCount=0,
+ encoding=None, delay=False, errors=None):
"""
Open the specified file and use it as the stream for logging.
@@ -150,8 +150,8 @@ class RotatingFileHandler(BaseRotatingHandler):
# on each run.
if maxBytes > 0:
mode = 'a'
- BaseRotatingHandler.__init__(self, filename, mode, encoding=encoding,
- delay=delay, errors=errors)
+ BaseRotatingHandler.__init__(self, filename, mode, encoding=encoding,
+ delay=delay, errors=errors)
self.maxBytes = maxBytes
self.backupCount = backupCount
@@ -185,17 +185,17 @@ class RotatingFileHandler(BaseRotatingHandler):
Basically, see if the supplied record would cause the file to exceed
the size limit we have.
"""
- # See bpo-45401: Never rollover anything other than regular files
- if os.path.exists(self.baseFilename) and not os.path.isfile(self.baseFilename):
- return False
+ # See bpo-45401: Never rollover anything other than regular files
+ if os.path.exists(self.baseFilename) and not os.path.isfile(self.baseFilename):
+ return False
if self.stream is None: # delay was set...
self.stream = self._open()
if self.maxBytes > 0: # are we rolling over?
msg = "%s\n" % self.format(record)
self.stream.seek(0, 2) #due to non-posix-compliant Windows feature
if self.stream.tell() + len(msg) >= self.maxBytes:
- return True
- return False
+ return True
+ return False
class TimedRotatingFileHandler(BaseRotatingHandler):
"""
@@ -205,11 +205,11 @@ class TimedRotatingFileHandler(BaseRotatingHandler):
If backupCount is > 0, when rollover is done, no more than backupCount
files are kept - the oldest ones are deleted.
"""
- def __init__(self, filename, when='h', interval=1, backupCount=0,
- encoding=None, delay=False, utc=False, atTime=None,
- errors=None):
- BaseRotatingHandler.__init__(self, filename, 'a', encoding=encoding,
- delay=delay, errors=errors)
+ def __init__(self, filename, when='h', interval=1, backupCount=0,
+ encoding=None, delay=False, utc=False, atTime=None,
+ errors=None):
+ BaseRotatingHandler.__init__(self, filename, 'a', encoding=encoding,
+ delay=delay, errors=errors)
self.when = when.upper()
self.backupCount = backupCount
self.utc = utc
@@ -345,13 +345,13 @@ class TimedRotatingFileHandler(BaseRotatingHandler):
record is not used, as we are just comparing times, but it is needed so
the method signatures are the same
"""
- # See bpo-45401: Never rollover anything other than regular files
- if os.path.exists(self.baseFilename) and not os.path.isfile(self.baseFilename):
- return False
+ # See bpo-45401: Never rollover anything other than regular files
+ if os.path.exists(self.baseFilename) and not os.path.isfile(self.baseFilename):
+ return False
t = int(time.time())
if t >= self.rolloverAt:
- return True
- return False
+ return True
+ return False
def getFilesToDelete(self):
"""
@@ -362,32 +362,32 @@ class TimedRotatingFileHandler(BaseRotatingHandler):
dirName, baseName = os.path.split(self.baseFilename)
fileNames = os.listdir(dirName)
result = []
- # See bpo-44753: Don't use the extension when computing the prefix.
- n, e = os.path.splitext(baseName)
- prefix = n + '.'
+ # See bpo-44753: Don't use the extension when computing the prefix.
+ n, e = os.path.splitext(baseName)
+ prefix = n + '.'
plen = len(prefix)
for fileName in fileNames:
- if self.namer is None:
- # Our files will always start with baseName
- if not fileName.startswith(baseName):
- continue
- else:
- # Our files could be just about anything after custom naming, but
- # likely candidates are of the form
- # foo.log.DATETIME_SUFFIX or foo.DATETIME_SUFFIX.log
- if (not fileName.startswith(baseName) and fileName.endswith(e) and
- len(fileName) > (plen + 1) and not fileName[plen+1].isdigit()):
- continue
-
+ if self.namer is None:
+ # Our files will always start with baseName
+ if not fileName.startswith(baseName):
+ continue
+ else:
+ # Our files could be just about anything after custom naming, but
+ # likely candidates are of the form
+ # foo.log.DATETIME_SUFFIX or foo.DATETIME_SUFFIX.log
+ if (not fileName.startswith(baseName) and fileName.endswith(e) and
+ len(fileName) > (plen + 1) and not fileName[plen+1].isdigit()):
+ continue
+
if fileName[:plen] == prefix:
suffix = fileName[plen:]
- # See bpo-45628: The date/time suffix could be anywhere in the
- # filename
- parts = suffix.split('.')
- for part in parts:
- if self.extMatch.match(part):
- result.append(os.path.join(dirName, fileName))
- break
+ # See bpo-45628: The date/time suffix could be anywhere in the
+ # filename
+ parts = suffix.split('.')
+ for part in parts:
+ if self.extMatch.match(part):
+ result.append(os.path.join(dirName, fileName))
+ break
if len(result) < self.backupCount:
result = []
else:
@@ -465,11 +465,11 @@ class WatchedFileHandler(logging.FileHandler):
This handler is based on a suggestion and patch by Chad J.
Schroeder.
"""
- def __init__(self, filename, mode='a', encoding=None, delay=False,
- errors=None):
- logging.FileHandler.__init__(self, filename, mode=mode,
- encoding=encoding, delay=delay,
- errors=errors)
+ def __init__(self, filename, mode='a', encoding=None, delay=False,
+ errors=None):
+ logging.FileHandler.__init__(self, filename, mode=mode,
+ encoding=encoding, delay=delay,
+ errors=errors)
self.dev, self.ino = -1, -1
self._statstream()
@@ -767,10 +767,10 @@ class SysLogHandler(logging.Handler):
LOG_CRON = 9 # clock daemon
LOG_AUTHPRIV = 10 # security/authorization messages (private)
LOG_FTP = 11 # FTP daemon
- LOG_NTP = 12 # NTP subsystem
- LOG_SECURITY = 13 # Log audit
- LOG_CONSOLE = 14 # Log alert
- LOG_SOLCRON = 15 # Scheduling daemon (Solaris)
+ LOG_NTP = 12 # NTP subsystem
+ LOG_SECURITY = 13 # Log audit
+ LOG_CONSOLE = 14 # Log alert
+ LOG_SOLCRON = 15 # Scheduling daemon (Solaris)
# other codes through 15 reserved for system use
LOG_LOCAL0 = 16 # reserved for local use
@@ -798,30 +798,30 @@ class SysLogHandler(logging.Handler):
}
facility_names = {
- "auth": LOG_AUTH,
- "authpriv": LOG_AUTHPRIV,
- "console": LOG_CONSOLE,
- "cron": LOG_CRON,
- "daemon": LOG_DAEMON,
- "ftp": LOG_FTP,
- "kern": LOG_KERN,
- "lpr": LOG_LPR,
- "mail": LOG_MAIL,
- "news": LOG_NEWS,
- "ntp": LOG_NTP,
- "security": LOG_SECURITY,
- "solaris-cron": LOG_SOLCRON,
- "syslog": LOG_SYSLOG,
- "user": LOG_USER,
- "uucp": LOG_UUCP,
- "local0": LOG_LOCAL0,
- "local1": LOG_LOCAL1,
- "local2": LOG_LOCAL2,
- "local3": LOG_LOCAL3,
- "local4": LOG_LOCAL4,
- "local5": LOG_LOCAL5,
- "local6": LOG_LOCAL6,
- "local7": LOG_LOCAL7,
+ "auth": LOG_AUTH,
+ "authpriv": LOG_AUTHPRIV,
+ "console": LOG_CONSOLE,
+ "cron": LOG_CRON,
+ "daemon": LOG_DAEMON,
+ "ftp": LOG_FTP,
+ "kern": LOG_KERN,
+ "lpr": LOG_LPR,
+ "mail": LOG_MAIL,
+ "news": LOG_NEWS,
+ "ntp": LOG_NTP,
+ "security": LOG_SECURITY,
+ "solaris-cron": LOG_SOLCRON,
+ "syslog": LOG_SYSLOG,
+ "user": LOG_USER,
+ "uucp": LOG_UUCP,
+ "local0": LOG_LOCAL0,
+ "local1": LOG_LOCAL1,
+ "local2": LOG_LOCAL2,
+ "local3": LOG_LOCAL3,
+ "local4": LOG_LOCAL4,
+ "local5": LOG_LOCAL5,
+ "local6": LOG_LOCAL6,
+ "local7": LOG_LOCAL7,
}
#The map below appears to be trivially lowercasing the key. However,
@@ -1198,20 +1198,20 @@ class HTTPHandler(logging.Handler):
"""
return record.__dict__
- def getConnection(self, host, secure):
- """
- get a HTTP[S]Connection.
-
- Override when a custom connection is required, for example if
- there is a proxy.
- """
- import http.client
- if secure:
- connection = http.client.HTTPSConnection(host, context=self.context)
- else:
- connection = http.client.HTTPConnection(host)
- return connection
-
+ def getConnection(self, host, secure):
+ """
+ get a HTTP[S]Connection.
+
+ Override when a custom connection is required, for example if
+ there is a proxy.
+ """
+ import http.client
+ if secure:
+ connection = http.client.HTTPSConnection(host, context=self.context)
+ else:
+ connection = http.client.HTTPConnection(host)
+ return connection
+
def emit(self, record):
"""
Emit a record.
@@ -1219,9 +1219,9 @@ class HTTPHandler(logging.Handler):
Send the record to the Web server as a percent-encoded dictionary
"""
try:
- import urllib.parse
+ import urllib.parse
host = self.host
- h = self.getConnection(host, self.secure)
+ h = self.getConnection(host, self.secure)
url = self.url
data = urllib.parse.urlencode(self.mapLogRecord(record))
if self.method == "GET":
@@ -1297,7 +1297,7 @@ class BufferingHandler(logging.Handler):
"""
self.acquire()
try:
- self.buffer.clear()
+ self.buffer.clear()
finally:
self.release()
@@ -1349,11 +1349,11 @@ class MemoryHandler(BufferingHandler):
"""
Set the target handler for this handler.
"""
- self.acquire()
- try:
- self.target = target
- finally:
- self.release()
+ self.acquire()
+ try:
+ self.target = target
+ finally:
+ self.release()
def flush(self):
"""
@@ -1368,7 +1368,7 @@ class MemoryHandler(BufferingHandler):
if self.target:
for record in self.buffer:
self.target.handle(record)
- self.buffer.clear()
+ self.buffer.clear()
finally:
self.release()
@@ -1437,8 +1437,8 @@ class QueueHandler(logging.Handler):
# exc_info and exc_text attributes, as they are no longer
# needed and, if not None, will typically not be pickleable.
msg = self.format(record)
- # bpo-35726: make copy of record to avoid affecting other handlers in the chain.
- record = copy.copy(record)
+ # bpo-35726: make copy of record to avoid affecting other handlers in the chain.
+ record = copy.copy(record)
record.message = msg
record.msg = msg
record.args = None
@@ -1496,7 +1496,7 @@ class QueueListener(object):
t.daemon = True
t.start()
- def prepare(self, record):
+ def prepare(self, record):
"""
Prepare a record for handling.
@@ -1536,8 +1536,8 @@ class QueueListener(object):
try:
record = self.dequeue(True)
if record is self._sentinel:
- if has_task_done:
- q.task_done()
+ if has_task_done:
+ q.task_done()
break
self.handle(record)
if has_task_done:
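
Several guards restored above are easy to miss: both shouldRollover() methods bail out early for anything that is not a regular file (bpo-45401), getFilesToDelete() matches the date/time suffix anywhere in a rotated name so custom namers keep working (bpo-44753, bpo-45628), and QueueHandler.prepare() copies the record before mutating it (bpo-35726). A sketch with a hypothetical path:

    import logging.handlers

    h = logging.handlers.TimedRotatingFileHandler(
        'app.log', when='midnight', backupCount=7)

    # A namer may append to the rotated name; getFilesToDelete() still
    # finds the date suffix among the dot-separated parts.
    h.namer = lambda name: name + '.rotated'
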
diff --git a/contrib/tools/python3/src/Lib/lzma.py b/contrib/tools/python3/src/Lib/lzma.py
index 0aa30fe87f..85dc528f65 100644
--- a/contrib/tools/python3/src/Lib/lzma.py
+++ b/contrib/tools/python3/src/Lib/lzma.py
@@ -225,22 +225,22 @@ class LZMAFile(_compression.BaseStream):
"""Write a bytes object to the file.
Returns the number of uncompressed bytes written, which is
- always the length of data in bytes. Note that due to buffering,
- the file on disk may not reflect the data written until close()
- is called.
+ always the length of data in bytes. Note that due to buffering,
+ the file on disk may not reflect the data written until close()
+ is called.
"""
self._check_can_write()
- if isinstance(data, (bytes, bytearray)):
- length = len(data)
- else:
- # accept any data that supports the buffer protocol
- data = memoryview(data)
- length = data.nbytes
-
+ if isinstance(data, (bytes, bytearray)):
+ length = len(data)
+ else:
+ # accept any data that supports the buffer protocol
+ data = memoryview(data)
+ length = data.nbytes
+
compressed = self._compressor.compress(data)
self._fp.write(compressed)
- self._pos += length
- return length
+ self._pos += length
+ return length
def seek(self, offset, whence=io.SEEK_SET):
"""Change the file position.
diff --git a/contrib/tools/python3/src/Lib/mailbox.py b/contrib/tools/python3/src/Lib/mailbox.py
index 70da07ed2e..bbb41d5c0e 100644
--- a/contrib/tools/python3/src/Lib/mailbox.py
+++ b/contrib/tools/python3/src/Lib/mailbox.py
@@ -18,7 +18,7 @@ import email.message
import email.generator
import io
import contextlib
-from types import GenericAlias
+from types import GenericAlias
try:
import fcntl
except ImportError:
@@ -261,9 +261,9 @@ class Mailbox:
else:
raise TypeError('Invalid message type: %s' % type(message))
- __class_getitem__ = classmethod(GenericAlias)
-
+ __class_getitem__ = classmethod(GenericAlias)
+
class Maildir(Mailbox):
"""A qmail-style Maildir mailbox."""
@@ -787,7 +787,7 @@ class _mboxMMDF(_singlefileMailbox):
def get_string(self, key, from_=False):
"""Return a string representation or raise a KeyError."""
return email.message_from_bytes(
- self.get_bytes(key, from_)).as_string(unixfrom=from_)
+ self.get_bytes(key, from_)).as_string(unixfrom=from_)
def get_bytes(self, key, from_=False):
"""Return a string representation or raise a KeyError."""
@@ -2018,9 +2018,9 @@ class _ProxyFile:
return False
return self._file.closed
- __class_getitem__ = classmethod(GenericAlias)
-
+ __class_getitem__ = classmethod(GenericAlias)
+
class _PartialFile(_ProxyFile):
"""A read-only wrapper of part of a file."""
diff --git a/contrib/tools/python3/src/Lib/mailcap.py b/contrib/tools/python3/src/Lib/mailcap.py
index ae416a8e9f..250fdf9a27 100644
--- a/contrib/tools/python3/src/Lib/mailcap.py
+++ b/contrib/tools/python3/src/Lib/mailcap.py
@@ -251,7 +251,7 @@ def test():
else:
print("Executing:", command)
sts = os.system(command)
- sts = os.waitstatus_to_exitcode(sts)
+ sts = os.waitstatus_to_exitcode(sts)
if sts:
print("Exit status:", sts)
diff --git a/contrib/tools/python3/src/Lib/mimetypes.py b/contrib/tools/python3/src/Lib/mimetypes.py
index d36e1664cd..b29e6b7f9d 100644
--- a/contrib/tools/python3/src/Lib/mimetypes.py
+++ b/contrib/tools/python3/src/Lib/mimetypes.py
@@ -66,13 +66,13 @@ class MimeTypes:
def __init__(self, filenames=(), strict=True):
if not inited:
init()
- self.encodings_map = _encodings_map_default.copy()
- self.suffix_map = _suffix_map_default.copy()
+ self.encodings_map = _encodings_map_default.copy()
+ self.suffix_map = _suffix_map_default.copy()
self.types_map = ({}, {}) # dict for (non-strict, strict)
self.types_map_inv = ({}, {})
- for (ext, type) in _types_map_default.items():
+ for (ext, type) in _types_map_default.items():
self.add_type(type, ext, True)
- for (ext, type) in _common_types_default.items():
+ for (ext, type) in _common_types_default.items():
self.add_type(type, ext, False)
for name in filenames:
self.read(name, strict)
@@ -95,7 +95,7 @@ class MimeTypes:
exts.append(ext)
def guess_type(self, url, strict=True):
- """Guess the type of a file which is either a URL or a path-like object.
+ """Guess the type of a file which is either a URL or a path-like object.
Return value is a tuple (type, encoding) where type is None if
the type can't be guessed (no or unknown suffix) or a string
@@ -113,8 +113,8 @@ class MimeTypes:
Optional `strict' argument when False adds a bunch of commonly found,
but non-standard types.
"""
- url = os.fspath(url)
- scheme, url = urllib.parse._splittype(url)
+ url = os.fspath(url)
+ scheme, url = urllib.parse._splittype(url)
if scheme == 'data':
# syntax of data URLs:
# dataurl := "data:" [ mediatype ] [ ";base64" ] "," data
@@ -169,7 +169,7 @@ class MimeTypes:
but non-standard types.
"""
type = type.lower()
- extensions = list(self.types_map_inv[True].get(type, []))
+ extensions = list(self.types_map_inv[True].get(type, []))
if not strict:
for ext in self.types_map_inv[False].get(type, []):
if ext not in extensions:
@@ -346,19 +346,19 @@ def init(files=None):
global suffix_map, types_map, encodings_map, common_types
global inited, _db
inited = True # so that MimeTypes.__init__() doesn't call us again
-
- if files is None or _db is None:
- db = MimeTypes()
+
+ if files is None or _db is None:
+ db = MimeTypes()
if _winreg:
db.read_windows_registry()
-
- if files is None:
- files = knownfiles
- else:
- files = knownfiles + list(files)
- else:
- db = _db
-
+
+ if files is None:
+ files = knownfiles
+ else:
+ files = knownfiles + list(files)
+ else:
+ db = _db
+
for file in files:
if os.path.isfile(file):
db.read(file)
@@ -372,7 +372,7 @@ def init(files=None):
def read_mime_types(file):
try:
- f = open(file, encoding='utf-8')
+ f = open(file, encoding='utf-8')
except OSError:
return None
with f:
@@ -382,12 +382,12 @@ def read_mime_types(file):
def _default_mime_types():
- global suffix_map, _suffix_map_default
- global encodings_map, _encodings_map_default
- global types_map, _types_map_default
- global common_types, _common_types_default
+ global suffix_map, _suffix_map_default
+ global encodings_map, _encodings_map_default
+ global types_map, _types_map_default
+ global common_types, _common_types_default
- suffix_map = _suffix_map_default = {
+ suffix_map = _suffix_map_default = {
'.svgz': '.svg.gz',
'.tgz': '.tar.gz',
'.taz': '.tar.gz',
@@ -396,52 +396,52 @@ def _default_mime_types():
'.txz': '.tar.xz',
}
- encodings_map = _encodings_map_default = {
+ encodings_map = _encodings_map_default = {
'.gz': 'gzip',
'.Z': 'compress',
'.bz2': 'bzip2',
'.xz': 'xz',
- '.br': 'br',
+ '.br': 'br',
}
# Before adding new types, make sure they are either registered with IANA,
# at http://www.iana.org/assignments/media-types
# or extensions, i.e. using the x- prefix
- # If you add to these, please keep them sorted by mime type.
- # Make sure the entry with the preferred file extension for a particular mime type
- # appears before any others of the same mimetype.
- types_map = _types_map_default = {
- '.js' : 'application/javascript',
- '.mjs' : 'application/javascript',
- '.json' : 'application/json',
- '.webmanifest': 'application/manifest+json',
- '.doc' : 'application/msword',
- '.dot' : 'application/msword',
- '.wiz' : 'application/msword',
- '.bin' : 'application/octet-stream',
+ # If you add to these, please keep them sorted by mime type.
+ # Make sure the entry with the preferred file extension for a particular mime type
+ # appears before any others of the same mimetype.
+ types_map = _types_map_default = {
+ '.js' : 'application/javascript',
+ '.mjs' : 'application/javascript',
+ '.json' : 'application/json',
+ '.webmanifest': 'application/manifest+json',
+ '.doc' : 'application/msword',
+ '.dot' : 'application/msword',
+ '.wiz' : 'application/msword',
+ '.bin' : 'application/octet-stream',
'.a' : 'application/octet-stream',
- '.dll' : 'application/octet-stream',
- '.exe' : 'application/octet-stream',
- '.o' : 'application/octet-stream',
- '.obj' : 'application/octet-stream',
- '.so' : 'application/octet-stream',
- '.oda' : 'application/oda',
- '.pdf' : 'application/pdf',
- '.p7c' : 'application/pkcs7-mime',
- '.ps' : 'application/postscript',
+ '.dll' : 'application/octet-stream',
+ '.exe' : 'application/octet-stream',
+ '.o' : 'application/octet-stream',
+ '.obj' : 'application/octet-stream',
+ '.so' : 'application/octet-stream',
+ '.oda' : 'application/oda',
+ '.pdf' : 'application/pdf',
+ '.p7c' : 'application/pkcs7-mime',
+ '.ps' : 'application/postscript',
'.ai' : 'application/postscript',
- '.eps' : 'application/postscript',
- '.m3u' : 'application/vnd.apple.mpegurl',
- '.m3u8' : 'application/vnd.apple.mpegurl',
- '.xls' : 'application/vnd.ms-excel',
- '.xlb' : 'application/vnd.ms-excel',
- '.ppt' : 'application/vnd.ms-powerpoint',
- '.pot' : 'application/vnd.ms-powerpoint',
- '.ppa' : 'application/vnd.ms-powerpoint',
- '.pps' : 'application/vnd.ms-powerpoint',
- '.pwz' : 'application/vnd.ms-powerpoint',
- '.wasm' : 'application/wasm',
+ '.eps' : 'application/postscript',
+ '.m3u' : 'application/vnd.apple.mpegurl',
+ '.m3u8' : 'application/vnd.apple.mpegurl',
+ '.xls' : 'application/vnd.ms-excel',
+ '.xlb' : 'application/vnd.ms-excel',
+ '.ppt' : 'application/vnd.ms-powerpoint',
+ '.pot' : 'application/vnd.ms-powerpoint',
+ '.ppa' : 'application/vnd.ms-powerpoint',
+ '.pps' : 'application/vnd.ms-powerpoint',
+ '.pwz' : 'application/vnd.ms-powerpoint',
+ '.wasm' : 'application/wasm',
'.bcpio' : 'application/x-bcpio',
'.cpio' : 'application/x-cpio',
'.csh' : 'application/x-csh',
@@ -450,16 +450,16 @@ def _default_mime_types():
'.hdf' : 'application/x-hdf',
'.latex' : 'application/x-latex',
'.mif' : 'application/x-mif',
- '.cdf' : 'application/x-netcdf',
+ '.cdf' : 'application/x-netcdf',
'.nc' : 'application/x-netcdf',
'.p12' : 'application/x-pkcs12',
'.pfx' : 'application/x-pkcs12',
- '.ram' : 'application/x-pn-realaudio',
+ '.ram' : 'application/x-pn-realaudio',
'.pyc' : 'application/x-python-code',
'.pyo' : 'application/x-python-code',
'.sh' : 'application/x-sh',
'.shar' : 'application/x-shar',
- '.swf' : 'application/x-shockwave-flash',
+ '.swf' : 'application/x-shockwave-flash',
'.sv4cpio': 'application/x-sv4cpio',
'.sv4crc' : 'application/x-sv4crc',
'.tar' : 'application/x-tar',
@@ -467,104 +467,104 @@ def _default_mime_types():
'.tex' : 'application/x-tex',
'.texi' : 'application/x-texinfo',
'.texinfo': 'application/x-texinfo',
- '.roff' : 'application/x-troff',
- '.t' : 'application/x-troff',
+ '.roff' : 'application/x-troff',
+ '.t' : 'application/x-troff',
'.tr' : 'application/x-troff',
- '.man' : 'application/x-troff-man',
- '.me' : 'application/x-troff-me',
- '.ms' : 'application/x-troff-ms',
+ '.man' : 'application/x-troff-man',
+ '.me' : 'application/x-troff-me',
+ '.ms' : 'application/x-troff-ms',
'.ustar' : 'application/x-ustar',
- '.src' : 'application/x-wais-source',
- '.xsl' : 'application/xml',
- '.rdf' : 'application/xml',
- '.wsdl' : 'application/xml',
- '.xpdl' : 'application/xml',
- '.zip' : 'application/zip',
- '.au' : 'audio/basic',
- '.snd' : 'audio/basic',
- '.mp3' : 'audio/mpeg',
- '.mp2' : 'audio/mpeg',
- '.aif' : 'audio/x-aiff',
- '.aifc' : 'audio/x-aiff',
- '.aiff' : 'audio/x-aiff',
- '.ra' : 'audio/x-pn-realaudio',
+ '.src' : 'application/x-wais-source',
+ '.xsl' : 'application/xml',
+ '.rdf' : 'application/xml',
+ '.wsdl' : 'application/xml',
+ '.xpdl' : 'application/xml',
+ '.zip' : 'application/zip',
+ '.au' : 'audio/basic',
+ '.snd' : 'audio/basic',
+ '.mp3' : 'audio/mpeg',
+ '.mp2' : 'audio/mpeg',
+ '.aif' : 'audio/x-aiff',
+ '.aifc' : 'audio/x-aiff',
+ '.aiff' : 'audio/x-aiff',
+ '.ra' : 'audio/x-pn-realaudio',
'.wav' : 'audio/x-wav',
- '.bmp' : 'image/bmp',
- '.gif' : 'image/gif',
- '.ief' : 'image/ief',
- '.jpg' : 'image/jpeg',
- '.jpe' : 'image/jpeg',
- '.jpeg' : 'image/jpeg',
- '.png' : 'image/png',
- '.svg' : 'image/svg+xml',
- '.tiff' : 'image/tiff',
- '.tif' : 'image/tiff',
- '.ico' : 'image/vnd.microsoft.icon',
- '.ras' : 'image/x-cmu-raster',
- '.bmp' : 'image/x-ms-bmp',
- '.pnm' : 'image/x-portable-anymap',
- '.pbm' : 'image/x-portable-bitmap',
- '.pgm' : 'image/x-portable-graymap',
- '.ppm' : 'image/x-portable-pixmap',
- '.rgb' : 'image/x-rgb',
+ '.bmp' : 'image/bmp',
+ '.gif' : 'image/gif',
+ '.ief' : 'image/ief',
+ '.jpg' : 'image/jpeg',
+ '.jpe' : 'image/jpeg',
+ '.jpeg' : 'image/jpeg',
+ '.png' : 'image/png',
+ '.svg' : 'image/svg+xml',
+ '.tiff' : 'image/tiff',
+ '.tif' : 'image/tiff',
+ '.ico' : 'image/vnd.microsoft.icon',
+ '.ras' : 'image/x-cmu-raster',
+ '.bmp' : 'image/x-ms-bmp',
+ '.pnm' : 'image/x-portable-anymap',
+ '.pbm' : 'image/x-portable-bitmap',
+ '.pgm' : 'image/x-portable-graymap',
+ '.ppm' : 'image/x-portable-pixmap',
+ '.rgb' : 'image/x-rgb',
'.xbm' : 'image/x-xbitmap',
'.xpm' : 'image/x-xpixmap',
'.xwd' : 'image/x-xwindowdump',
- '.eml' : 'message/rfc822',
- '.mht' : 'message/rfc822',
- '.mhtml' : 'message/rfc822',
- '.nws' : 'message/rfc822',
- '.css' : 'text/css',
- '.csv' : 'text/csv',
- '.html' : 'text/html',
- '.htm' : 'text/html',
- '.txt' : 'text/plain',
- '.bat' : 'text/plain',
- '.c' : 'text/plain',
- '.h' : 'text/plain',
- '.ksh' : 'text/plain',
- '.pl' : 'text/plain',
- '.rtx' : 'text/richtext',
- '.tsv' : 'text/tab-separated-values',
- '.py' : 'text/x-python',
- '.etx' : 'text/x-setext',
- '.sgm' : 'text/x-sgml',
- '.sgml' : 'text/x-sgml',
- '.vcf' : 'text/x-vcard',
- '.xml' : 'text/xml',
- '.mp4' : 'video/mp4',
- '.mpeg' : 'video/mpeg',
- '.m1v' : 'video/mpeg',
- '.mpa' : 'video/mpeg',
- '.mpe' : 'video/mpeg',
- '.mpg' : 'video/mpeg',
- '.mov' : 'video/quicktime',
- '.qt' : 'video/quicktime',
- '.webm' : 'video/webm',
- '.avi' : 'video/x-msvideo',
- '.movie' : 'video/x-sgi-movie',
+ '.eml' : 'message/rfc822',
+ '.mht' : 'message/rfc822',
+ '.mhtml' : 'message/rfc822',
+ '.nws' : 'message/rfc822',
+ '.css' : 'text/css',
+ '.csv' : 'text/csv',
+ '.html' : 'text/html',
+ '.htm' : 'text/html',
+ '.txt' : 'text/plain',
+ '.bat' : 'text/plain',
+ '.c' : 'text/plain',
+ '.h' : 'text/plain',
+ '.ksh' : 'text/plain',
+ '.pl' : 'text/plain',
+ '.rtx' : 'text/richtext',
+ '.tsv' : 'text/tab-separated-values',
+ '.py' : 'text/x-python',
+ '.etx' : 'text/x-setext',
+ '.sgm' : 'text/x-sgml',
+ '.sgml' : 'text/x-sgml',
+ '.vcf' : 'text/x-vcard',
+ '.xml' : 'text/xml',
+ '.mp4' : 'video/mp4',
+ '.mpeg' : 'video/mpeg',
+ '.m1v' : 'video/mpeg',
+ '.mpa' : 'video/mpeg',
+ '.mpe' : 'video/mpeg',
+ '.mpg' : 'video/mpeg',
+ '.mov' : 'video/quicktime',
+ '.qt' : 'video/quicktime',
+ '.webm' : 'video/webm',
+ '.avi' : 'video/x-msvideo',
+ '.movie' : 'video/x-sgi-movie',
}
# These are non-standard types, commonly found in the wild. They will
# only match if strict=0 flag is given to the API methods.
# Please sort these too
- common_types = _common_types_default = {
- '.rtf' : 'application/rtf',
- '.midi': 'audio/midi',
- '.mid' : 'audio/midi',
+ common_types = _common_types_default = {
+ '.rtf' : 'application/rtf',
+ '.midi': 'audio/midi',
+ '.mid' : 'audio/midi',
'.jpg' : 'image/jpg',
- '.pict': 'image/pict',
+ '.pict': 'image/pict',
'.pct' : 'image/pict',
'.pic' : 'image/pict',
- '.xul' : 'text/xul',
+ '.xul' : 'text/xul',
}
_default_mime_types()
-def _main():
+def _main():
import getopt
USAGE = """\
@@ -608,7 +608,7 @@ More than one type argument may be given.
guess, encoding = guess_type(gtype, strict)
if not guess: print("I don't know anything about type", gtype)
else: print('type:', guess, 'encoding:', encoding)
-
-
-if __name__ == '__main__':
- _main()
+
+
+if __name__ == '__main__':
+ _main()
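
Beyond the table reshuffling, guess_type() above now runs its argument through os.fspath(), so path-like objects are accepted before urllib.parse._splittype() strips any URL scheme:

    import mimetypes
    from pathlib import Path

    print(mimetypes.guess_type(Path('archive.tar.gz')))
    # ('application/x-tar', 'gzip') -- suffix_map resolves .tar.gz
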
diff --git a/contrib/tools/python3/src/Lib/modulefinder.py b/contrib/tools/python3/src/Lib/modulefinder.py
index cb455f40c4..4da51252f3 100644
--- a/contrib/tools/python3/src/Lib/modulefinder.py
+++ b/contrib/tools/python3/src/Lib/modulefinder.py
@@ -5,10 +5,10 @@ import importlib._bootstrap_external
import importlib.machinery
import marshal
import os
-import io
+import io
import sys
-
+
LOAD_CONST = dis.opmap['LOAD_CONST']
IMPORT_NAME = dis.opmap['IMPORT_NAME']
STORE_NAME = dis.opmap['STORE_NAME']
@@ -16,16 +16,16 @@ STORE_GLOBAL = dis.opmap['STORE_GLOBAL']
STORE_OPS = STORE_NAME, STORE_GLOBAL
EXTENDED_ARG = dis.EXTENDED_ARG
-# Old imp constants:
-
-_SEARCH_ERROR = 0
-_PY_SOURCE = 1
-_PY_COMPILED = 2
-_C_EXTENSION = 3
-_PKG_DIRECTORY = 5
-_C_BUILTIN = 6
-_PY_FROZEN = 7
-
+# Old imp constants:
+
+_SEARCH_ERROR = 0
+_PY_SOURCE = 1
+_PY_COMPILED = 2
+_C_EXTENSION = 3
+_PKG_DIRECTORY = 5
+_C_BUILTIN = 6
+_PY_FROZEN = 7
+
# Modulefinder does a good job at simulating Python's, but it can not
# handle __path__ modifications packages make at runtime. Therefore there
# is a mechanism whereby you can register extra paths in this map for a
@@ -50,51 +50,51 @@ def ReplacePackage(oldname, newname):
replacePackageMap[oldname] = newname
-def _find_module(name, path=None):
- """An importlib reimplementation of imp.find_module (for our purposes)."""
-
- # It's necessary to clear the caches for our Finder first, in case any
- # modules are being added/deleted/modified at runtime. In particular,
- # test_modulefinder.py changes file tree contents in a cache-breaking way:
-
- importlib.machinery.PathFinder.invalidate_caches()
-
- spec = importlib.machinery.PathFinder.find_spec(name, path)
-
- if spec is None:
- raise ImportError("No module named {name!r}".format(name=name), name=name)
-
- # Some special cases:
-
- if spec.loader is importlib.machinery.BuiltinImporter:
- return None, None, ("", "", _C_BUILTIN)
-
- if spec.loader is importlib.machinery.FrozenImporter:
- return None, None, ("", "", _PY_FROZEN)
-
- file_path = spec.origin
-
- if spec.loader.is_package(name):
- return None, os.path.dirname(file_path), ("", "", _PKG_DIRECTORY)
-
- if isinstance(spec.loader, importlib.machinery.SourceFileLoader):
- kind = _PY_SOURCE
-
- elif isinstance(spec.loader, importlib.machinery.ExtensionFileLoader):
- kind = _C_EXTENSION
-
- elif isinstance(spec.loader, importlib.machinery.SourcelessFileLoader):
- kind = _PY_COMPILED
-
- else: # Should never happen.
- return None, None, ("", "", _SEARCH_ERROR)
-
- file = io.open_code(file_path)
- suffix = os.path.splitext(file_path)[-1]
-
- return file, file_path, (suffix, "rb", kind)
-
-
+def _find_module(name, path=None):
+ """An importlib reimplementation of imp.find_module (for our purposes)."""
+
+ # It's necessary to clear the caches for our Finder first, in case any
+ # modules are being added/deleted/modified at runtime. In particular,
+ # test_modulefinder.py changes file tree contents in a cache-breaking way:
+
+ importlib.machinery.PathFinder.invalidate_caches()
+
+ spec = importlib.machinery.PathFinder.find_spec(name, path)
+
+ if spec is None:
+ raise ImportError("No module named {name!r}".format(name=name), name=name)
+
+ # Some special cases:
+
+ if spec.loader is importlib.machinery.BuiltinImporter:
+ return None, None, ("", "", _C_BUILTIN)
+
+ if spec.loader is importlib.machinery.FrozenImporter:
+ return None, None, ("", "", _PY_FROZEN)
+
+ file_path = spec.origin
+
+ if spec.loader.is_package(name):
+ return None, os.path.dirname(file_path), ("", "", _PKG_DIRECTORY)
+
+ if isinstance(spec.loader, importlib.machinery.SourceFileLoader):
+ kind = _PY_SOURCE
+
+ elif isinstance(spec.loader, importlib.machinery.ExtensionFileLoader):
+ kind = _C_EXTENSION
+
+ elif isinstance(spec.loader, importlib.machinery.SourcelessFileLoader):
+ kind = _PY_COMPILED
+
+ else: # Should never happen.
+ return None, None, ("", "", _SEARCH_ERROR)
+
+ file = io.open_code(file_path)
+ suffix = os.path.splitext(file_path)[-1]
+
+ return file, file_path, (suffix, "rb", kind)
+
+
class Module:
def __init__(self, name, file=None, path=None):
@@ -121,7 +121,7 @@ class Module:
class ModuleFinder:
- def __init__(self, path=None, debug=0, excludes=None, replace_paths=None):
+ def __init__(self, path=None, debug=0, excludes=None, replace_paths=None):
if path is None:
path = sys.path
self.path = path
@@ -129,8 +129,8 @@ class ModuleFinder:
self.badmodules = {}
self.debug = debug
self.indent = 0
- self.excludes = excludes if excludes is not None else []
- self.replace_paths = replace_paths if replace_paths is not None else []
+ self.excludes = excludes if excludes is not None else []
+ self.replace_paths = replace_paths if replace_paths is not None else []
self.processed_paths = [] # Used in debugging only
def msg(self, level, str, *args):
@@ -156,15 +156,15 @@ class ModuleFinder:
def run_script(self, pathname):
self.msg(2, "run_script", pathname)
- with io.open_code(pathname) as fp:
- stuff = ("", "rb", _PY_SOURCE)
+ with io.open_code(pathname) as fp:
+ stuff = ("", "rb", _PY_SOURCE)
self.load_module('__main__', fp, pathname, stuff)
def load_file(self, pathname):
dir, name = os.path.split(pathname)
name, ext = os.path.splitext(name)
- with io.open_code(pathname) as fp:
- stuff = (ext, "rb", _PY_SOURCE)
+ with io.open_code(pathname) as fp:
+ stuff = (ext, "rb", _PY_SOURCE)
self.load_module(name, fp, pathname, stuff)
def import_hook(self, name, caller=None, fromlist=None, level=-1):
@@ -318,7 +318,7 @@ class ModuleFinder:
except ImportError:
self.msgout(3, "import_module ->", None)
return None
-
+
try:
m = self.load_module(fqname, fp, pathname, stuff)
finally:
@@ -332,13 +332,13 @@ class ModuleFinder:
def load_module(self, fqname, fp, pathname, file_info):
suffix, mode, type = file_info
self.msgin(2, "load_module", fqname, fp and "fp", pathname)
- if type == _PKG_DIRECTORY:
+ if type == _PKG_DIRECTORY:
m = self.load_package(fqname, pathname)
self.msgout(2, "load_module ->", m)
return m
- if type == _PY_SOURCE:
- co = compile(fp.read(), pathname, 'exec')
- elif type == _PY_COMPILED:
+ if type == _PY_SOURCE:
+ co = compile(fp.read(), pathname, 'exec')
+ elif type == _PY_COMPILED:
try:
data = fp.read()
importlib._bootstrap_external._classify_pyc(data, fqname, {})
@@ -376,15 +376,15 @@ class ModuleFinder:
except ImportError as msg:
self.msg(2, "ImportError:", str(msg))
self._add_badmodule(name, caller)
- except SyntaxError as msg:
- self.msg(2, "SyntaxError:", str(msg))
- self._add_badmodule(name, caller)
+ except SyntaxError as msg:
+ self.msg(2, "SyntaxError:", str(msg))
+ self._add_badmodule(name, caller)
else:
if fromlist:
for sub in fromlist:
- fullname = name + "." + sub
- if fullname in self.badmodules:
- self._add_badmodule(fullname, caller)
+ fullname = name + "." + sub
+ if fullname in self.badmodules:
+ self._add_badmodule(fullname, caller)
continue
try:
self.import_hook(name, caller, [sub], level=level)
@@ -501,12 +501,12 @@ class ModuleFinder:
if path is None:
if name in sys.builtin_module_names:
- return (None, None, ("", "", _C_BUILTIN))
+ return (None, None, ("", "", _C_BUILTIN))
path = self.path
- return _find_module(name, path)
-
+ return _find_module(name, path)
+
def report(self):
"""Print a report to stdout, listing the found modules with their
paths, as well as modules that are missing, or seem to be missing.
@@ -616,7 +616,7 @@ class ModuleFinder:
if isinstance(consts[i], type(co)):
consts[i] = self.replace_paths_in_code(consts[i])
- return co.replace(co_consts=tuple(consts), co_filename=new_filename)
+ return co.replace(co_consts=tuple(consts), co_filename=new_filename)
def test():
diff --git a/contrib/tools/python3/src/Lib/msilib/__init__.py b/contrib/tools/python3/src/Lib/msilib/__init__.py
index 0e85aa2896..83ffe6b575 100644
--- a/contrib/tools/python3/src/Lib/msilib/__init__.py
+++ b/contrib/tools/python3/src/Lib/msilib/__init__.py
@@ -116,7 +116,7 @@ def add_data(db, table, values):
raise TypeError("Unsupported type %s" % field.__class__.__name__)
try:
v.Modify(MSIMODIFY_INSERT, r)
- except Exception:
+ except Exception:
raise MSIError("Could not insert "+repr(values)+" into "+table)
r.ClearData()
@@ -273,7 +273,7 @@ class Directory:
if AMD64:
flags |= 256
if keyfile:
- keyid = self.cab.gen_id(keyfile)
+ keyid = self.cab.gen_id(keyfile)
self.keyfiles[keyfile] = keyid
else:
keyid = None
diff --git a/contrib/tools/python3/src/Lib/msilib/schema.py b/contrib/tools/python3/src/Lib/msilib/schema.py
index 9f5745cfb1..b49ac16564 100644
--- a/contrib/tools/python3/src/Lib/msilib/schema.py
+++ b/contrib/tools/python3/src/Lib/msilib/schema.py
@@ -664,7 +664,7 @@ _Validation_records = [
('Class','Component_','N',None, None, 'Component',1,'Identifier',None, 'Required foreign key into the Component Table, specifying the component for which to return a path when called through LocateComponent.',),
('Class','Context','N',None, None, None, None, 'Identifier',None, 'The numeric server context for this server. CLSCTX_xxxx',),
('Class','DefInprocHandler','Y',None, None, None, None, 'Filename','1;2;3','Optional default inproc handler. Only optionally provided if Context=CLSCTX_LOCAL_SERVER. Typically "ole32.dll" or "mapi32.dll"',),
-('Class','FileTypeMask','Y',None, None, None, None, 'Text',None, 'Optional string containing information for the HKCRthis CLSID key. If multiple patterns exist, they must be delimited by a semicolon, and numeric subkeys will be generated: 0,1,2...',),
+('Class','FileTypeMask','Y',None, None, None, None, 'Text',None, 'Optional string containing information for the HKCRthis CLSID key. If multiple patterns exist, they must be delimited by a semicolon, and numeric subkeys will be generated: 0,1,2...',),
('Class','Icon_','Y',None, None, 'Icon',1,'Identifier',None, 'Optional foreign key into the Icon Table, specifying the icon file associated with this CLSID. Will be written under the DefaultIcon key.',),
('Class','IconIndex','Y',-32767,32767,None, None, None, None, 'Optional icon index.',),
('Class','ProgId_Default','Y',None, None, 'ProgId',1,'Text',None, 'Optional ProgId associated with this CLSID.',),
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/__init__.py b/contrib/tools/python3/src/Lib/multiprocessing/__init__.py
index 8336f381de..d412235024 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/__init__.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/__init__.py
@@ -19,8 +19,8 @@ from . import context
# Copy stuff from default context
#
-__all__ = [x for x in dir(context._default_context) if not x.startswith('_')]
-globals().update((name, getattr(context._default_context, name)) for name in __all__)
+__all__ = [x for x in dir(context._default_context) if not x.startswith('_')]
+globals().update((name, getattr(context._default_context, name)) for name in __all__)
#
# XXX These should not really be documented or public.
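The two restored lines above re-export the default context's public API at package level. A self-contained sketch of the same trick (illustrative names, not from multiprocessing):

    # Lift a context object's public attributes into the module namespace.
    class _Ctx:
        def Lock(self): return "lock"
        def Queue(self): return "queue"

    _default = _Ctx()
    __all__ = [x for x in dir(_default) if not x.startswith('_')]
    globals().update((name, getattr(_default, name)) for name in __all__)
    print(__all__)   # ['Lock', 'Queue'] -- Lock()/Queue() are now module-level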
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/connection.py b/contrib/tools/python3/src/Lib/multiprocessing/connection.py
index 510e4b5aba..850c317da5 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/connection.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/connection.py
@@ -73,11 +73,11 @@ def arbitrary_address(family):
if family == 'AF_INET':
return ('localhost', 0)
elif family == 'AF_UNIX':
- # Prefer abstract sockets if possible to avoid problems with the address
- # size. When coding portable applications, some implementations have
- # sun_path as short as 92 bytes in the sockaddr_un struct.
- if util.abstract_sockets_supported:
- return f"\0listener-{os.getpid()}-{next(_mmap_counter)}"
+ # Prefer abstract sockets if possible to avoid problems with the address
+ # size. When coding portable applications, some implementations have
+ # sun_path as short as 92 bytes in the sockaddr_un struct.
+ if util.abstract_sockets_supported:
+ return f"\0listener-{os.getpid()}-{next(_mmap_counter)}"
return tempfile.mktemp(prefix='listener-', dir=util.get_temp_dir())
elif family == 'AF_PIPE':
return tempfile.mktemp(prefix=r'\\.\pipe\pyc-%d-%d-' %
@@ -107,7 +107,7 @@ def address_type(address):
return 'AF_INET'
elif type(address) is str and address.startswith('\\\\'):
return 'AF_PIPE'
- elif type(address) is str or util.is_abstract_socket_namespace(address):
+ elif type(address) is str or util.is_abstract_socket_namespace(address):
return 'AF_UNIX'
else:
raise ValueError('address type of %r unrecognized' % address)
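For context on the abstract-socket preference restored above: on Linux, an AF_UNIX address starting with a NUL byte lives in the abstract namespace, never touches the filesystem, and so sidesteps the short sun_path limit (as small as 92 bytes on some platforms). A Linux-only sketch:

    import socket

    s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    s.bind("\0demo-abstract-listener")   # no filesystem entry is created
    print(s.getsockname())               # b'\x00demo-abstract-listener'
    s.close()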
@@ -394,33 +394,33 @@ class Connection(_ConnectionBase):
def _send_bytes(self, buf):
n = len(buf)
- if n > 0x7fffffff:
- pre_header = struct.pack("!i", -1)
- header = struct.pack("!Q", n)
- self._send(pre_header)
+ if n > 0x7fffffff:
+ pre_header = struct.pack("!i", -1)
+ header = struct.pack("!Q", n)
+ self._send(pre_header)
self._send(header)
self._send(buf)
else:
- # For wire compatibility with 3.7 and lower
- header = struct.pack("!i", n)
- if n > 16384:
- # The payload is large so Nagle's algorithm won't be triggered
- # and we'd better avoid the cost of concatenation.
- self._send(header)
- self._send(buf)
- else:
- # Issue #20540: concatenate before sending, to avoid delays due
- # to Nagle's algorithm on a TCP socket.
- # Also note we want to avoid sending a 0-length buffer separately,
- # to avoid "broken pipe" errors if the other end closed the pipe.
- self._send(header + buf)
+ # For wire compatibility with 3.7 and lower
+ header = struct.pack("!i", n)
+ if n > 16384:
+ # The payload is large so Nagle's algorithm won't be triggered
+ # and we'd better avoid the cost of concatenation.
+ self._send(header)
+ self._send(buf)
+ else:
+ # Issue #20540: concatenate before sending, to avoid delays due
+ # to Nagle's algorithm on a TCP socket.
+ # Also note we want to avoid sending a 0-length buffer separately,
+ # to avoid "broken pipe" errors if the other end closed the pipe.
+ self._send(header + buf)
def _recv_bytes(self, maxsize=None):
buf = self._recv(4)
size, = struct.unpack("!i", buf.getvalue())
- if size == -1:
- buf = self._recv(8)
- size, = struct.unpack("!Q", buf.getvalue())
+ if size == -1:
+ buf = self._recv(8)
+ size, = struct.unpack("!Q", buf.getvalue())
if maxsize is not None and size > maxsize:
return None
return self._recv(size)
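A sketch of the wire format handled by _send_bytes()/_recv_bytes() above: a signed 32-bit big-endian length header, with -1 as an escape that promotes the length to an unsigned 64-bit field for payloads over 0x7fffffff bytes:

    import struct

    def frame(payload: bytes) -> bytes:
        n = len(payload)
        if n > 0x7fffffff:
            return struct.pack("!i", -1) + struct.pack("!Q", n) + payload
        return struct.pack("!i", n) + payload   # wire-compatible with <= 3.7

    print(frame(b"ping")[:4])   # b'\x00\x00\x00\x04'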
@@ -602,8 +602,8 @@ class SocketListener(object):
self._family = family
self._last_accepted = None
- if family == 'AF_UNIX' and not util.is_abstract_socket_namespace(address):
- # Linux abstract socket namespaces do not need to be explicitly unlinked
+ if family == 'AF_UNIX' and not util.is_abstract_socket_namespace(address):
+ # Linux abstract socket namespaces do not need to be explicitly unlinked
self._unlink = util.Finalize(
self, os.unlink, args=(address,), exitpriority=0
)
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/context.py b/contrib/tools/python3/src/Lib/multiprocessing/context.py
index 8d0525d5d6..eb1917bcbf 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/context.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/context.py
@@ -5,7 +5,7 @@ import threading
from . import process
from . import reduction
-__all__ = ()
+__all__ = ()
#
# Exceptions
@@ -24,7 +24,7 @@ class AuthenticationError(ProcessError):
pass
#
-# Base type for contexts. Bound methods of an instance of this type are included in __all__ of __init__.py
+# Base type for contexts. Bound methods of an instance of this type are included in __all__ of __init__.py
#
class BaseContext(object):
@@ -35,7 +35,7 @@ class BaseContext(object):
AuthenticationError = AuthenticationError
current_process = staticmethod(process.current_process)
- parent_process = staticmethod(process.parent_process)
+ parent_process = staticmethod(process.parent_process)
active_children = staticmethod(process.active_children)
def cpu_count(self):
@@ -257,10 +257,10 @@ class DefaultContext(BaseContext):
if sys.platform == 'win32':
return ['spawn']
else:
- methods = ['spawn', 'fork'] if sys.platform == 'darwin' else ['fork', 'spawn']
+ methods = ['spawn', 'fork'] if sys.platform == 'darwin' else ['fork', 'spawn']
if reduction.HAVE_SEND_HANDLE:
- methods.append('forkserver')
- return methods
+ methods.append('forkserver')
+ return methods
#
@@ -310,12 +310,12 @@ if sys.platform != 'win32':
'spawn': SpawnContext(),
'forkserver': ForkServerContext(),
}
- if sys.platform == 'darwin':
- # bpo-33725: running arbitrary code after fork() is no longer reliable
- # on macOS since macOS 10.14 (Mojave). Use spawn by default instead.
- _default_context = DefaultContext(_concrete_contexts['spawn'])
- else:
- _default_context = DefaultContext(_concrete_contexts['fork'])
+ if sys.platform == 'darwin':
+ # bpo-33725: running arbitrary code after fork() is no longer reliable
+ # on macOS since macOS 10.14 (Mojave). Use spawn by default instead.
+ _default_context = DefaultContext(_concrete_contexts['spawn'])
+ else:
+ _default_context = DefaultContext(_concrete_contexts['fork'])
else:
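The hunk above restores the bpo-33725 behaviour: macOS defaults to the spawn start method while other POSIX platforms keep fork. A quick way to observe or override the default (standard multiprocessing API):

    import multiprocessing as mp

    print(mp.get_start_method())    # 'fork' on Linux, 'spawn' on macOS/Windows
    ctx = mp.get_context("spawn")   # opt in to a specific method explicitly
    print(ctx.get_start_method())   # 'spawn'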
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/dummy/__init__.py b/contrib/tools/python3/src/Lib/multiprocessing/dummy/__init__.py
index 6a1468609e..1ecf8376d3 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/dummy/__init__.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/dummy/__init__.py
@@ -80,7 +80,7 @@ def freeze_support():
#
class Namespace(object):
- def __init__(self, /, **kwds):
+ def __init__(self, /, **kwds):
self.__dict__.update(kwds)
def __repr__(self):
items = list(self.__dict__.items())
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/forkserver.py b/contrib/tools/python3/src/Lib/multiprocessing/forkserver.py
index 22a911a7a2..45193a1235 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/forkserver.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/forkserver.py
@@ -11,7 +11,7 @@ import warnings
from . import connection
from . import process
from .context import reduction
-from . import resource_tracker
+from . import resource_tracker
from . import spawn
from . import util
@@ -39,26 +39,26 @@ class ForkServer(object):
self._lock = threading.Lock()
self._preload_modules = ['__main__']
- def _stop(self):
- # Method used by unit tests to stop the server
- with self._lock:
- self._stop_unlocked()
-
- def _stop_unlocked(self):
- if self._forkserver_pid is None:
- return
-
-        # closing the "alive" file descriptor asks the server to stop
- os.close(self._forkserver_alive_fd)
- self._forkserver_alive_fd = None
-
- os.waitpid(self._forkserver_pid, 0)
- self._forkserver_pid = None
-
- if not util.is_abstract_socket_namespace(self._forkserver_address):
- os.unlink(self._forkserver_address)
- self._forkserver_address = None
-
+ def _stop(self):
+ # Method used by unit tests to stop the server
+ with self._lock:
+ self._stop_unlocked()
+
+ def _stop_unlocked(self):
+ if self._forkserver_pid is None:
+ return
+
+        # closing the "alive" file descriptor asks the server to stop
+ os.close(self._forkserver_alive_fd)
+ self._forkserver_alive_fd = None
+
+ os.waitpid(self._forkserver_pid, 0)
+ self._forkserver_pid = None
+
+ if not util.is_abstract_socket_namespace(self._forkserver_address):
+ os.unlink(self._forkserver_address)
+ self._forkserver_address = None
+
def set_forkserver_preload(self, modules_names):
'''Set list of module names to try to load in forkserver process.'''
if not all(type(mod) is str for mod in self._preload_modules):
@@ -89,7 +89,7 @@ class ForkServer(object):
parent_r, child_w = os.pipe()
child_r, parent_w = os.pipe()
allfds = [child_r, child_w, self._forkserver_alive_fd,
- resource_tracker.getfd()]
+ resource_tracker.getfd()]
allfds += fds
try:
reduction.sendfds(client, allfds)
@@ -110,7 +110,7 @@ class ForkServer(object):
ensure_running() will do nothing.
'''
with self._lock:
- resource_tracker.ensure_running()
+ resource_tracker.ensure_running()
if self._forkserver_pid is not None:
# forkserver was launched before, is it still running?
pid, status = os.waitpid(self._forkserver_pid, os.WNOHANG)
@@ -136,8 +136,8 @@ class ForkServer(object):
with socket.socket(socket.AF_UNIX) as listener:
address = connection.arbitrary_address('AF_UNIX')
listener.bind(address)
- if not util.is_abstract_socket_namespace(address):
- os.chmod(address, 0o600)
+ if not util.is_abstract_socket_namespace(address):
+ os.chmod(address, 0o600)
listener.listen()
# all client processes own the write end of the "alive" pipe;
@@ -237,8 +237,8 @@ def main(listener_fd, alive_r, preload, main_path=None, sys_path=None):
break
child_w = pid_to_fd.pop(pid, None)
if child_w is not None:
- returncode = os.waitstatus_to_exitcode(sts)
-
+ returncode = os.waitstatus_to_exitcode(sts)
+
# Send exit code to client process
try:
write_signed(child_w, returncode)
@@ -305,12 +305,12 @@ def _serve_one(child_r, fds, unused_fds, handlers):
os.close(fd)
(_forkserver._forkserver_alive_fd,
- resource_tracker._resource_tracker._fd,
+ resource_tracker._resource_tracker._fd,
*_forkserver._inherited_fds) = fds
# Run process object received over pipe
- parent_sentinel = os.dup(child_r)
- code = spawn._main(child_r, parent_sentinel)
+ parent_sentinel = os.dup(child_r)
+ code = spawn._main(child_r, parent_sentinel)
return code
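The restored call to os.waitstatus_to_exitcode() (new in 3.9) replaces manual WIFEXITED/WEXITSTATUS decoding of the raw status returned by os.waitpid(). A POSIX-only sketch of the conversion:

    import os

    # An exit code of 3 is encoded in the high byte of a POSIX wait status.
    print(os.waitstatus_to_exitcode(3 << 8))   # 3
    # Death by signal comes back as the negative signal number instead.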
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/heap.py b/contrib/tools/python3/src/Lib/multiprocessing/heap.py
index 6217dfe126..909c1e1df9 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/heap.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/heap.py
@@ -8,7 +8,7 @@
#
import bisect
-from collections import defaultdict
+from collections import defaultdict
import mmap
import os
import sys
@@ -29,9 +29,9 @@ if sys.platform == 'win32':
import _winapi
class Arena(object):
- """
- A shared memory area backed by anonymous memory (Windows).
- """
+ """
+ A shared memory area backed by anonymous memory (Windows).
+ """
_rand = tempfile._RandomNameSequence()
@@ -56,7 +56,7 @@ if sys.platform == 'win32':
def __setstate__(self, state):
self.size, self.name = self._state = state
- # Reopen existing mmap
+ # Reopen existing mmap
self.buffer = mmap.mmap(-1, self.size, tagname=self.name)
# XXX Temporarily preventing buildbot failures while determining
# XXX the correct long-term fix. See issue 23060
@@ -65,10 +65,10 @@ if sys.platform == 'win32':
else:
class Arena(object):
- """
- A shared memory area backed by a temporary file (POSIX).
- """
-
+ """
+ A shared memory area backed by a temporary file (POSIX).
+ """
+
if sys.platform == 'linux':
_dir_candidates = ['/dev/shm']
else:
@@ -78,8 +78,8 @@ else:
self.size = size
self.fd = fd
if fd == -1:
- # Arena is created anew (if fd != -1, it means we're coming
- # from rebuild_arena() below)
+ # Arena is created anew (if fd != -1, it means we're coming
+ # from rebuild_arena() below)
self.fd, name = tempfile.mkstemp(
prefix='pym-%d-'%os.getpid(),
dir=self._choose_dir(size))
@@ -114,82 +114,82 @@ else:
class Heap(object):
- # Minimum malloc() alignment
+ # Minimum malloc() alignment
_alignment = 8
- _DISCARD_FREE_SPACE_LARGER_THAN = 4 * 1024 ** 2 # 4 MB
- _DOUBLE_ARENA_SIZE_UNTIL = 4 * 1024 ** 2
-
+ _DISCARD_FREE_SPACE_LARGER_THAN = 4 * 1024 ** 2 # 4 MB
+ _DOUBLE_ARENA_SIZE_UNTIL = 4 * 1024 ** 2
+
def __init__(self, size=mmap.PAGESIZE):
self._lastpid = os.getpid()
self._lock = threading.Lock()
- # Current arena allocation size
+ # Current arena allocation size
self._size = size
- # A sorted list of available block sizes in arenas
+ # A sorted list of available block sizes in arenas
self._lengths = []
-
- # Free block management:
- # - map each block size to a list of `(Arena, start, stop)` blocks
+
+ # Free block management:
+ # - map each block size to a list of `(Arena, start, stop)` blocks
self._len_to_seq = {}
- # - map `(Arena, start)` tuple to the `(Arena, start, stop)` block
- # starting at that offset
+ # - map `(Arena, start)` tuple to the `(Arena, start, stop)` block
+ # starting at that offset
self._start_to_block = {}
- # - map `(Arena, stop)` tuple to the `(Arena, start, stop)` block
- # ending at that offset
+ # - map `(Arena, stop)` tuple to the `(Arena, start, stop)` block
+ # ending at that offset
self._stop_to_block = {}
-
- # Map arenas to their `(Arena, start, stop)` blocks in use
- self._allocated_blocks = defaultdict(set)
+
+ # Map arenas to their `(Arena, start, stop)` blocks in use
+ self._allocated_blocks = defaultdict(set)
self._arenas = []
-
- # List of pending blocks to free - see comment in free() below
+
+ # List of pending blocks to free - see comment in free() below
self._pending_free_blocks = []
- # Statistics
- self._n_mallocs = 0
- self._n_frees = 0
-
+ # Statistics
+ self._n_mallocs = 0
+ self._n_frees = 0
+
@staticmethod
def _roundup(n, alignment):
# alignment must be a power of 2
mask = alignment - 1
return (n + mask) & ~mask
- def _new_arena(self, size):
- # Create a new arena with at least the given *size*
- length = self._roundup(max(self._size, size), mmap.PAGESIZE)
- # We carve larger and larger arenas, for efficiency, until we
- # reach a large-ish size (roughly L3 cache-sized)
- if self._size < self._DOUBLE_ARENA_SIZE_UNTIL:
- self._size *= 2
- util.info('allocating a new mmap of length %d', length)
- arena = Arena(length)
- self._arenas.append(arena)
- return (arena, 0, length)
-
- def _discard_arena(self, arena):
- # Possibly delete the given (unused) arena
- length = arena.size
- # Reusing an existing arena is faster than creating a new one, so
- # we only reclaim space if it's large enough.
- if length < self._DISCARD_FREE_SPACE_LARGER_THAN:
- return
- blocks = self._allocated_blocks.pop(arena)
- assert not blocks
- del self._start_to_block[(arena, 0)]
- del self._stop_to_block[(arena, length)]
- self._arenas.remove(arena)
- seq = self._len_to_seq[length]
- seq.remove((arena, 0, length))
- if not seq:
- del self._len_to_seq[length]
- self._lengths.remove(length)
-
+ def _new_arena(self, size):
+ # Create a new arena with at least the given *size*
+ length = self._roundup(max(self._size, size), mmap.PAGESIZE)
+ # We carve larger and larger arenas, for efficiency, until we
+ # reach a large-ish size (roughly L3 cache-sized)
+ if self._size < self._DOUBLE_ARENA_SIZE_UNTIL:
+ self._size *= 2
+ util.info('allocating a new mmap of length %d', length)
+ arena = Arena(length)
+ self._arenas.append(arena)
+ return (arena, 0, length)
+
+ def _discard_arena(self, arena):
+ # Possibly delete the given (unused) arena
+ length = arena.size
+ # Reusing an existing arena is faster than creating a new one, so
+ # we only reclaim space if it's large enough.
+ if length < self._DISCARD_FREE_SPACE_LARGER_THAN:
+ return
+ blocks = self._allocated_blocks.pop(arena)
+ assert not blocks
+ del self._start_to_block[(arena, 0)]
+ del self._stop_to_block[(arena, length)]
+ self._arenas.remove(arena)
+ seq = self._len_to_seq[length]
+ seq.remove((arena, 0, length))
+ if not seq:
+ del self._len_to_seq[length]
+ self._lengths.remove(length)
+
def _malloc(self, size):
# returns a large enough block -- it might be much larger
i = bisect.bisect_left(self._lengths, size)
if i == len(self._lengths):
- return self._new_arena(size)
+ return self._new_arena(size)
else:
length = self._lengths[i]
seq = self._len_to_seq[length]
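The _roundup() helper shown in the hunk above relies on power-of-two bit masking; a standalone sketch of the same arithmetic:

    def roundup(n, alignment):
        # alignment must be a power of 2, so alignment - 1 is an all-ones mask
        mask = alignment - 1
        return (n + mask) & ~mask

    assert roundup(1, 8) == 8
    assert roundup(17, 8) == 24
    assert roundup(4096, 4096) == 4096   # already-aligned values pass through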
@@ -202,8 +202,8 @@ class Heap(object):
del self._stop_to_block[(arena, stop)]
return block
- def _add_free_block(self, block):
- # make block available and try to merge with its neighbours in the arena
+ def _add_free_block(self, block):
+ # make block available and try to merge with its neighbours in the arena
(arena, start, stop) = block
try:
@@ -247,14 +247,14 @@ class Heap(object):
return start, stop
- def _remove_allocated_block(self, block):
- arena, start, stop = block
- blocks = self._allocated_blocks[arena]
- blocks.remove((start, stop))
- if not blocks:
- # Arena is entirely free, discard it from this process
- self._discard_arena(arena)
-
+ def _remove_allocated_block(self, block):
+ arena, start, stop = block
+ blocks = self._allocated_blocks[arena]
+ blocks.remove((start, stop))
+ if not blocks:
+ # Arena is entirely free, discard it from this process
+ self._discard_arena(arena)
+
def _free_pending_blocks(self):
# Free all the blocks in the pending list - called with the lock held.
while True:
@@ -262,8 +262,8 @@ class Heap(object):
block = self._pending_free_blocks.pop()
except IndexError:
break
- self._add_free_block(block)
- self._remove_allocated_block(block)
+ self._add_free_block(block)
+ self._remove_allocated_block(block)
def free(self, block):
# free a block returned by malloc()
@@ -274,7 +274,7 @@ class Heap(object):
# immediately, the block is added to a list of blocks to be freed
# synchronously sometimes later from malloc() or free(), by calling
# _free_pending_blocks() (appending and retrieving from a list is not
- # strictly thread-safe but under CPython it's atomic thanks to the GIL).
+ # strictly thread-safe but under CPython it's atomic thanks to the GIL).
if os.getpid() != self._lastpid:
raise ValueError(
"My pid ({0:n}) is not last pid {1:n}".format(
@@ -286,10 +286,10 @@ class Heap(object):
else:
# we hold the lock
try:
- self._n_frees += 1
+ self._n_frees += 1
self._free_pending_blocks()
- self._add_free_block(block)
- self._remove_allocated_block(block)
+ self._add_free_block(block)
+ self._remove_allocated_block(block)
finally:
self._lock.release()
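The comment restored above leans on list.append() and list.pop() being effectively atomic under CPython's GIL, which is why _pending_free_blocks needs no lock of its own. A small demonstration of that property (a CPython-specific assumption):

    import threading

    pending = []

    def producer():
        for i in range(10_000):
            pending.append(i)   # safe without a lock under the GIL

    threads = [threading.Thread(target=producer) for _ in range(4)]
    for t in threads: t.start()
    for t in threads: t.join()
    print(len(pending))   # 40000 -- no appends were lost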
@@ -302,21 +302,21 @@ class Heap(object):
if os.getpid() != self._lastpid:
self.__init__() # reinitialize after fork
with self._lock:
- self._n_mallocs += 1
- # allow pending blocks to be marked available
+ self._n_mallocs += 1
+ # allow pending blocks to be marked available
self._free_pending_blocks()
- size = self._roundup(max(size, 1), self._alignment)
+ size = self._roundup(max(size, 1), self._alignment)
(arena, start, stop) = self._malloc(size)
- real_stop = start + size
- if real_stop < stop:
- # if the returned block is larger than necessary, mark
- # the remainder available
- self._add_free_block((arena, real_stop, stop))
- self._allocated_blocks[arena].add((start, real_stop))
- return (arena, start, real_stop)
+ real_stop = start + size
+ if real_stop < stop:
+ # if the returned block is larger than necessary, mark
+ # the remainder available
+ self._add_free_block((arena, real_stop, stop))
+ self._allocated_blocks[arena].add((start, real_stop))
+ return (arena, start, real_stop)
#
-# Class wrapping a block allocated out of a Heap -- can be inherited by child process
+# Class wrapping a block allocated out of a Heap -- can be inherited by child process
#
class BufferWrapper(object):
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/managers.py b/contrib/tools/python3/src/Lib/multiprocessing/managers.py
index dfa566c6fc..047ea27672 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/managers.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/managers.py
@@ -1,5 +1,5 @@
#
-# Module providing manager classes for dealing
+# Module providing manager classes for dealing
# with shared objects
#
# multiprocessing/managers.py
@@ -8,7 +8,7 @@
# Licensed to PSF under a Contributor Agreement.
#
-__all__ = [ 'BaseManager', 'SyncManager', 'BaseProxy', 'Token' ]
+__all__ = [ 'BaseManager', 'SyncManager', 'BaseProxy', 'Token' ]
#
# Imports
@@ -16,13 +16,13 @@ __all__ = [ 'BaseManager', 'SyncManager', 'BaseProxy', 'Token' ]
import sys
import threading
-import signal
+import signal
import array
import queue
import time
-import types
-import os
-from os import getpid
+import types
+import os
+from os import getpid
from traceback import format_exc
@@ -32,13 +32,13 @@ from . import pool
from . import process
from . import util
from . import get_context
-try:
- from . import shared_memory
-except ImportError:
- HAS_SHMEM = False
-else:
- HAS_SHMEM = True
- __all__.append('SharedMemoryManager')
+try:
+ from . import shared_memory
+except ImportError:
+ HAS_SHMEM = False
+else:
+ HAS_SHMEM = True
+ __all__.append('SharedMemoryManager')
#
# Register some things for pickling
@@ -61,7 +61,7 @@ if view_types[0] is not list: # only needed in Py3.0
class Token(object):
'''
- Type to uniquely identify a shared object
+ Type to uniquely identify a shared object
'''
__slots__ = ('typeid', 'address', 'id')
@@ -250,7 +250,7 @@ class Server(object):
try:
obj, exposed, gettypeid = \
self.id_to_local_proxy_obj[ident]
- except KeyError:
+ except KeyError:
raise ke
if methodname not in exposed:
@@ -298,7 +298,7 @@ class Server(object):
try:
try:
send(msg)
- except Exception:
+ except Exception:
send(('#UNSERIALIZABLE', format_exc()))
except Exception as e:
util.info('exception in thread serving %r',
@@ -362,7 +362,7 @@ class Server(object):
finally:
self.stop_event.set()
- def create(self, c, typeid, /, *args, **kwds):
+ def create(self, c, typeid, /, *args, **kwds):
'''
Create a new shared object and return its id
'''
@@ -573,9 +573,9 @@ class BaseManager(object):
'''
Create a server, report its address and run it
'''
- # bpo-36368: protect server process from KeyboardInterrupt signals
- signal.signal(signal.SIGINT, signal.SIG_IGN)
-
+ # bpo-36368: protect server process from KeyboardInterrupt signals
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+
if initializer is not None:
initializer(*initargs)
@@ -590,7 +590,7 @@ class BaseManager(object):
util.info('manager serving at %r', server.address)
server.serve_forever()
- def _create(self, typeid, /, *args, **kwds):
+ def _create(self, typeid, /, *args, **kwds):
'''
Create a new shared object; return the token and exposed tuple
'''
@@ -710,7 +710,7 @@ class BaseManager(object):
)
if create_method:
- def temp(self, /, *args, **kwds):
+ def temp(self, /, *args, **kwds):
util.debug('requesting creation of a shared %r object', typeid)
token, exp = self._create(typeid, *args, **kwds)
proxy = proxytype(
@@ -796,7 +796,7 @@ class BaseProxy(object):
def _callmethod(self, methodname, args=(), kwds={}):
'''
- Try to call a method of the referent and return a copy of the result
+ Try to call a method of the referent and return a copy of the result
'''
try:
conn = self._tls.connection
@@ -950,7 +950,7 @@ def MakeProxyType(name, exposed, _cache={}):
dic = {}
for meth in exposed:
- exec('''def %s(self, /, *args, **kwds):
+ exec('''def %s(self, /, *args, **kwds):
return self._callmethod(%r, args, kwds)''' % (meth, meth), dic)
ProxyType = type(name, (BaseProxy,), dic)
@@ -989,7 +989,7 @@ def AutoProxy(token, serializer, manager=None, authkey=None,
#
class Namespace(object):
- def __init__(self, /, **kwds):
+ def __init__(self, /, **kwds):
self.__dict__.update(kwds)
def __repr__(self):
items = list(self.__dict__.items())
@@ -1131,9 +1131,9 @@ class ValueProxy(BaseProxy):
return self._callmethod('set', (value,))
value = property(get, set)
- __class_getitem__ = classmethod(types.GenericAlias)
-
+ __class_getitem__ = classmethod(types.GenericAlias)
+
BaseListProxy = MakeProxyType('BaseListProxy', (
'__add__', '__contains__', '__delitem__', '__getitem__', '__len__',
'__mul__', '__reversed__', '__rmul__', '__setitem__',
@@ -1216,155 +1216,155 @@ SyncManager.register('Namespace', Namespace, NamespaceProxy)
# types returned by methods of PoolProxy
SyncManager.register('Iterator', proxytype=IteratorProxy, create_method=False)
SyncManager.register('AsyncResult', create_method=False)
-
-#
-# Definition of SharedMemoryManager and SharedMemoryServer
-#
-
-if HAS_SHMEM:
- class _SharedMemoryTracker:
- "Manages one or more shared memory segments."
-
- def __init__(self, name, segment_names=[]):
- self.shared_memory_context_name = name
- self.segment_names = segment_names
-
- def register_segment(self, segment_name):
- "Adds the supplied shared memory block name to tracker."
- util.debug(f"Register segment {segment_name!r} in pid {getpid()}")
- self.segment_names.append(segment_name)
-
- def destroy_segment(self, segment_name):
- """Calls unlink() on the shared memory block with the supplied name
- and removes it from the list of blocks being tracked."""
- util.debug(f"Destroy segment {segment_name!r} in pid {getpid()}")
- self.segment_names.remove(segment_name)
- segment = shared_memory.SharedMemory(segment_name)
- segment.close()
- segment.unlink()
-
- def unlink(self):
- "Calls destroy_segment() on all tracked shared memory blocks."
- for segment_name in self.segment_names[:]:
- self.destroy_segment(segment_name)
-
- def __del__(self):
- util.debug(f"Call {self.__class__.__name__}.__del__ in {getpid()}")
- self.unlink()
-
- def __getstate__(self):
- return (self.shared_memory_context_name, self.segment_names)
-
- def __setstate__(self, state):
- self.__init__(*state)
-
-
- class SharedMemoryServer(Server):
-
- public = Server.public + \
- ['track_segment', 'release_segment', 'list_segments']
-
- def __init__(self, *args, **kwargs):
- Server.__init__(self, *args, **kwargs)
- address = self.address
- # The address of Linux abstract namespaces can be bytes
- if isinstance(address, bytes):
- address = os.fsdecode(address)
- self.shared_memory_context = \
- _SharedMemoryTracker(f"shm_{address}_{getpid()}")
- util.debug(f"SharedMemoryServer started by pid {getpid()}")
-
- def create(self, c, typeid, /, *args, **kwargs):
- """Create a new distributed-shared object (not backed by a shared
- memory block) and return its id to be used in a Proxy Object."""
- # Unless set up as a shared proxy, don't make shared_memory_context
- # a standard part of kwargs. This makes things easier for supplying
- # simple functions.
- if hasattr(self.registry[typeid][-1], "_shared_memory_proxy"):
- kwargs['shared_memory_context'] = self.shared_memory_context
- return Server.create(self, c, typeid, *args, **kwargs)
-
- def shutdown(self, c):
- "Call unlink() on all tracked shared memory, terminate the Server."
- self.shared_memory_context.unlink()
- return Server.shutdown(self, c)
-
- def track_segment(self, c, segment_name):
- "Adds the supplied shared memory block name to Server's tracker."
- self.shared_memory_context.register_segment(segment_name)
-
- def release_segment(self, c, segment_name):
- """Calls unlink() on the shared memory block with the supplied name
- and removes it from the tracker instance inside the Server."""
- self.shared_memory_context.destroy_segment(segment_name)
-
- def list_segments(self, c):
- """Returns a list of names of shared memory blocks that the Server
- is currently tracking."""
- return self.shared_memory_context.segment_names
-
-
- class SharedMemoryManager(BaseManager):
- """Like SyncManager but uses SharedMemoryServer instead of Server.
-
- It provides methods for creating and returning SharedMemory instances
- and for creating a list-like object (ShareableList) backed by shared
- memory. It also provides methods that create and return Proxy Objects
- that support synchronization across processes (i.e. multi-process-safe
- locks and semaphores).
- """
-
- _Server = SharedMemoryServer
-
- def __init__(self, *args, **kwargs):
- if os.name == "posix":
- # bpo-36867: Ensure the resource_tracker is running before
- # launching the manager process, so that concurrent
- # shared_memory manipulation both in the manager and in the
- # current process does not create two resource_tracker
- # processes.
- from . import resource_tracker
- resource_tracker.ensure_running()
- BaseManager.__init__(self, *args, **kwargs)
- util.debug(f"{self.__class__.__name__} created by pid {getpid()}")
-
- def __del__(self):
- util.debug(f"{self.__class__.__name__}.__del__ by pid {getpid()}")
- pass
-
- def get_server(self):
- 'Better than monkeypatching for now; merge into Server ultimately'
- if self._state.value != State.INITIAL:
- if self._state.value == State.STARTED:
- raise ProcessError("Already started SharedMemoryServer")
- elif self._state.value == State.SHUTDOWN:
- raise ProcessError("SharedMemoryManager has shut down")
- else:
- raise ProcessError(
- "Unknown state {!r}".format(self._state.value))
- return self._Server(self._registry, self._address,
- self._authkey, self._serializer)
-
- def SharedMemory(self, size):
- """Returns a new SharedMemory instance with the specified size in
- bytes, to be tracked by the manager."""
- with self._Client(self._address, authkey=self._authkey) as conn:
- sms = shared_memory.SharedMemory(None, create=True, size=size)
- try:
- dispatch(conn, None, 'track_segment', (sms.name,))
- except BaseException as e:
- sms.unlink()
- raise e
- return sms
-
- def ShareableList(self, sequence):
- """Returns a new ShareableList instance populated with the values
- from the input sequence, to be tracked by the manager."""
- with self._Client(self._address, authkey=self._authkey) as conn:
- sl = shared_memory.ShareableList(sequence)
- try:
- dispatch(conn, None, 'track_segment', (sl.shm.name,))
- except BaseException as e:
- sl.shm.unlink()
- raise e
- return sl
+
+#
+# Definition of SharedMemoryManager and SharedMemoryServer
+#
+
+if HAS_SHMEM:
+ class _SharedMemoryTracker:
+ "Manages one or more shared memory segments."
+
+ def __init__(self, name, segment_names=[]):
+ self.shared_memory_context_name = name
+ self.segment_names = segment_names
+
+ def register_segment(self, segment_name):
+ "Adds the supplied shared memory block name to tracker."
+ util.debug(f"Register segment {segment_name!r} in pid {getpid()}")
+ self.segment_names.append(segment_name)
+
+ def destroy_segment(self, segment_name):
+ """Calls unlink() on the shared memory block with the supplied name
+ and removes it from the list of blocks being tracked."""
+ util.debug(f"Destroy segment {segment_name!r} in pid {getpid()}")
+ self.segment_names.remove(segment_name)
+ segment = shared_memory.SharedMemory(segment_name)
+ segment.close()
+ segment.unlink()
+
+ def unlink(self):
+ "Calls destroy_segment() on all tracked shared memory blocks."
+ for segment_name in self.segment_names[:]:
+ self.destroy_segment(segment_name)
+
+ def __del__(self):
+ util.debug(f"Call {self.__class__.__name__}.__del__ in {getpid()}")
+ self.unlink()
+
+ def __getstate__(self):
+ return (self.shared_memory_context_name, self.segment_names)
+
+ def __setstate__(self, state):
+ self.__init__(*state)
+
+
+ class SharedMemoryServer(Server):
+
+ public = Server.public + \
+ ['track_segment', 'release_segment', 'list_segments']
+
+ def __init__(self, *args, **kwargs):
+ Server.__init__(self, *args, **kwargs)
+ address = self.address
+ # The address of Linux abstract namespaces can be bytes
+ if isinstance(address, bytes):
+ address = os.fsdecode(address)
+ self.shared_memory_context = \
+ _SharedMemoryTracker(f"shm_{address}_{getpid()}")
+ util.debug(f"SharedMemoryServer started by pid {getpid()}")
+
+ def create(self, c, typeid, /, *args, **kwargs):
+ """Create a new distributed-shared object (not backed by a shared
+ memory block) and return its id to be used in a Proxy Object."""
+ # Unless set up as a shared proxy, don't make shared_memory_context
+ # a standard part of kwargs. This makes things easier for supplying
+ # simple functions.
+ if hasattr(self.registry[typeid][-1], "_shared_memory_proxy"):
+ kwargs['shared_memory_context'] = self.shared_memory_context
+ return Server.create(self, c, typeid, *args, **kwargs)
+
+ def shutdown(self, c):
+ "Call unlink() on all tracked shared memory, terminate the Server."
+ self.shared_memory_context.unlink()
+ return Server.shutdown(self, c)
+
+ def track_segment(self, c, segment_name):
+ "Adds the supplied shared memory block name to Server's tracker."
+ self.shared_memory_context.register_segment(segment_name)
+
+ def release_segment(self, c, segment_name):
+ """Calls unlink() on the shared memory block with the supplied name
+ and removes it from the tracker instance inside the Server."""
+ self.shared_memory_context.destroy_segment(segment_name)
+
+ def list_segments(self, c):
+ """Returns a list of names of shared memory blocks that the Server
+ is currently tracking."""
+ return self.shared_memory_context.segment_names
+
+
+ class SharedMemoryManager(BaseManager):
+ """Like SyncManager but uses SharedMemoryServer instead of Server.
+
+ It provides methods for creating and returning SharedMemory instances
+ and for creating a list-like object (ShareableList) backed by shared
+ memory. It also provides methods that create and return Proxy Objects
+ that support synchronization across processes (i.e. multi-process-safe
+ locks and semaphores).
+ """
+
+ _Server = SharedMemoryServer
+
+ def __init__(self, *args, **kwargs):
+ if os.name == "posix":
+ # bpo-36867: Ensure the resource_tracker is running before
+ # launching the manager process, so that concurrent
+ # shared_memory manipulation both in the manager and in the
+ # current process does not create two resource_tracker
+ # processes.
+ from . import resource_tracker
+ resource_tracker.ensure_running()
+ BaseManager.__init__(self, *args, **kwargs)
+ util.debug(f"{self.__class__.__name__} created by pid {getpid()}")
+
+ def __del__(self):
+ util.debug(f"{self.__class__.__name__}.__del__ by pid {getpid()}")
+ pass
+
+ def get_server(self):
+ 'Better than monkeypatching for now; merge into Server ultimately'
+ if self._state.value != State.INITIAL:
+ if self._state.value == State.STARTED:
+ raise ProcessError("Already started SharedMemoryServer")
+ elif self._state.value == State.SHUTDOWN:
+ raise ProcessError("SharedMemoryManager has shut down")
+ else:
+ raise ProcessError(
+ "Unknown state {!r}".format(self._state.value))
+ return self._Server(self._registry, self._address,
+ self._authkey, self._serializer)
+
+ def SharedMemory(self, size):
+ """Returns a new SharedMemory instance with the specified size in
+ bytes, to be tracked by the manager."""
+ with self._Client(self._address, authkey=self._authkey) as conn:
+ sms = shared_memory.SharedMemory(None, create=True, size=size)
+ try:
+ dispatch(conn, None, 'track_segment', (sms.name,))
+ except BaseException as e:
+ sms.unlink()
+ raise e
+ return sms
+
+ def ShareableList(self, sequence):
+ """Returns a new ShareableList instance populated with the values
+ from the input sequence, to be tracked by the manager."""
+ with self._Client(self._address, authkey=self._authkey) as conn:
+ sl = shared_memory.ShareableList(sequence)
+ try:
+ dispatch(conn, None, 'track_segment', (sl.shm.name,))
+ except BaseException as e:
+ sl.shm.unlink()
+ raise e
+ return sl
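The SharedMemoryManager block above (Python 3.8+) boils down to a manager whose server unlinks every tracked segment on shutdown. A minimal usage sketch:

    from multiprocessing.managers import SharedMemoryManager

    with SharedMemoryManager() as smm:
        sl = smm.ShareableList(["alpha", 7, 3.14])
        shm = smm.SharedMemory(size=64)
        print(sl[0], shm.size)
    # on exit, shutdown() unlinks both tracked segments automatically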
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/pool.py b/contrib/tools/python3/src/Lib/multiprocessing/pool.py
index bbe05a550c..5ae69156f7 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/pool.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/pool.py
@@ -13,30 +13,30 @@ __all__ = ['Pool', 'ThreadPool']
# Imports
#
-import collections
+import collections
import itertools
import os
-import queue
-import threading
+import queue
+import threading
import time
import traceback
-import types
-import warnings
+import types
+import warnings
# If threading is available then ThreadPool should be provided. Therefore
# we avoid top-level imports which are liable to fail on some systems.
from . import util
from . import get_context, TimeoutError
-from .connection import wait
+from .connection import wait
#
# Constants representing the state of a pool
#
-INIT = "INIT"
-RUN = "RUN"
-CLOSE = "CLOSE"
-TERMINATE = "TERMINATE"
+INIT = "INIT"
+RUN = "RUN"
+CLOSE = "CLOSE"
+TERMINATE = "TERMINATE"
#
# Miscellaneous
@@ -147,54 +147,54 @@ def _helper_reraises_exception(ex):
# Class representing a process pool
#
-class _PoolCache(dict):
- """
- Class that implements a cache for the Pool class that will notify
- the pool management threads every time the cache is emptied. The
- notification is done by the use of a queue that is provided when
- instantiating the cache.
- """
- def __init__(self, /, *args, notifier=None, **kwds):
- self.notifier = notifier
- super().__init__(*args, **kwds)
-
- def __delitem__(self, item):
- super().__delitem__(item)
-
- # Notify that the cache is empty. This is important because the
- # pool keeps maintaining workers until the cache gets drained. This
-        # eliminates a race condition in which a task is finished after
-        # the pool's _handle_workers method has entered another iteration of the
-        # loop. In this situation, the only event that can wake up the pool
-        # is the cache being emptied (no more tasks available).
- if not self:
- self.notifier.put(None)
-
+class _PoolCache(dict):
+ """
+ Class that implements a cache for the Pool class that will notify
+ the pool management threads every time the cache is emptied. The
+ notification is done by the use of a queue that is provided when
+ instantiating the cache.
+ """
+ def __init__(self, /, *args, notifier=None, **kwds):
+ self.notifier = notifier
+ super().__init__(*args, **kwds)
+
+ def __delitem__(self, item):
+ super().__delitem__(item)
+
+ # Notify that the cache is empty. This is important because the
+ # pool keeps maintaining workers until the cache gets drained. This
+        # eliminates a race condition in which a task is finished after
+        # the pool's _handle_workers method has entered another iteration of the
+        # loop. In this situation, the only event that can wake up the pool
+        # is the cache being emptied (no more tasks available).
+ if not self:
+ self.notifier.put(None)
+
class Pool(object):
'''
Class which supports an async version of applying functions to arguments.
'''
_wrap_exception = True
- @staticmethod
- def Process(ctx, *args, **kwds):
- return ctx.Process(*args, **kwds)
+ @staticmethod
+ def Process(ctx, *args, **kwds):
+ return ctx.Process(*args, **kwds)
def __init__(self, processes=None, initializer=None, initargs=(),
maxtasksperchild=None, context=None):
- # Attributes initialized early to make sure that they exist in
- # __del__() if __init__() raises an exception
- self._pool = []
- self._state = INIT
-
+ # Attributes initialized early to make sure that they exist in
+ # __del__() if __init__() raises an exception
+ self._pool = []
+ self._state = INIT
+
self._ctx = context or get_context()
self._setup_queues()
self._taskqueue = queue.SimpleQueue()
-        # The _change_notifier queue exists to wake up self._handle_workers()
- # when the cache (self._cache) is empty or when there is a change in
- # the _state variable of the thread that runs _handle_workers.
- self._change_notifier = self._ctx.SimpleQueue()
- self._cache = _PoolCache(notifier=self._change_notifier)
+        # The _change_notifier queue exists to wake up self._handle_workers()
+ # when the cache (self._cache) is empty or when there is a change in
+ # the _state variable of the thread that runs _handle_workers.
+ self._change_notifier = self._ctx.SimpleQueue()
+ self._cache = _PoolCache(notifier=self._change_notifier)
self._maxtasksperchild = maxtasksperchild
self._initializer = initializer
self._initargs = initargs
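The _PoolCache restored above is a dict that drops a sentinel into a queue whenever a deletion empties it, so the management thread blocked on that queue wakes up. A self-contained sketch of the pattern (illustrative class name):

    import queue

    class NotifyingCache(dict):
        def __init__(self, *args, notifier=None, **kwds):
            self.notifier = notifier
            super().__init__(*args, **kwds)

        def __delitem__(self, key):
            super().__delitem__(key)
            if not self:
                self.notifier.put(None)   # cache drained -> wake the waiter

    q = queue.SimpleQueue()
    cache = NotifyingCache(notifier=q)
    cache["job"] = "result"
    del cache["job"]
    print(q.get())   # None, the wake-up sentinel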
@@ -208,24 +208,24 @@ class Pool(object):
raise TypeError('initializer must be a callable')
self._processes = processes
- try:
- self._repopulate_pool()
- except Exception:
- for p in self._pool:
- if p.exitcode is None:
- p.terminate()
- for p in self._pool:
- p.join()
- raise
-
- sentinels = self._get_sentinels()
-
+ try:
+ self._repopulate_pool()
+ except Exception:
+ for p in self._pool:
+ if p.exitcode is None:
+ p.terminate()
+ for p in self._pool:
+ p.join()
+ raise
+
+ sentinels = self._get_sentinels()
+
self._worker_handler = threading.Thread(
target=Pool._handle_workers,
- args=(self._cache, self._taskqueue, self._ctx, self.Process,
- self._processes, self._pool, self._inqueue, self._outqueue,
- self._initializer, self._initargs, self._maxtasksperchild,
- self._wrap_exception, sentinels, self._change_notifier)
+ args=(self._cache, self._taskqueue, self._ctx, self.Process,
+ self._processes, self._pool, self._inqueue, self._outqueue,
+ self._initializer, self._initargs, self._maxtasksperchild,
+ self._wrap_exception, sentinels, self._change_notifier)
)
self._worker_handler.daemon = True
self._worker_handler._state = RUN
@@ -252,92 +252,92 @@ class Pool(object):
self._terminate = util.Finalize(
self, self._terminate_pool,
args=(self._taskqueue, self._inqueue, self._outqueue, self._pool,
- self._change_notifier, self._worker_handler, self._task_handler,
+ self._change_notifier, self._worker_handler, self._task_handler,
self._result_handler, self._cache),
exitpriority=15
)
- self._state = RUN
-
- # Copy globals as function locals to make sure that they are available
- # during Python shutdown when the Pool is destroyed.
- def __del__(self, _warn=warnings.warn, RUN=RUN):
- if self._state == RUN:
- _warn(f"unclosed running multiprocessing pool {self!r}",
- ResourceWarning, source=self)
- if getattr(self, '_change_notifier', None) is not None:
- self._change_notifier.put(None)
-
- def __repr__(self):
- cls = self.__class__
- return (f'<{cls.__module__}.{cls.__qualname__} '
- f'state={self._state} '
- f'pool_size={len(self._pool)}>')
-
- def _get_sentinels(self):
- task_queue_sentinels = [self._outqueue._reader]
- self_notifier_sentinels = [self._change_notifier._reader]
- return [*task_queue_sentinels, *self_notifier_sentinels]
-
- @staticmethod
- def _get_worker_sentinels(workers):
- return [worker.sentinel for worker in
- workers if hasattr(worker, "sentinel")]
-
- @staticmethod
- def _join_exited_workers(pool):
+ self._state = RUN
+
+ # Copy globals as function locals to make sure that they are available
+ # during Python shutdown when the Pool is destroyed.
+ def __del__(self, _warn=warnings.warn, RUN=RUN):
+ if self._state == RUN:
+ _warn(f"unclosed running multiprocessing pool {self!r}",
+ ResourceWarning, source=self)
+ if getattr(self, '_change_notifier', None) is not None:
+ self._change_notifier.put(None)
+
+ def __repr__(self):
+ cls = self.__class__
+ return (f'<{cls.__module__}.{cls.__qualname__} '
+ f'state={self._state} '
+ f'pool_size={len(self._pool)}>')
+
+ def _get_sentinels(self):
+ task_queue_sentinels = [self._outqueue._reader]
+ self_notifier_sentinels = [self._change_notifier._reader]
+ return [*task_queue_sentinels, *self_notifier_sentinels]
+
+ @staticmethod
+ def _get_worker_sentinels(workers):
+ return [worker.sentinel for worker in
+ workers if hasattr(worker, "sentinel")]
+
+ @staticmethod
+ def _join_exited_workers(pool):
"""Cleanup after any worker processes which have exited due to reaching
their specified lifetime. Returns True if any workers were cleaned up.
"""
cleaned = False
- for i in reversed(range(len(pool))):
- worker = pool[i]
+ for i in reversed(range(len(pool))):
+ worker = pool[i]
if worker.exitcode is not None:
# worker exited
util.debug('cleaning up worker %d' % i)
worker.join()
cleaned = True
- del pool[i]
+ del pool[i]
return cleaned
def _repopulate_pool(self):
- return self._repopulate_pool_static(self._ctx, self.Process,
- self._processes,
- self._pool, self._inqueue,
- self._outqueue, self._initializer,
- self._initargs,
- self._maxtasksperchild,
- self._wrap_exception)
-
- @staticmethod
- def _repopulate_pool_static(ctx, Process, processes, pool, inqueue,
- outqueue, initializer, initargs,
- maxtasksperchild, wrap_exception):
+ return self._repopulate_pool_static(self._ctx, self.Process,
+ self._processes,
+ self._pool, self._inqueue,
+ self._outqueue, self._initializer,
+ self._initargs,
+ self._maxtasksperchild,
+ self._wrap_exception)
+
+ @staticmethod
+ def _repopulate_pool_static(ctx, Process, processes, pool, inqueue,
+ outqueue, initializer, initargs,
+ maxtasksperchild, wrap_exception):
"""Bring the number of pool processes up to the specified number,
for use after reaping workers which have exited.
"""
- for i in range(processes - len(pool)):
- w = Process(ctx, target=worker,
- args=(inqueue, outqueue,
- initializer,
- initargs, maxtasksperchild,
- wrap_exception))
+ for i in range(processes - len(pool)):
+ w = Process(ctx, target=worker,
+ args=(inqueue, outqueue,
+ initializer,
+ initargs, maxtasksperchild,
+ wrap_exception))
w.name = w.name.replace('Process', 'PoolWorker')
w.daemon = True
w.start()
- pool.append(w)
+ pool.append(w)
util.debug('added worker')
- @staticmethod
- def _maintain_pool(ctx, Process, processes, pool, inqueue, outqueue,
- initializer, initargs, maxtasksperchild,
- wrap_exception):
+ @staticmethod
+ def _maintain_pool(ctx, Process, processes, pool, inqueue, outqueue,
+ initializer, initargs, maxtasksperchild,
+ wrap_exception):
"""Clean up any exited workers and start replacements for them.
"""
- if Pool._join_exited_workers(pool):
- Pool._repopulate_pool_static(ctx, Process, processes, pool,
- inqueue, outqueue, initializer,
- initargs, maxtasksperchild,
- wrap_exception)
+ if Pool._join_exited_workers(pool):
+ Pool._repopulate_pool_static(ctx, Process, processes, pool,
+ inqueue, outqueue, initializer,
+ initargs, maxtasksperchild,
+ wrap_exception)
def _setup_queues(self):
self._inqueue = self._ctx.SimpleQueue()
@@ -345,10 +345,10 @@ class Pool(object):
self._quick_put = self._inqueue._writer.send
self._quick_get = self._outqueue._reader.recv
- def _check_running(self):
- if self._state != RUN:
- raise ValueError("Pool not running")
-
+ def _check_running(self):
+ if self._state != RUN:
+ raise ValueError("Pool not running")
+
def apply(self, func, args=(), kwds={}):
'''
Equivalent of `func(*args, **kwds)`.
@@ -394,9 +394,9 @@ class Pool(object):
'''
Equivalent of `map()` -- can be MUCH slower than `Pool.map()`.
'''
- self._check_running()
+ self._check_running()
if chunksize == 1:
- result = IMapIterator(self)
+ result = IMapIterator(self)
self._taskqueue.put(
(
self._guarded_task_generation(result._job, func, iterable),
@@ -409,7 +409,7 @@ class Pool(object):
"Chunksize must be 1+, not {0:n}".format(
chunksize))
task_batches = Pool._get_tasks(func, iterable, chunksize)
- result = IMapIterator(self)
+ result = IMapIterator(self)
self._taskqueue.put(
(
self._guarded_task_generation(result._job,
@@ -423,9 +423,9 @@ class Pool(object):
'''
Like `imap()` method but ordering of results is arbitrary.
'''
- self._check_running()
+ self._check_running()
if chunksize == 1:
- result = IMapUnorderedIterator(self)
+ result = IMapUnorderedIterator(self)
self._taskqueue.put(
(
self._guarded_task_generation(result._job, func, iterable),
@@ -437,7 +437,7 @@ class Pool(object):
raise ValueError(
"Chunksize must be 1+, not {0!r}".format(chunksize))
task_batches = Pool._get_tasks(func, iterable, chunksize)
- result = IMapUnorderedIterator(self)
+ result = IMapUnorderedIterator(self)
self._taskqueue.put(
(
self._guarded_task_generation(result._job,
@@ -452,8 +452,8 @@ class Pool(object):
'''
Asynchronous version of `apply()` method.
'''
- self._check_running()
- result = ApplyResult(self, callback, error_callback)
+ self._check_running()
+ result = ApplyResult(self, callback, error_callback)
self._taskqueue.put(([(result._job, 0, func, args, kwds)], None))
return result
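The chunksize logic above batches the input iterable into tuples before queueing, which is what Pool._get_tasks does internally. A sketch of equivalent chunking (Python 3.8+ for the walrus operator):

    from itertools import islice

    def get_chunks(iterable, chunksize):
        it = iter(iterable)
        while chunk := tuple(islice(it, chunksize)):
            yield chunk

    print(list(get_chunks(range(7), 3)))   # [(0, 1, 2), (3, 4, 5), (6,)]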
@@ -470,7 +470,7 @@ class Pool(object):
'''
Helper function to implement map, starmap and their async counterparts.
'''
- self._check_running()
+ self._check_running()
if not hasattr(iterable, '__len__'):
iterable = list(iterable)
@@ -482,7 +482,7 @@ class Pool(object):
chunksize = 0
task_batches = Pool._get_tasks(func, iterable, chunksize)
- result = MapResult(self, chunksize, len(iterable), callback,
+ result = MapResult(self, chunksize, len(iterable), callback,
error_callback=error_callback)
self._taskqueue.put(
(
@@ -495,30 +495,30 @@ class Pool(object):
return result
@staticmethod
- def _wait_for_updates(sentinels, change_notifier, timeout=None):
- wait(sentinels, timeout=timeout)
- while not change_notifier.empty():
- change_notifier.get()
-
- @classmethod
- def _handle_workers(cls, cache, taskqueue, ctx, Process, processes,
- pool, inqueue, outqueue, initializer, initargs,
- maxtasksperchild, wrap_exception, sentinels,
- change_notifier):
+ def _wait_for_updates(sentinels, change_notifier, timeout=None):
+ wait(sentinels, timeout=timeout)
+ while not change_notifier.empty():
+ change_notifier.get()
+
+ @classmethod
+ def _handle_workers(cls, cache, taskqueue, ctx, Process, processes,
+ pool, inqueue, outqueue, initializer, initargs,
+ maxtasksperchild, wrap_exception, sentinels,
+ change_notifier):
thread = threading.current_thread()
# Keep maintaining workers until the cache gets drained, unless the pool
# is terminated.
- while thread._state == RUN or (cache and thread._state != TERMINATE):
- cls._maintain_pool(ctx, Process, processes, pool, inqueue,
- outqueue, initializer, initargs,
- maxtasksperchild, wrap_exception)
-
- current_sentinels = [*cls._get_worker_sentinels(pool), *sentinels]
-
- cls._wait_for_updates(current_sentinels, change_notifier)
+ while thread._state == RUN or (cache and thread._state != TERMINATE):
+ cls._maintain_pool(ctx, Process, processes, pool, inqueue,
+ outqueue, initializer, initargs,
+ maxtasksperchild, wrap_exception)
+
+ current_sentinels = [*cls._get_worker_sentinels(pool), *sentinels]
+
+ cls._wait_for_updates(current_sentinels, change_notifier)
# send sentinel to stop workers
- taskqueue.put(None)
+ taskqueue.put(None)
util.debug('worker handler exiting')
@staticmethod
@@ -530,7 +530,7 @@ class Pool(object):
try:
# iterating taskseq cannot fail
for task in taskseq:
- if thread._state != RUN:
+ if thread._state != RUN:
util.debug('task handler found thread._state != RUN')
break
try:
@@ -578,7 +578,7 @@ class Pool(object):
util.debug('result handler got EOFError/OSError -- exiting')
return
- if thread._state != RUN:
+ if thread._state != RUN:
assert thread._state == TERMINATE, "Thread not in TERMINATE"
util.debug('result handler found thread._state=TERMINATE')
break
@@ -646,7 +646,7 @@ class Pool(object):
if self._state == RUN:
self._state = CLOSE
self._worker_handler._state = CLOSE
- self._change_notifier.put(None)
+ self._change_notifier.put(None)
def terminate(self):
util.debug('terminating pool')
@@ -675,17 +675,17 @@ class Pool(object):
time.sleep(0)
@classmethod
- def _terminate_pool(cls, taskqueue, inqueue, outqueue, pool, change_notifier,
+ def _terminate_pool(cls, taskqueue, inqueue, outqueue, pool, change_notifier,
worker_handler, task_handler, result_handler, cache):
# this is guaranteed to only be called once
util.debug('finalizing pool')
- # Notify that the worker_handler state has been changed so the
- # _handle_workers loop can be unblocked (and exited) in order to
- # send the finalization sentinel all the workers.
+ # Notify that the worker_handler state has been changed so the
+ # _handle_workers loop can be unblocked (and exited) in order to
+ # send the finalization sentinel all the workers.
worker_handler._state = TERMINATE
- change_notifier.put(None)
-
+ change_notifier.put(None)
+
task_handler._state = TERMINATE
util.debug('helping task handler/workers to finish')
@@ -696,7 +696,7 @@ class Pool(object):
"Cannot have cache with result_hander not alive")
result_handler._state = TERMINATE
- change_notifier.put(None)
+ change_notifier.put(None)
outqueue.put(None) # sentinel
# We must wait for the worker handler to exit before terminating
@@ -729,7 +729,7 @@ class Pool(object):
p.join()
def __enter__(self):
- self._check_running()
+ self._check_running()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
@@ -741,14 +741,14 @@ class Pool(object):
class ApplyResult(object):
- def __init__(self, pool, callback, error_callback):
- self._pool = pool
+ def __init__(self, pool, callback, error_callback):
+ self._pool = pool
self._event = threading.Event()
self._job = next(job_counter)
- self._cache = pool._cache
+ self._cache = pool._cache
self._callback = callback
self._error_callback = error_callback
- self._cache[self._job] = self
+ self._cache[self._job] = self
def ready(self):
return self._event.is_set()
@@ -778,10 +778,10 @@ class ApplyResult(object):
self._error_callback(self._value)
self._event.set()
del self._cache[self._job]
- self._pool = None
-
- __class_getitem__ = classmethod(types.GenericAlias)
+ self._pool = None
+ __class_getitem__ = classmethod(types.GenericAlias)
+
AsyncResult = ApplyResult # create alias -- see #17805
#
@@ -790,8 +790,8 @@ AsyncResult = ApplyResult # create alias -- see #17805
class MapResult(ApplyResult):
- def __init__(self, pool, chunksize, length, callback, error_callback):
- ApplyResult.__init__(self, pool, callback,
+ def __init__(self, pool, chunksize, length, callback, error_callback):
+ ApplyResult.__init__(self, pool, callback,
error_callback=error_callback)
self._success = True
self._value = [None] * length
@@ -799,7 +799,7 @@ class MapResult(ApplyResult):
if chunksize <= 0:
self._number_left = 0
self._event.set()
- del self._cache[self._job]
+ del self._cache[self._job]
else:
self._number_left = length//chunksize + bool(length % chunksize)
@@ -813,7 +813,7 @@ class MapResult(ApplyResult):
self._callback(self._value)
del self._cache[self._job]
self._event.set()
- self._pool = None
+ self._pool = None
else:
if not success and self._success:
# only store first exception
@@ -825,7 +825,7 @@ class MapResult(ApplyResult):
self._error_callback(self._value)
del self._cache[self._job]
self._event.set()
- self._pool = None
+ self._pool = None
#
# Class whose instances are returned by `Pool.imap()`
@@ -833,16 +833,16 @@ class MapResult(ApplyResult):
class IMapIterator(object):
- def __init__(self, pool):
- self._pool = pool
+ def __init__(self, pool):
+ self._pool = pool
self._cond = threading.Condition(threading.Lock())
self._job = next(job_counter)
- self._cache = pool._cache
+ self._cache = pool._cache
self._items = collections.deque()
self._index = 0
self._length = None
self._unsorted = {}
- self._cache[self._job] = self
+ self._cache[self._job] = self
def __iter__(self):
return self
@@ -853,14 +853,14 @@ class IMapIterator(object):
item = self._items.popleft()
except IndexError:
if self._index == self._length:
- self._pool = None
+ self._pool = None
raise StopIteration from None
self._cond.wait(timeout)
try:
item = self._items.popleft()
except IndexError:
if self._index == self._length:
- self._pool = None
+ self._pool = None
raise StopIteration from None
raise TimeoutError from None
@@ -886,7 +886,7 @@ class IMapIterator(object):
if self._index == self._length:
del self._cache[self._job]
- self._pool = None
+ self._pool = None
def _set_length(self, length):
with self._cond:
@@ -894,7 +894,7 @@ class IMapIterator(object):
if self._index == self._length:
self._cond.notify()
del self._cache[self._job]
- self._pool = None
+ self._pool = None
#
# Class whose instances are returned by `Pool.imap_unordered()`
@@ -909,7 +909,7 @@ class IMapUnorderedIterator(IMapIterator):
self._cond.notify()
if self._index == self._length:
del self._cache[self._job]
- self._pool = None
+ self._pool = None
#
#
@@ -919,7 +919,7 @@ class ThreadPool(Pool):
_wrap_exception = False
@staticmethod
- def Process(ctx, *args, **kwds):
+ def Process(ctx, *args, **kwds):
from .dummy import Process
return Process(*args, **kwds)
@@ -932,14 +932,14 @@ class ThreadPool(Pool):
self._quick_put = self._inqueue.put
self._quick_get = self._outqueue.get
- def _get_sentinels(self):
- return [self._change_notifier._reader]
-
- @staticmethod
- def _get_worker_sentinels(workers):
- return []
-
+ def _get_sentinels(self):
+ return [self._change_notifier._reader]
+
@staticmethod
+ def _get_worker_sentinels(workers):
+ return []
+
+ @staticmethod
def _help_stuff_finish(inqueue, task_handler, size):
# drain inqueue, and put sentinels at its head to make workers finish
try:
@@ -949,6 +949,6 @@ class ThreadPool(Pool):
pass
for i in range(size):
inqueue.put(None)
-
- def _wait_for_updates(self, sentinels, change_notifier, timeout):
- time.sleep(timeout)
+
+ def _wait_for_updates(self, sentinels, change_notifier, timeout):
+ time.sleep(timeout)
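
For context, here is a minimal, purely illustrative sketch of how the pool machinery in this hunk is driven from user code; the names below are ordinary public API, not part of the patch. The context-manager protocol (__enter__ calling _check_running) and the change notifier that wakes _handle_workers are both exercised:

    from multiprocessing import Pool

    def square(x):
        return x * x

    if __name__ == '__main__':
        # __enter__ calls _check_running(); leaving the block calls
        # terminate(), which puts None on the change notifier so the
        # worker handler thread wakes up and sends the stop sentinels.
        with Pool(processes=4) as pool:
            print(pool.map(square, range(8)))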
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/popen_fork.py b/contrib/tools/python3/src/Lib/multiprocessing/popen_fork.py
index 625981cf47..a3da92ef0a 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/popen_fork.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/popen_fork.py
@@ -25,12 +25,12 @@ class Popen(object):
if self.returncode is None:
try:
pid, sts = os.waitpid(self.pid, flag)
- except OSError:
+ except OSError:
# Child process not yet created. See #1731717
# e.errno == errno.ECHILD == 10
return None
if pid == self.pid:
- self.returncode = os.waitstatus_to_exitcode(sts)
+ self.returncode = os.waitstatus_to_exitcode(sts)
return self.returncode
def wait(self, timeout=None):
@@ -62,20 +62,20 @@ class Popen(object):
def _launch(self, process_obj):
code = 1
parent_r, child_w = os.pipe()
- child_r, parent_w = os.pipe()
+ child_r, parent_w = os.pipe()
self.pid = os.fork()
if self.pid == 0:
try:
os.close(parent_r)
- os.close(parent_w)
- code = process_obj._bootstrap(parent_sentinel=child_r)
+ os.close(parent_w)
+ code = process_obj._bootstrap(parent_sentinel=child_r)
finally:
os._exit(code)
else:
os.close(child_w)
- os.close(child_r)
- self.finalizer = util.Finalize(self, util.close_fds,
- (parent_r, parent_w,))
+ os.close(child_r)
+ self.finalizer = util.Finalize(self, util.close_fds,
+ (parent_r, parent_w,))
self.sentinel = parent_r
def close(self):
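
The launch code above wires one pipe end into the child as a parent sentinel. A stripped-down sketch of the same idea (Unix-only, illustrative; the real Popen also hands child_r to _bootstrap and defers cleanup to a finalizer):

    import os
    from multiprocessing import connection

    def fork_with_sentinel():
        parent_r, child_w = os.pipe()
        pid = os.fork()
        if pid == 0:                      # child
            os.close(parent_r)
            # child_w stays open until _exit(); closing it makes
            # parent_r readable (EOF) in the parent
            os._exit(0)
        os.close(child_w)                 # parent keeps only the read end
        return pid, parent_r

    if __name__ == '__main__':
        pid, sentinel = fork_with_sentinel()
        connection.wait([sentinel])       # returns once the child exits
        os.waitpid(pid, 0)                # reap the child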
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/popen_forkserver.py b/contrib/tools/python3/src/Lib/multiprocessing/popen_forkserver.py
index a56eb9bf11..4cd65bad76 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/popen_forkserver.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/popen_forkserver.py
@@ -49,11 +49,11 @@ class Popen(popen_fork.Popen):
set_spawning_popen(None)
self.sentinel, w = forkserver.connect_to_new_process(self._fds)
- # Keep a duplicate of the data pipe's write end as a sentinel of the
- # parent process used by the child process.
- _parent_w = os.dup(w)
- self.finalizer = util.Finalize(self, util.close_fds,
- (_parent_w, self.sentinel))
+ # Keep a duplicate of the data pipe's write end as a sentinel of the
+ # parent process used by the child process.
+ _parent_w = os.dup(w)
+ self.finalizer = util.Finalize(self, util.close_fds,
+ (_parent_w, self.sentinel))
with open(w, 'wb', closefd=True) as f:
f.write(buf.getbuffer())
self.pid = forkserver.read_signed(self.sentinel)
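
The duplicated write end retained by the finalizer above is what lets the child's parent sentinel outlive this call. At the public API level the same mechanism surfaces as Process.sentinel:

    import time
    from multiprocessing import Process, connection

    def work():
        time.sleep(0.2)

    if __name__ == '__main__':
        p = Process(target=work)
        p.start()
        # p.sentinel becomes ready when the process ends; wait() can
        # multiplex over many such sentinels at once
        connection.wait([p.sentinel], timeout=5)
        p.join()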
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/popen_spawn_posix.py b/contrib/tools/python3/src/Lib/multiprocessing/popen_spawn_posix.py
index 24b8634523..898b689c38 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/popen_spawn_posix.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/popen_spawn_posix.py
@@ -36,8 +36,8 @@ class Popen(popen_fork.Popen):
return fd
def _launch(self, process_obj):
- from . import resource_tracker
- tracker_fd = resource_tracker.getfd()
+ from . import resource_tracker
+ tracker_fd = resource_tracker.getfd()
self._fds.append(tracker_fd)
prep_data = spawn.get_preparation_data(process_obj._name)
fp = io.BytesIO()
@@ -61,12 +61,12 @@ class Popen(popen_fork.Popen):
with open(parent_w, 'wb', closefd=False) as f:
f.write(fp.getbuffer())
finally:
- fds_to_close = []
- for fd in (parent_r, parent_w):
- if fd is not None:
- fds_to_close.append(fd)
- self.finalizer = util.Finalize(self, util.close_fds, fds_to_close)
-
- for fd in (child_r, child_w):
+ fds_to_close = []
+ for fd in (parent_r, parent_w):
if fd is not None:
+ fds_to_close.append(fd)
+ self.finalizer = util.Finalize(self, util.close_fds, fds_to_close)
+
+ for fd in (child_r, child_w):
+ if fd is not None:
os.close(fd)
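
The restored hunk builds fds_to_close so that only descriptors which were actually created get closed. A stripped-down illustration of that bookkeeping pattern (names are illustrative, not part of the patch):

    import os

    parent_r = parent_w = child_r = child_w = None
    try:
        parent_r, child_w = os.pipe()
        child_r, parent_w = os.pipe()
        # ... hand child_r/child_w to a child process here ...
    finally:
        # guard against pipe() having failed part-way through
        for fd in (child_r, child_w):
            if fd is not None:
                os.close(fd)
        for fd in (parent_r, parent_w):
            if fd is not None:
                os.close(fd)   # the real code defers this to util.Finalize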
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/popen_spawn_win32.py b/contrib/tools/python3/src/Lib/multiprocessing/popen_spawn_win32.py
index 27fe064290..f866c59743 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/popen_spawn_win32.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/popen_spawn_win32.py
@@ -22,7 +22,7 @@ WINSERVICE = sys.executable.lower().endswith("pythonservice.exe")
def _path_eq(p1, p2):
return p1 == p2 or os.path.normcase(p1) == os.path.normcase(p2)
-WINENV = not _path_eq(sys.executable, sys._base_executable)
+WINENV = not _path_eq(sys.executable, sys._base_executable)
def _close_handles(*handles):
@@ -44,12 +44,12 @@ class Popen(object):
def __init__(self, process_obj):
prep_data = spawn.get_preparation_data(process_obj._name)
- # The read end of the pipe will be duplicated by the child process
+ # The read end of the pipe will be duplicated by the child process
# -- see spawn_main() in spawn.py.
- #
- # bpo-33929: Previously, the read end of the pipe was "stolen" by the child
- # process, but it leaked a handle if the child process had been
- # terminated before it could steal the handle from the parent process.
+ #
+ # bpo-33929: Previously, the read end of the pipe was "stolen" by the child
+ # process, but it leaked a handle if the child process had been
+ # terminated before it could steal the handle from the parent process.
rhandle, whandle = _winapi.CreatePipe(None, 0)
wfd = msvcrt.open_osfhandle(whandle, 0)
cmd = spawn.get_command_line(parent_pid=os.getpid(),
@@ -73,7 +73,7 @@ class Popen(object):
try:
hp, ht, pid, tid = _winapi.CreateProcess(
python_exe, cmd,
- None, None, False, 0, env, None, None)
+ None, None, False, 0, env, None, None)
_winapi.CloseHandle(ht)
except:
_winapi.CloseHandle(rhandle)
@@ -84,8 +84,8 @@ class Popen(object):
self.returncode = None
self._handle = hp
self.sentinel = int(hp)
- self.finalizer = util.Finalize(self, _close_handles,
- (self.sentinel, int(rhandle)))
+ self.finalizer = util.Finalize(self, _close_handles,
+ (self.sentinel, int(rhandle)))
# send information to child
set_spawning_popen(self)
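
On both platforms the handles above are released through multiprocessing.util.Finalize, which runs a callback at most once: when its owner is garbage-collected, at interpreter exit, or when invoked explicitly. A small sketch of that utility, independent of the Windows handles in this hunk:

    from multiprocessing import util

    class Owner:
        pass

    def cleanup(*resources):
        print('closing', resources)

    owner = Owner()
    finalizer = util.Finalize(owner, cleanup, args=(1, 2))
    finalizer()          # runs the callback now, at most once
    del owner            # a second run on collection is suppressed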
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/process.py b/contrib/tools/python3/src/Lib/multiprocessing/process.py
index 0b2e0b45b2..8177dcd7be 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/process.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/process.py
@@ -7,8 +7,8 @@
# Licensed to PSF under a Contributor Agreement.
#
-__all__ = ['BaseProcess', 'current_process', 'active_children',
- 'parent_process']
+__all__ = ['BaseProcess', 'current_process', 'active_children',
+ 'parent_process']
#
# Imports
@@ -47,13 +47,13 @@ def active_children():
_cleanup()
return list(_children)
-
-def parent_process():
- '''
- Return process object representing the parent process
- '''
- return _parent_process
-
+
+def parent_process():
+ '''
+ Return process object representing the parent process
+ '''
+ return _parent_process
+
#
#
#
@@ -84,7 +84,7 @@ class BaseProcess(object):
self._identity = _current_process._identity + (count,)
self._config = _current_process._config.copy()
self._parent_pid = os.getpid()
- self._parent_name = _current_process.name
+ self._parent_name = _current_process.name
self._popen = None
self._closed = False
self._target = target
@@ -257,7 +257,7 @@ class BaseProcess(object):
raise ValueError("process not started") from None
def __repr__(self):
- exitcode = None
+ exitcode = None
if self is _current_process:
status = 'started'
elif self._closed:
@@ -267,29 +267,29 @@ class BaseProcess(object):
elif self._popen is None:
status = 'initial'
else:
- exitcode = self._popen.poll()
- if exitcode is not None:
- status = 'stopped'
+ exitcode = self._popen.poll()
+ if exitcode is not None:
+ status = 'stopped'
else:
status = 'started'
- info = [type(self).__name__, 'name=%r' % self._name]
- if self._popen is not None:
- info.append('pid=%s' % self._popen.pid)
- info.append('parent=%s' % self._parent_pid)
- info.append(status)
- if exitcode is not None:
- exitcode = _exitcode_to_name.get(exitcode, exitcode)
- info.append('exitcode=%s' % exitcode)
- if self.daemon:
- info.append('daemon')
- return '<%s>' % ' '.join(info)
+ info = [type(self).__name__, 'name=%r' % self._name]
+ if self._popen is not None:
+ info.append('pid=%s' % self._popen.pid)
+ info.append('parent=%s' % self._parent_pid)
+ info.append(status)
+ if exitcode is not None:
+ exitcode = _exitcode_to_name.get(exitcode, exitcode)
+ info.append('exitcode=%s' % exitcode)
+ if self.daemon:
+ info.append('daemon')
+ return '<%s>' % ' '.join(info)
##
- def _bootstrap(self, parent_sentinel=None):
+ def _bootstrap(self, parent_sentinel=None):
from . import util, context
- global _current_process, _parent_process, _process_counter, _children
+ global _current_process, _parent_process, _process_counter, _children
try:
if self._start_method is not None:
@@ -299,10 +299,10 @@ class BaseProcess(object):
util._close_stdin()
old_process = _current_process
_current_process = self
- _parent_process = _ParentProcess(
- self._parent_name, self._parent_pid, parent_sentinel)
- if threading._HAVE_THREAD_NATIVE_ID:
- threading.main_thread()._set_native_id()
+ _parent_process = _ParentProcess(
+ self._parent_name, self._parent_pid, parent_sentinel)
+ if threading._HAVE_THREAD_NATIVE_ID:
+ threading.main_thread()._set_native_id()
try:
util._finalizer_registry.clear()
util._run_after_forkers()
@@ -317,12 +317,12 @@ class BaseProcess(object):
finally:
util._exit_function()
except SystemExit as e:
- if e.code is None:
- exitcode = 0
- elif isinstance(e.code, int):
- exitcode = e.code
+ if e.code is None:
+ exitcode = 0
+ elif isinstance(e.code, int):
+ exitcode = e.code
else:
- sys.stderr.write(str(e.code) + '\n')
+ sys.stderr.write(str(e.code) + '\n')
exitcode = 1
except:
exitcode = 1
@@ -350,41 +350,41 @@ class AuthenticationString(bytes):
)
return AuthenticationString, (bytes(self),)
-
-#
-# Create object representing the parent process
-#
-
-class _ParentProcess(BaseProcess):
-
- def __init__(self, name, pid, sentinel):
- self._identity = ()
- self._name = name
- self._pid = pid
- self._parent_pid = None
- self._popen = None
- self._closed = False
- self._sentinel = sentinel
- self._config = {}
-
- def is_alive(self):
- from multiprocessing.connection import wait
- return not wait([self._sentinel], timeout=0)
-
- @property
- def ident(self):
- return self._pid
-
- def join(self, timeout=None):
- '''
- Wait until parent process terminates
- '''
- from multiprocessing.connection import wait
- wait([self._sentinel], timeout=timeout)
-
- pid = ident
-
+
#
+# Create object representing the parent process
+#
+
+class _ParentProcess(BaseProcess):
+
+ def __init__(self, name, pid, sentinel):
+ self._identity = ()
+ self._name = name
+ self._pid = pid
+ self._parent_pid = None
+ self._popen = None
+ self._closed = False
+ self._sentinel = sentinel
+ self._config = {}
+
+ def is_alive(self):
+ from multiprocessing.connection import wait
+ return not wait([self._sentinel], timeout=0)
+
+ @property
+ def ident(self):
+ return self._pid
+
+ def join(self, timeout=None):
+ '''
+ Wait until parent process terminates
+ '''
+ from multiprocessing.connection import wait
+ wait([self._sentinel], timeout=timeout)
+
+ pid = ident
+
+#
# Create object representing the main process
#
@@ -412,7 +412,7 @@ class _MainProcess(BaseProcess):
pass
-_parent_process = None
+_parent_process = None
_current_process = _MainProcess()
_process_counter = itertools.count(1)
_children = set()
@@ -426,7 +426,7 @@ _exitcode_to_name = {}
for name, signum in list(signal.__dict__.items()):
if name[:3]=='SIG' and '_' not in name:
- _exitcode_to_name[-signum] = f'-{name}'
+ _exitcode_to_name[-signum] = f'-{name}'
# For debug and leak testing
_dangling = WeakSet()
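
The block above introduces _ParentProcess together with the module-level parent_process() accessor restored in __all__. Typical use from a child (public API since Python 3.8):

    from multiprocessing import Process, parent_process

    def child():
        # inside a child, parent_process() describes the launching process
        print('parent pid:', parent_process().pid)

    if __name__ == '__main__':
        print(parent_process())     # None in the main process
        p = Process(target=child)
        p.start()
        p.join()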
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/queues.py b/contrib/tools/python3/src/Lib/multiprocessing/queues.py
index a290181487..1646d270ec 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/queues.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/queues.py
@@ -14,7 +14,7 @@ import os
import threading
import collections
import time
-import types
+import types
import weakref
import errno
@@ -49,7 +49,7 @@ class Queue(object):
self._sem = ctx.BoundedSemaphore(maxsize)
# For use by concurrent.futures
self._ignore_epipe = False
- self._reset()
+ self._reset()
if sys.platform != 'win32':
register_after_fork(self, Queue._after_fork)
@@ -62,17 +62,17 @@ class Queue(object):
def __setstate__(self, state):
(self._ignore_epipe, self._maxsize, self._reader, self._writer,
self._rlock, self._wlock, self._sem, self._opid) = state
- self._reset()
+ self._reset()
def _after_fork(self):
debug('Queue._after_fork()')
- self._reset(after_fork=True)
-
- def _reset(self, after_fork=False):
- if after_fork:
- self._notempty._at_fork_reinit()
- else:
- self._notempty = threading.Condition(threading.Lock())
+ self._reset(after_fork=True)
+
+ def _reset(self, after_fork=False):
+ if after_fork:
+ self._notempty._at_fork_reinit()
+ else:
+ self._notempty = threading.Condition(threading.Lock())
self._buffer = collections.deque()
self._thread = None
self._jointhread = None
@@ -84,8 +84,8 @@ class Queue(object):
self._poll = self._reader.poll
def put(self, obj, block=True, timeout=None):
- if self._closed:
- raise ValueError(f"Queue {self!r} is closed")
+ if self._closed:
+ raise ValueError(f"Queue {self!r} is closed")
if not self._sem.acquire(block, timeout):
raise Full
@@ -96,8 +96,8 @@ class Queue(object):
self._notempty.notify()
def get(self, block=True, timeout=None):
- if self._closed:
- raise ValueError(f"Queue {self!r} is closed")
+ if self._closed:
+ raise ValueError(f"Queue {self!r} is closed")
if block and timeout is None:
with self._rlock:
res = self._recv_bytes()
@@ -307,8 +307,8 @@ class JoinableQueue(Queue):
self._cond, self._unfinished_tasks = state[-2:]
def put(self, obj, block=True, timeout=None):
- if self._closed:
- raise ValueError(f"Queue {self!r} is closed")
+ if self._closed:
+ raise ValueError(f"Queue {self!r} is closed")
if not self._sem.acquire(block, timeout):
raise Full
@@ -346,10 +346,10 @@ class SimpleQueue(object):
else:
self._wlock = ctx.Lock()
- def close(self):
- self._reader.close()
- self._writer.close()
-
+ def close(self):
+ self._reader.close()
+ self._writer.close()
+
def empty(self):
return not self._poll()
@@ -376,5 +376,5 @@ class SimpleQueue(object):
else:
with self._wlock:
self._writer.send_bytes(obj)
-
- __class_getitem__ = classmethod(types.GenericAlias)
+
+ __class_getitem__ = classmethod(types.GenericAlias)
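
The restored guards make put() and get() on a closed Queue fail loudly with ValueError instead of an obscure downstream error, for instance:

    from multiprocessing import Queue

    if __name__ == '__main__':
        q = Queue()
        q.put('item')
        print(q.get())
        q.close()
        try:
            q.put('late')
        except ValueError as exc:
            print(exc)              # "Queue <...> is closed"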
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/reduction.py b/contrib/tools/python3/src/Lib/multiprocessing/reduction.py
index 5593f0682f..d4a7f802ba 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/reduction.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/reduction.py
@@ -68,16 +68,16 @@ if sys.platform == 'win32':
__all__ += ['DupHandle', 'duplicate', 'steal_handle']
import _winapi
- def duplicate(handle, target_process=None, inheritable=False,
- *, source_process=None):
+ def duplicate(handle, target_process=None, inheritable=False,
+ *, source_process=None):
'''Duplicate a handle. (target_process is a handle not a pid!)'''
- current_process = _winapi.GetCurrentProcess()
- if source_process is None:
- source_process = current_process
+ current_process = _winapi.GetCurrentProcess()
+ if source_process is None:
+ source_process = current_process
if target_process is None:
- target_process = current_process
+ target_process = current_process
return _winapi.DuplicateHandle(
- source_process, handle, target_process,
+ source_process, handle, target_process,
0, inheritable, _winapi.DUPLICATE_SAME_ACCESS)
def steal_handle(source_pid, handle):
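
duplicate() here is a Windows-only helper; the portable entry point of this module is ForkingPickler, which pickles objects using multiprocessing's registered reducers. A quick round trip as a sketch:

    import pickle
    from multiprocessing.reduction import ForkingPickler

    payload = {'answer': 42}
    blob = bytes(ForkingPickler.dumps(payload))  # bytes-like, via the reducers
    print(pickle.loads(blob))                    # {'answer': 42}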
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/resource_sharer.py b/contrib/tools/python3/src/Lib/multiprocessing/resource_sharer.py
index 66076509a1..aef0c1e84f 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/resource_sharer.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/resource_sharer.py
@@ -59,7 +59,7 @@ else:
class _ResourceSharer(object):
- '''Manager for resources using a background thread.'''
+ '''Manager for resources using a background thread.'''
def __init__(self):
self._key = 0
self._cache = {}
@@ -112,7 +112,7 @@ class _ResourceSharer(object):
for key, (send, close) in self._cache.items():
close()
self._cache.clear()
- self._lock._at_fork_reinit()
+ self._lock._at_fork_reinit()
if self._listener is not None:
self._listener.close()
self._listener = None
@@ -132,7 +132,7 @@ class _ResourceSharer(object):
def _serve(self):
if hasattr(signal, 'pthread_sigmask'):
- signal.pthread_sigmask(signal.SIG_BLOCK, signal.valid_signals())
+ signal.pthread_sigmask(signal.SIG_BLOCK, signal.valid_signals())
while 1:
try:
with self._listener.accept() as conn:
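
Blocking every signal in the server thread, as _serve() does above, keeps signal delivery in the main thread. The same pattern in isolation (effective only where pthread_sigmask exists, i.e. on Unix):

    import signal
    import threading

    def serve():
        if hasattr(signal, 'pthread_sigmask'):
            # deliver all signals to the main thread, not this worker
            signal.pthread_sigmask(signal.SIG_BLOCK, signal.valid_signals())
        # ... accept and handle requests here ...

    t = threading.Thread(target=serve, daemon=True)
    t.start()
    t.join()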
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/resource_tracker.py b/contrib/tools/python3/src/Lib/multiprocessing/resource_tracker.py
index c9bfa9b82b..dc24abcae1 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/resource_tracker.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/resource_tracker.py
@@ -1,231 +1,231 @@
-###############################################################################
-# Server process to keep track of unlinked resources (like shared memory
-# segments, semaphores etc.) and clean them.
-#
-# On Unix we run a server process which keeps track of unlinked
-# resources. The server ignores SIGINT and SIGTERM and reads from a
-# pipe. Every other process of the program has a copy of the writable
-# end of the pipe, so we get EOF when all other processes have exited.
-# Then the server process unlinks any remaining resource names.
-#
-# This is important because there may be system limits for such resources: for
-# instance, the system only supports a limited number of named semaphores, and
-# shared-memory segments live in RAM. If a Python process leaks such a
-# resource, this resource will not be removed till the next reboot. Without
-# this resource tracker process, "killall python" would probably leave unlinked
-# resources.
-
-import os
-import signal
-import sys
-import threading
-import warnings
-
-from . import spawn
-from . import util
-
-__all__ = ['ensure_running', 'register', 'unregister']
-
-_HAVE_SIGMASK = hasattr(signal, 'pthread_sigmask')
-_IGNORED_SIGNALS = (signal.SIGINT, signal.SIGTERM)
-
-_CLEANUP_FUNCS = {
- 'noop': lambda: None,
-}
-
-if os.name == 'posix':
- import _multiprocessing
- import _posixshmem
-
- _CLEANUP_FUNCS.update({
- 'semaphore': _multiprocessing.sem_unlink,
- 'shared_memory': _posixshmem.shm_unlink,
- })
-
-
-class ResourceTracker(object):
-
- def __init__(self):
- self._lock = threading.Lock()
- self._fd = None
- self._pid = None
-
- def _stop(self):
- with self._lock:
- if self._fd is None:
- # not running
- return
-
- # closing the "alive" file descriptor stops main()
- os.close(self._fd)
- self._fd = None
-
- os.waitpid(self._pid, 0)
- self._pid = None
-
- def getfd(self):
- self.ensure_running()
- return self._fd
-
- def ensure_running(self):
- '''Make sure that resource tracker process is running.
-
- This can be run from any process. Usually a child process will use
- the resource created by its parent.'''
- with self._lock:
- if self._fd is not None:
- # resource tracker was launched before, is it still running?
- if self._check_alive():
- # => still alive
- return
- # => dead, launch it again
- os.close(self._fd)
-
- # Clean-up to avoid dangling processes.
- try:
- # _pid can be None if this process is a child from another
- # python process, which has started the resource_tracker.
- if self._pid is not None:
- os.waitpid(self._pid, 0)
- except ChildProcessError:
- # The resource_tracker has already been terminated.
- pass
- self._fd = None
- self._pid = None
-
- warnings.warn('resource_tracker: process died unexpectedly, '
- 'relaunching. Some resources might leak.')
-
- fds_to_pass = []
- try:
- fds_to_pass.append(sys.stderr.fileno())
- except Exception:
- pass
- cmd = 'from multiprocessing.resource_tracker import main;main(%d)'
- r, w = os.pipe()
- try:
- fds_to_pass.append(r)
- # process will outlive us, so no need to wait on pid
- exe = spawn.get_executable()
- args = [exe] + util._args_from_interpreter_flags()
- args += ['-c', cmd % r]
- # bpo-33613: Register a signal mask that will block the signals.
- # This signal mask will be inherited by the child that is going
- # to be spawned and will protect the child from a race condition
- # that can make the child die before it registers signal handlers
- # for SIGINT and SIGTERM. The mask is unregistered after spawning
- # the child.
- try:
- if _HAVE_SIGMASK:
- signal.pthread_sigmask(signal.SIG_BLOCK, _IGNORED_SIGNALS)
- pid = util.spawnv_passfds(exe, args, fds_to_pass)
- finally:
- if _HAVE_SIGMASK:
- signal.pthread_sigmask(signal.SIG_UNBLOCK, _IGNORED_SIGNALS)
- except:
- os.close(w)
- raise
- else:
- self._fd = w
- self._pid = pid
- finally:
- os.close(r)
-
- def _check_alive(self):
- '''Check that the pipe has not been closed by sending a probe.'''
- try:
- # We cannot use send here as it calls ensure_running, creating
- # a cycle.
- os.write(self._fd, b'PROBE:0:noop\n')
- except OSError:
- return False
- else:
- return True
-
- def register(self, name, rtype):
- '''Register name of resource with resource tracker.'''
- self._send('REGISTER', name, rtype)
-
- def unregister(self, name, rtype):
- '''Unregister name of resource with resource tracker.'''
- self._send('UNREGISTER', name, rtype)
-
- def _send(self, cmd, name, rtype):
- self.ensure_running()
- msg = '{0}:{1}:{2}\n'.format(cmd, name, rtype).encode('ascii')
- if len(name) > 512:
- # POSIX guarantees that writes to a pipe of less than PIPE_BUF
- # bytes are atomic, and that PIPE_BUF >= 512
- raise ValueError('name too long')
- nbytes = os.write(self._fd, msg)
- assert nbytes == len(msg), "nbytes {0:n} but len(msg) {1:n}".format(
- nbytes, len(msg))
-
-
-_resource_tracker = ResourceTracker()
-ensure_running = _resource_tracker.ensure_running
-register = _resource_tracker.register
-unregister = _resource_tracker.unregister
-getfd = _resource_tracker.getfd
-
-def main(fd):
- '''Run resource tracker.'''
- # protect the process from ^C and "killall python" etc
- signal.signal(signal.SIGINT, signal.SIG_IGN)
- signal.signal(signal.SIGTERM, signal.SIG_IGN)
- if _HAVE_SIGMASK:
- signal.pthread_sigmask(signal.SIG_UNBLOCK, _IGNORED_SIGNALS)
-
- for f in (sys.stdin, sys.stdout):
- try:
- f.close()
- except Exception:
- pass
-
- cache = {rtype: set() for rtype in _CLEANUP_FUNCS.keys()}
- try:
- # keep track of registered/unregistered resources
- with open(fd, 'rb') as f:
- for line in f:
- try:
- cmd, name, rtype = line.strip().decode('ascii').split(':')
- cleanup_func = _CLEANUP_FUNCS.get(rtype, None)
- if cleanup_func is None:
- raise ValueError(
- f'Cannot register {name} for automatic cleanup: '
- f'unknown resource type {rtype}')
-
- if cmd == 'REGISTER':
- cache[rtype].add(name)
- elif cmd == 'UNREGISTER':
- cache[rtype].remove(name)
- elif cmd == 'PROBE':
- pass
- else:
- raise RuntimeError('unrecognized command %r' % cmd)
- except Exception:
- try:
- sys.excepthook(*sys.exc_info())
- except:
- pass
- finally:
- # all processes have terminated; cleanup any remaining resources
- for rtype, rtype_cache in cache.items():
- if rtype_cache:
- try:
- warnings.warn('resource_tracker: There appear to be %d '
- 'leaked %s objects to clean up at shutdown' %
- (len(rtype_cache), rtype))
- except Exception:
- pass
- for name in rtype_cache:
- # For some reason the process which created and registered this
- # resource has failed to unregister it. Presumably it has
- # died. We therefore unlink it.
- try:
- try:
- _CLEANUP_FUNCS[rtype](name)
- except Exception as e:
- warnings.warn('resource_tracker: %r: %s' % (name, e))
- finally:
- pass
+###############################################################################
+# Server process to keep track of unlinked resources (like shared memory
+# segments, semaphores etc.) and clean them.
+#
+# On Unix we run a server process which keeps track of unlinked
+# resources. The server ignores SIGINT and SIGTERM and reads from a
+# pipe. Every other process of the program has a copy of the writable
+# end of the pipe, so we get EOF when all other processes have exited.
+# Then the server process unlinks any remaining resource names.
+#
+# This is important because there may be system limits for such resources: for
+# instance, the system only supports a limited number of named semaphores, and
+# shared-memory segments live in RAM. If a Python process leaks such a
+# resource, this resource will not be removed till the next reboot. Without
+# this resource tracker process, "killall python" would probably leave unlinked
+# resources.
+
+import os
+import signal
+import sys
+import threading
+import warnings
+
+from . import spawn
+from . import util
+
+__all__ = ['ensure_running', 'register', 'unregister']
+
+_HAVE_SIGMASK = hasattr(signal, 'pthread_sigmask')
+_IGNORED_SIGNALS = (signal.SIGINT, signal.SIGTERM)
+
+_CLEANUP_FUNCS = {
+ 'noop': lambda: None,
+}
+
+if os.name == 'posix':
+ import _multiprocessing
+ import _posixshmem
+
+ _CLEANUP_FUNCS.update({
+ 'semaphore': _multiprocessing.sem_unlink,
+ 'shared_memory': _posixshmem.shm_unlink,
+ })
+
+
+class ResourceTracker(object):
+
+ def __init__(self):
+ self._lock = threading.Lock()
+ self._fd = None
+ self._pid = None
+
+ def _stop(self):
+ with self._lock:
+ if self._fd is None:
+ # not running
+ return
+
+ # closing the "alive" file descriptor stops main()
+ os.close(self._fd)
+ self._fd = None
+
+ os.waitpid(self._pid, 0)
+ self._pid = None
+
+ def getfd(self):
+ self.ensure_running()
+ return self._fd
+
+ def ensure_running(self):
+ '''Make sure that resource tracker process is running.
+
+ This can be run from any process. Usually a child process will use
+ the resource created by its parent.'''
+ with self._lock:
+ if self._fd is not None:
+ # resource tracker was launched before, is it still running?
+ if self._check_alive():
+ # => still alive
+ return
+ # => dead, launch it again
+ os.close(self._fd)
+
+ # Clean-up to avoid dangling processes.
+ try:
+ # _pid can be None if this process is a child from another
+ # python process, which has started the resource_tracker.
+ if self._pid is not None:
+ os.waitpid(self._pid, 0)
+ except ChildProcessError:
+ # The resource_tracker has already been terminated.
+ pass
+ self._fd = None
+ self._pid = None
+
+ warnings.warn('resource_tracker: process died unexpectedly, '
+ 'relaunching. Some resources might leak.')
+
+ fds_to_pass = []
+ try:
+ fds_to_pass.append(sys.stderr.fileno())
+ except Exception:
+ pass
+ cmd = 'from multiprocessing.resource_tracker import main;main(%d)'
+ r, w = os.pipe()
+ try:
+ fds_to_pass.append(r)
+ # process will outlive us, so no need to wait on pid
+ exe = spawn.get_executable()
+ args = [exe] + util._args_from_interpreter_flags()
+ args += ['-c', cmd % r]
+ # bpo-33613: Register a signal mask that will block the signals.
+ # This signal mask will be inherited by the child that is going
+ # to be spawned and will protect the child from a race condition
+ # that can make the child die before it registers signal handlers
+ # for SIGINT and SIGTERM. The mask is unregistered after spawning
+ # the child.
+ try:
+ if _HAVE_SIGMASK:
+ signal.pthread_sigmask(signal.SIG_BLOCK, _IGNORED_SIGNALS)
+ pid = util.spawnv_passfds(exe, args, fds_to_pass)
+ finally:
+ if _HAVE_SIGMASK:
+ signal.pthread_sigmask(signal.SIG_UNBLOCK, _IGNORED_SIGNALS)
+ except:
+ os.close(w)
+ raise
+ else:
+ self._fd = w
+ self._pid = pid
+ finally:
+ os.close(r)
+
+ def _check_alive(self):
+ '''Check that the pipe has not been closed by sending a probe.'''
+ try:
+ # We cannot use send here as it calls ensure_running, creating
+ # a cycle.
+ os.write(self._fd, b'PROBE:0:noop\n')
+ except OSError:
+ return False
+ else:
+ return True
+
+ def register(self, name, rtype):
+ '''Register name of resource with resource tracker.'''
+ self._send('REGISTER', name, rtype)
+
+ def unregister(self, name, rtype):
+ '''Unregister name of resource with resource tracker.'''
+ self._send('UNREGISTER', name, rtype)
+
+ def _send(self, cmd, name, rtype):
+ self.ensure_running()
+ msg = '{0}:{1}:{2}\n'.format(cmd, name, rtype).encode('ascii')
+ if len(name) > 512:
+ # POSIX guarantees that writes to a pipe of less than PIPE_BUF
+ # bytes are atomic, and that PIPE_BUF >= 512
+ raise ValueError('name too long')
+ nbytes = os.write(self._fd, msg)
+ assert nbytes == len(msg), "nbytes {0:n} but len(msg) {1:n}".format(
+ nbytes, len(msg))
+
+
+_resource_tracker = ResourceTracker()
+ensure_running = _resource_tracker.ensure_running
+register = _resource_tracker.register
+unregister = _resource_tracker.unregister
+getfd = _resource_tracker.getfd
+
+def main(fd):
+ '''Run resource tracker.'''
+ # protect the process from ^C and "killall python" etc
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+ signal.signal(signal.SIGTERM, signal.SIG_IGN)
+ if _HAVE_SIGMASK:
+ signal.pthread_sigmask(signal.SIG_UNBLOCK, _IGNORED_SIGNALS)
+
+ for f in (sys.stdin, sys.stdout):
+ try:
+ f.close()
+ except Exception:
+ pass
+
+ cache = {rtype: set() for rtype in _CLEANUP_FUNCS.keys()}
+ try:
+ # keep track of registered/unregistered resources
+ with open(fd, 'rb') as f:
+ for line in f:
+ try:
+ cmd, name, rtype = line.strip().decode('ascii').split(':')
+ cleanup_func = _CLEANUP_FUNCS.get(rtype, None)
+ if cleanup_func is None:
+ raise ValueError(
+ f'Cannot register {name} for automatic cleanup: '
+ f'unknown resource type {rtype}')
+
+ if cmd == 'REGISTER':
+ cache[rtype].add(name)
+ elif cmd == 'UNREGISTER':
+ cache[rtype].remove(name)
+ elif cmd == 'PROBE':
+ pass
+ else:
+ raise RuntimeError('unrecognized command %r' % cmd)
+ except Exception:
+ try:
+ sys.excepthook(*sys.exc_info())
+ except:
+ pass
+ finally:
+ # all processes have terminated; cleanup any remaining resources
+ for rtype, rtype_cache in cache.items():
+ if rtype_cache:
+ try:
+ warnings.warn('resource_tracker: There appear to be %d '
+ 'leaked %s objects to clean up at shutdown' %
+ (len(rtype_cache), rtype))
+ except Exception:
+ pass
+ for name in rtype_cache:
+ # For some reason the process which created and registered this
+ # resource has failed to unregister it. Presumably it has
+ # died. We therefore unlink it.
+ try:
+ try:
+ _CLEANUP_FUNCS[rtype](name)
+ except Exception as e:
+ warnings.warn('resource_tracker: %r: %s' % (name, e))
+ finally:
+ pass
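
The tracker's wire format, parsed by main() above, is one ASCII line per command in the shape CMD:NAME:RTYPE. A self-contained sketch of that dispatch (the 'noop' resource type mirrors _CLEANUP_FUNCS; the helper name is illustrative):

    def handle(line, cache):
        # line is e.g. b'REGISTER:/psm_abcd:shared_memory\n'
        cmd, name, rtype = line.strip().decode('ascii').split(':')
        if cmd == 'REGISTER':
            cache.setdefault(rtype, set()).add(name)
        elif cmd == 'UNREGISTER':
            cache.get(rtype, set()).discard(name)
        elif cmd == 'PROBE':
            pass                      # liveness check only
        else:
            raise RuntimeError('unrecognized command %r' % cmd)

    cache = {}
    handle(b'REGISTER:/psm_demo:noop\n', cache)
    handle(b'PROBE:0:noop\n', cache)
    handle(b'UNREGISTER:/psm_demo:noop\n', cache)
    print(cache)                      # {'noop': set()}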
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/shared_memory.py b/contrib/tools/python3/src/Lib/multiprocessing/shared_memory.py
index 122b3fcebf..db0516a993 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/shared_memory.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/shared_memory.py
@@ -1,532 +1,532 @@
-"""Provides shared memory for direct access across processes.
-
-The API of this package is currently provisional. Refer to the
-documentation for details.
-"""
-
-
-__all__ = [ 'SharedMemory', 'ShareableList' ]
-
-
-from functools import partial
-import mmap
-import os
-import errno
-import struct
-import secrets
-import types
-
-if os.name == "nt":
- import _winapi
- _USE_POSIX = False
-else:
- import _posixshmem
- _USE_POSIX = True
-
-
-_O_CREX = os.O_CREAT | os.O_EXCL
-
-# FreeBSD (and perhaps other BSDs) limit names to 14 characters.
-_SHM_SAFE_NAME_LENGTH = 14
-
-# Shared memory block name prefix
-if _USE_POSIX:
- _SHM_NAME_PREFIX = '/psm_'
-else:
- _SHM_NAME_PREFIX = 'wnsm_'
-
-
-def _make_filename():
- "Create a random filename for the shared memory object."
- # number of random bytes to use for name
- nbytes = (_SHM_SAFE_NAME_LENGTH - len(_SHM_NAME_PREFIX)) // 2
- assert nbytes >= 2, '_SHM_NAME_PREFIX too long'
- name = _SHM_NAME_PREFIX + secrets.token_hex(nbytes)
- assert len(name) <= _SHM_SAFE_NAME_LENGTH
- return name
-
-
-class SharedMemory:
- """Creates a new shared memory block or attaches to an existing
- shared memory block.
-
- Every shared memory block is assigned a unique name. This enables
- one process to create a shared memory block with a particular name
- so that a different process can attach to that same shared memory
- block using that same name.
-
- As a resource for sharing data across processes, shared memory blocks
- may outlive the original process that created them. When one process
- no longer needs access to a shared memory block that might still be
- needed by other processes, the close() method should be called.
- When a shared memory block is no longer needed by any process, the
- unlink() method should be called to ensure proper cleanup."""
-
- # Defaults; enables close() and unlink() to run without errors.
- _name = None
- _fd = -1
- _mmap = None
- _buf = None
- _flags = os.O_RDWR
- _mode = 0o600
- _prepend_leading_slash = True if _USE_POSIX else False
-
- def __init__(self, name=None, create=False, size=0):
- if not size >= 0:
- raise ValueError("'size' must be a positive integer")
- if create:
- self._flags = _O_CREX | os.O_RDWR
- if size == 0:
- raise ValueError("'size' must be a positive number different from zero")
- if name is None and not self._flags & os.O_EXCL:
- raise ValueError("'name' can only be None if create=True")
-
- if _USE_POSIX:
-
- # POSIX Shared Memory
-
- if name is None:
- while True:
- name = _make_filename()
- try:
- self._fd = _posixshmem.shm_open(
- name,
- self._flags,
- mode=self._mode
- )
- except FileExistsError:
- continue
- self._name = name
- break
- else:
- name = "/" + name if self._prepend_leading_slash else name
- self._fd = _posixshmem.shm_open(
- name,
- self._flags,
- mode=self._mode
- )
- self._name = name
- try:
- if create and size:
- os.ftruncate(self._fd, size)
- stats = os.fstat(self._fd)
- size = stats.st_size
- self._mmap = mmap.mmap(self._fd, size)
- except OSError:
- self.unlink()
- raise
-
- from .resource_tracker import register
- register(self._name, "shared_memory")
-
- else:
-
- # Windows Named Shared Memory
-
- if create:
- while True:
- temp_name = _make_filename() if name is None else name
- # Create and reserve shared memory block with this name
- # until it can be attached to by mmap.
- h_map = _winapi.CreateFileMapping(
- _winapi.INVALID_HANDLE_VALUE,
- _winapi.NULL,
- _winapi.PAGE_READWRITE,
- (size >> 32) & 0xFFFFFFFF,
- size & 0xFFFFFFFF,
- temp_name
- )
- try:
- last_error_code = _winapi.GetLastError()
- if last_error_code == _winapi.ERROR_ALREADY_EXISTS:
- if name is not None:
- raise FileExistsError(
- errno.EEXIST,
- os.strerror(errno.EEXIST),
- name,
- _winapi.ERROR_ALREADY_EXISTS
- )
- else:
- continue
- self._mmap = mmap.mmap(-1, size, tagname=temp_name)
- finally:
- _winapi.CloseHandle(h_map)
- self._name = temp_name
- break
-
- else:
- self._name = name
- # Dynamically determine the existing named shared memory
- # block's size which is likely a multiple of mmap.PAGESIZE.
- h_map = _winapi.OpenFileMapping(
- _winapi.FILE_MAP_READ,
- False,
- name
- )
- try:
- p_buf = _winapi.MapViewOfFile(
- h_map,
- _winapi.FILE_MAP_READ,
- 0,
- 0,
- 0
- )
- finally:
- _winapi.CloseHandle(h_map)
- size = _winapi.VirtualQuerySize(p_buf)
- self._mmap = mmap.mmap(-1, size, tagname=name)
-
- self._size = size
- self._buf = memoryview(self._mmap)
-
- def __del__(self):
- try:
- self.close()
- except OSError:
- pass
-
- def __reduce__(self):
- return (
- self.__class__,
- (
- self.name,
- False,
- self.size,
- ),
- )
-
- def __repr__(self):
- return f'{self.__class__.__name__}({self.name!r}, size={self.size})'
-
- @property
- def buf(self):
- "A memoryview of contents of the shared memory block."
- return self._buf
-
- @property
- def name(self):
- "Unique name that identifies the shared memory block."
- reported_name = self._name
- if _USE_POSIX and self._prepend_leading_slash:
- if self._name.startswith("/"):
- reported_name = self._name[1:]
- return reported_name
-
- @property
- def size(self):
- "Size in bytes."
- return self._size
-
- def close(self):
- """Closes access to the shared memory from this instance but does
- not destroy the shared memory block."""
- if self._buf is not None:
- self._buf.release()
- self._buf = None
- if self._mmap is not None:
- self._mmap.close()
- self._mmap = None
- if _USE_POSIX and self._fd >= 0:
- os.close(self._fd)
- self._fd = -1
-
- def unlink(self):
- """Requests that the underlying shared memory block be destroyed.
-
- In order to ensure proper cleanup of resources, unlink should be
- called once (and only once) across all processes which have access
- to the shared memory block."""
- if _USE_POSIX and self._name:
- from .resource_tracker import unregister
- _posixshmem.shm_unlink(self._name)
- unregister(self._name, "shared_memory")
-
-
-_encoding = "utf8"
-
-class ShareableList:
- """Pattern for a mutable list-like object shareable via a shared
- memory block. It differs from the built-in list type in that these
- lists cannot change their overall length (i.e. no append, insert,
- etc.)
-
- Because values are packed into a memoryview as bytes, the struct
- packing format for any storable value must require no more than 8
- characters to describe its format."""
-
- # The shared memory area is organized as follows:
- # - 8 bytes: number of items (N) as a 64-bit integer
- # - (N + 1) * 8 bytes: offsets of each element from the start of the
- # data area
- # - K bytes: the data area storing item values (with encoding and size
- # depending on their respective types)
- # - N * 8 bytes: `struct` format string for each element
- # - N bytes: index into _back_transforms_mapping for each element
- # (for reconstructing the corresponding Python value)
- _types_mapping = {
- int: "q",
- float: "d",
- bool: "xxxxxxx?",
- str: "%ds",
- bytes: "%ds",
- None.__class__: "xxxxxx?x",
- }
- _alignment = 8
- _back_transforms_mapping = {
- 0: lambda value: value, # int, float, bool
- 1: lambda value: value.rstrip(b'\x00').decode(_encoding), # str
- 2: lambda value: value.rstrip(b'\x00'), # bytes
- 3: lambda _value: None, # None
- }
-
- @staticmethod
- def _extract_recreation_code(value):
- """Used in concert with _back_transforms_mapping to convert values
- into the appropriate Python objects when retrieving them from
- the list as well as when storing them."""
- if not isinstance(value, (str, bytes, None.__class__)):
- return 0
- elif isinstance(value, str):
- return 1
- elif isinstance(value, bytes):
- return 2
- else:
- return 3 # NoneType
-
- def __init__(self, sequence=None, *, name=None):
- if name is None or sequence is not None:
- sequence = sequence or ()
- _formats = [
- self._types_mapping[type(item)]
- if not isinstance(item, (str, bytes))
- else self._types_mapping[type(item)] % (
- self._alignment * (len(item) // self._alignment + 1),
- )
- for item in sequence
- ]
- self._list_len = len(_formats)
- assert sum(len(fmt) <= 8 for fmt in _formats) == self._list_len
- offset = 0
- # The offsets of each list element into the shared memory's
- # data area (0 meaning the start of the data area, not the start
- # of the shared memory area).
- self._allocated_offsets = [0]
- for fmt in _formats:
- offset += self._alignment if fmt[-1] != "s" else int(fmt[:-1])
- self._allocated_offsets.append(offset)
- _recreation_codes = [
- self._extract_recreation_code(item) for item in sequence
- ]
- requested_size = struct.calcsize(
- "q" + self._format_size_metainfo +
- "".join(_formats) +
- self._format_packing_metainfo +
- self._format_back_transform_codes
- )
-
- self.shm = SharedMemory(name, create=True, size=requested_size)
- else:
- self.shm = SharedMemory(name)
-
- if sequence is not None:
- _enc = _encoding
- struct.pack_into(
- "q" + self._format_size_metainfo,
- self.shm.buf,
- 0,
- self._list_len,
- *(self._allocated_offsets)
- )
- struct.pack_into(
- "".join(_formats),
- self.shm.buf,
- self._offset_data_start,
- *(v.encode(_enc) if isinstance(v, str) else v for v in sequence)
- )
- struct.pack_into(
- self._format_packing_metainfo,
- self.shm.buf,
- self._offset_packing_formats,
- *(v.encode(_enc) for v in _formats)
- )
- struct.pack_into(
- self._format_back_transform_codes,
- self.shm.buf,
- self._offset_back_transform_codes,
- *(_recreation_codes)
- )
-
- else:
- self._list_len = len(self) # Obtains size from offset 0 in buffer.
- self._allocated_offsets = list(
- struct.unpack_from(
- self._format_size_metainfo,
- self.shm.buf,
- 1 * 8
- )
- )
-
- def _get_packing_format(self, position):
- "Gets the packing format for a single value stored in the list."
- position = position if position >= 0 else position + self._list_len
- if (position >= self._list_len) or (self._list_len < 0):
- raise IndexError("Requested position out of range.")
-
- v = struct.unpack_from(
- "8s",
- self.shm.buf,
- self._offset_packing_formats + position * 8
- )[0]
- fmt = v.rstrip(b'\x00')
- fmt_as_str = fmt.decode(_encoding)
-
- return fmt_as_str
-
- def _get_back_transform(self, position):
- "Gets the back transformation function for a single value."
-
- if (position >= self._list_len) or (self._list_len < 0):
- raise IndexError("Requested position out of range.")
-
- transform_code = struct.unpack_from(
- "b",
- self.shm.buf,
- self._offset_back_transform_codes + position
- )[0]
- transform_function = self._back_transforms_mapping[transform_code]
-
- return transform_function
-
- def _set_packing_format_and_transform(self, position, fmt_as_str, value):
- """Sets the packing format and back transformation code for a
- single value in the list at the specified position."""
-
- if (position >= self._list_len) or (self._list_len < 0):
- raise IndexError("Requested position out of range.")
-
- struct.pack_into(
- "8s",
- self.shm.buf,
- self._offset_packing_formats + position * 8,
- fmt_as_str.encode(_encoding)
- )
-
- transform_code = self._extract_recreation_code(value)
- struct.pack_into(
- "b",
- self.shm.buf,
- self._offset_back_transform_codes + position,
- transform_code
- )
-
- def __getitem__(self, position):
- position = position if position >= 0 else position + self._list_len
- try:
- offset = self._offset_data_start + self._allocated_offsets[position]
- (v,) = struct.unpack_from(
- self._get_packing_format(position),
- self.shm.buf,
- offset
- )
- except IndexError:
- raise IndexError("index out of range")
-
- back_transform = self._get_back_transform(position)
- v = back_transform(v)
-
- return v
-
- def __setitem__(self, position, value):
- position = position if position >= 0 else position + self._list_len
- try:
- item_offset = self._allocated_offsets[position]
- offset = self._offset_data_start + item_offset
- current_format = self._get_packing_format(position)
- except IndexError:
- raise IndexError("assignment index out of range")
-
- if not isinstance(value, (str, bytes)):
- new_format = self._types_mapping[type(value)]
- encoded_value = value
- else:
- allocated_length = self._allocated_offsets[position + 1] - item_offset
-
- encoded_value = (value.encode(_encoding)
- if isinstance(value, str) else value)
- if len(encoded_value) > allocated_length:
- raise ValueError("bytes/str item exceeds available storage")
- if current_format[-1] == "s":
- new_format = current_format
- else:
- new_format = self._types_mapping[str] % (
- allocated_length,
- )
-
- self._set_packing_format_and_transform(
- position,
- new_format,
- value
- )
- struct.pack_into(new_format, self.shm.buf, offset, encoded_value)
-
- def __reduce__(self):
- return partial(self.__class__, name=self.shm.name), ()
-
- def __len__(self):
- return struct.unpack_from("q", self.shm.buf, 0)[0]
-
- def __repr__(self):
- return f'{self.__class__.__name__}({list(self)}, name={self.shm.name!r})'
-
- @property
- def format(self):
- "The struct packing format used by all currently stored items."
- return "".join(
- self._get_packing_format(i) for i in range(self._list_len)
- )
-
- @property
- def _format_size_metainfo(self):
- "The struct packing format used for the items' storage offsets."
- return "q" * (self._list_len + 1)
-
- @property
- def _format_packing_metainfo(self):
- "The struct packing format used for the items' packing formats."
- return "8s" * self._list_len
-
- @property
- def _format_back_transform_codes(self):
- "The struct packing format used for the items' back transforms."
- return "b" * self._list_len
-
- @property
- def _offset_data_start(self):
- # - 8 bytes for the list length
- # - (N + 1) * 8 bytes for the element offsets
- return (self._list_len + 2) * 8
-
- @property
- def _offset_packing_formats(self):
- return self._offset_data_start + self._allocated_offsets[-1]
-
- @property
- def _offset_back_transform_codes(self):
- return self._offset_packing_formats + self._list_len * 8
-
- def count(self, value):
- "L.count(value) -> integer -- return number of occurrences of value."
-
- return sum(value == entry for entry in self)
-
- def index(self, value):
- """L.index(value) -> integer -- return first index of value.
- Raises ValueError if the value is not present."""
-
- for position, entry in enumerate(self):
- if value == entry:
- return position
- else:
- raise ValueError(f"{value!r} not in this container")
-
- __class_getitem__ = classmethod(types.GenericAlias)
+"""Provides shared memory for direct access across processes.
+
+The API of this package is currently provisional. Refer to the
+documentation for details.
+"""
+
+
+__all__ = [ 'SharedMemory', 'ShareableList' ]
+
+
+from functools import partial
+import mmap
+import os
+import errno
+import struct
+import secrets
+import types
+
+if os.name == "nt":
+ import _winapi
+ _USE_POSIX = False
+else:
+ import _posixshmem
+ _USE_POSIX = True
+
+
+_O_CREX = os.O_CREAT | os.O_EXCL
+
+# FreeBSD (and perhaps other BSDs) limit names to 14 characters.
+_SHM_SAFE_NAME_LENGTH = 14
+
+# Shared memory block name prefix
+if _USE_POSIX:
+ _SHM_NAME_PREFIX = '/psm_'
+else:
+ _SHM_NAME_PREFIX = 'wnsm_'
+
+
+def _make_filename():
+ "Create a random filename for the shared memory object."
+ # number of random bytes to use for name
+ nbytes = (_SHM_SAFE_NAME_LENGTH - len(_SHM_NAME_PREFIX)) // 2
+ assert nbytes >= 2, '_SHM_NAME_PREFIX too long'
+ name = _SHM_NAME_PREFIX + secrets.token_hex(nbytes)
+ assert len(name) <= _SHM_SAFE_NAME_LENGTH
+ return name
+
+
+class SharedMemory:
+ """Creates a new shared memory block or attaches to an existing
+ shared memory block.
+
+ Every shared memory block is assigned a unique name. This enables
+ one process to create a shared memory block with a particular name
+ so that a different process can attach to that same shared memory
+ block using that same name.
+
+ As a resource for sharing data across processes, shared memory blocks
+ may outlive the original process that created them. When one process
+ no longer needs access to a shared memory block that might still be
+ needed by other processes, the close() method should be called.
+ When a shared memory block is no longer needed by any process, the
+ unlink() method should be called to ensure proper cleanup."""
+
+ # Defaults; enables close() and unlink() to run without errors.
+ _name = None
+ _fd = -1
+ _mmap = None
+ _buf = None
+ _flags = os.O_RDWR
+ _mode = 0o600
+ _prepend_leading_slash = True if _USE_POSIX else False
+
+ def __init__(self, name=None, create=False, size=0):
+ if not size >= 0:
+ raise ValueError("'size' must be a positive integer")
+ if create:
+ self._flags = _O_CREX | os.O_RDWR
+ if size == 0:
+ raise ValueError("'size' must be a positive number different from zero")
+ if name is None and not self._flags & os.O_EXCL:
+ raise ValueError("'name' can only be None if create=True")
+
+ if _USE_POSIX:
+
+ # POSIX Shared Memory
+
+ if name is None:
+ while True:
+ name = _make_filename()
+ try:
+ self._fd = _posixshmem.shm_open(
+ name,
+ self._flags,
+ mode=self._mode
+ )
+ except FileExistsError:
+ continue
+ self._name = name
+ break
+ else:
+ name = "/" + name if self._prepend_leading_slash else name
+ self._fd = _posixshmem.shm_open(
+ name,
+ self._flags,
+ mode=self._mode
+ )
+ self._name = name
+ try:
+ if create and size:
+ os.ftruncate(self._fd, size)
+ stats = os.fstat(self._fd)
+ size = stats.st_size
+ self._mmap = mmap.mmap(self._fd, size)
+ except OSError:
+ self.unlink()
+ raise
+
+ from .resource_tracker import register
+ register(self._name, "shared_memory")
+
+ else:
+
+ # Windows Named Shared Memory
+
+ if create:
+ while True:
+ temp_name = _make_filename() if name is None else name
+ # Create and reserve shared memory block with this name
+ # until it can be attached to by mmap.
+ h_map = _winapi.CreateFileMapping(
+ _winapi.INVALID_HANDLE_VALUE,
+ _winapi.NULL,
+ _winapi.PAGE_READWRITE,
+ (size >> 32) & 0xFFFFFFFF,
+ size & 0xFFFFFFFF,
+ temp_name
+ )
+ try:
+ last_error_code = _winapi.GetLastError()
+ if last_error_code == _winapi.ERROR_ALREADY_EXISTS:
+ if name is not None:
+ raise FileExistsError(
+ errno.EEXIST,
+ os.strerror(errno.EEXIST),
+ name,
+ _winapi.ERROR_ALREADY_EXISTS
+ )
+ else:
+ continue
+ self._mmap = mmap.mmap(-1, size, tagname=temp_name)
+ finally:
+ _winapi.CloseHandle(h_map)
+ self._name = temp_name
+ break
+
+ else:
+ self._name = name
+ # Dynamically determine the existing named shared memory
+ # block's size which is likely a multiple of mmap.PAGESIZE.
+ h_map = _winapi.OpenFileMapping(
+ _winapi.FILE_MAP_READ,
+ False,
+ name
+ )
+ try:
+ p_buf = _winapi.MapViewOfFile(
+ h_map,
+ _winapi.FILE_MAP_READ,
+ 0,
+ 0,
+ 0
+ )
+ finally:
+ _winapi.CloseHandle(h_map)
+ size = _winapi.VirtualQuerySize(p_buf)
+ self._mmap = mmap.mmap(-1, size, tagname=name)
+
+ self._size = size
+ self._buf = memoryview(self._mmap)
+
+ def __del__(self):
+ try:
+ self.close()
+ except OSError:
+ pass
+
+ def __reduce__(self):
+ return (
+ self.__class__,
+ (
+ self.name,
+ False,
+ self.size,
+ ),
+ )
+
+ def __repr__(self):
+ return f'{self.__class__.__name__}({self.name!r}, size={self.size})'
+
+ @property
+ def buf(self):
+ "A memoryview of contents of the shared memory block."
+ return self._buf
+
+ @property
+ def name(self):
+ "Unique name that identifies the shared memory block."
+ reported_name = self._name
+ if _USE_POSIX and self._prepend_leading_slash:
+ if self._name.startswith("/"):
+ reported_name = self._name[1:]
+ return reported_name
+
+ @property
+ def size(self):
+ "Size in bytes."
+ return self._size
+
+ def close(self):
+ """Closes access to the shared memory from this instance but does
+ not destroy the shared memory block."""
+ if self._buf is not None:
+ self._buf.release()
+ self._buf = None
+ if self._mmap is not None:
+ self._mmap.close()
+ self._mmap = None
+ if _USE_POSIX and self._fd >= 0:
+ os.close(self._fd)
+ self._fd = -1
+
+ def unlink(self):
+ """Requests that the underlying shared memory block be destroyed.
+
+ In order to ensure proper cleanup of resources, unlink should be
+ called once (and only once) across all processes which have access
+ to the shared memory block."""
+ if _USE_POSIX and self._name:
+ from .resource_tracker import unregister
+ _posixshmem.shm_unlink(self._name)
+ unregister(self._name, "shared_memory")
+
+
+_encoding = "utf8"
+
+class ShareableList:
+ """Pattern for a mutable list-like object shareable via a shared
+ memory block. It differs from the built-in list type in that these
+ lists cannot change their overall length (i.e. no append, insert,
+ etc.)
+
+ Because values are packed into a memoryview as bytes, the struct
+ packing format for any storable value must require no more than 8
+ characters to describe its format."""
+
+ # The shared memory area is organized as follows:
+ # - 8 bytes: number of items (N) as a 64-bit integer
+ # - (N + 1) * 8 bytes: offsets of each element from the start of the
+ # data area
+ # - K bytes: the data area storing item values (with encoding and size
+ # depending on their respective types)
+ # - N * 8 bytes: `struct` format string for each element
+ # - N bytes: index into _back_transforms_mapping for each element
+ # (for reconstructing the corresponding Python value)
+ _types_mapping = {
+ int: "q",
+ float: "d",
+ bool: "xxxxxxx?",
+ str: "%ds",
+ bytes: "%ds",
+ None.__class__: "xxxxxx?x",
+ }
+ _alignment = 8
+ _back_transforms_mapping = {
+ 0: lambda value: value, # int, float, bool
+ 1: lambda value: value.rstrip(b'\x00').decode(_encoding), # str
+ 2: lambda value: value.rstrip(b'\x00'), # bytes
+ 3: lambda _value: None, # None
+ }
+
+ @staticmethod
+ def _extract_recreation_code(value):
+ """Used in concert with _back_transforms_mapping to convert values
+ into the appropriate Python objects when retrieving them from
+ the list as well as when storing them."""
+ if not isinstance(value, (str, bytes, None.__class__)):
+ return 0
+ elif isinstance(value, str):
+ return 1
+ elif isinstance(value, bytes):
+ return 2
+ else:
+ return 3 # NoneType
+
+ def __init__(self, sequence=None, *, name=None):
+ if name is None or sequence is not None:
+ sequence = sequence or ()
+ _formats = [
+ self._types_mapping[type(item)]
+ if not isinstance(item, (str, bytes))
+ else self._types_mapping[type(item)] % (
+ self._alignment * (len(item) // self._alignment + 1),
+ )
+ for item in sequence
+ ]
+ self._list_len = len(_formats)
+ assert sum(len(fmt) <= 8 for fmt in _formats) == self._list_len
+ offset = 0
+ # The offsets of each list element into the shared memory's
+ # data area (0 meaning the start of the data area, not the start
+ # of the shared memory area).
+ self._allocated_offsets = [0]
+ for fmt in _formats:
+ offset += self._alignment if fmt[-1] != "s" else int(fmt[:-1])
+ self._allocated_offsets.append(offset)
+ _recreation_codes = [
+ self._extract_recreation_code(item) for item in sequence
+ ]
+ requested_size = struct.calcsize(
+ "q" + self._format_size_metainfo +
+ "".join(_formats) +
+ self._format_packing_metainfo +
+ self._format_back_transform_codes
+ )
+
+ self.shm = SharedMemory(name, create=True, size=requested_size)
+ else:
+ self.shm = SharedMemory(name)
+
+ if sequence is not None:
+ _enc = _encoding
+ struct.pack_into(
+ "q" + self._format_size_metainfo,
+ self.shm.buf,
+ 0,
+ self._list_len,
+ *(self._allocated_offsets)
+ )
+ struct.pack_into(
+ "".join(_formats),
+ self.shm.buf,
+ self._offset_data_start,
+ *(v.encode(_enc) if isinstance(v, str) else v for v in sequence)
+ )
+ struct.pack_into(
+ self._format_packing_metainfo,
+ self.shm.buf,
+ self._offset_packing_formats,
+ *(v.encode(_enc) for v in _formats)
+ )
+ struct.pack_into(
+ self._format_back_transform_codes,
+ self.shm.buf,
+ self._offset_back_transform_codes,
+ *(_recreation_codes)
+ )
+
+ else:
+ self._list_len = len(self) # Obtains size from offset 0 in buffer.
+ self._allocated_offsets = list(
+ struct.unpack_from(
+ self._format_size_metainfo,
+ self.shm.buf,
+ 1 * 8
+ )
+ )
+
+ def _get_packing_format(self, position):
+ "Gets the packing format for a single value stored in the list."
+ position = position if position >= 0 else position + self._list_len
+ if (position >= self._list_len) or (position < 0):
+ raise IndexError("Requested position out of range.")
+
+ v = struct.unpack_from(
+ "8s",
+ self.shm.buf,
+ self._offset_packing_formats + position * 8
+ )[0]
+ fmt = v.rstrip(b'\x00')
+ fmt_as_str = fmt.decode(_encoding)
+
+ return fmt_as_str
+
+ def _get_back_transform(self, position):
+ "Gets the back transformation function for a single value."
+
+ if (position >= self._list_len) or (position < 0):
+ raise IndexError("Requested position out of range.")
+
+ transform_code = struct.unpack_from(
+ "b",
+ self.shm.buf,
+ self._offset_back_transform_codes + position
+ )[0]
+ transform_function = self._back_transforms_mapping[transform_code]
+
+ return transform_function
+
+ def _set_packing_format_and_transform(self, position, fmt_as_str, value):
+ """Sets the packing format and back transformation code for a
+ single value in the list at the specified position."""
+
+ if (position >= self._list_len) or (position < 0):
+ raise IndexError("Requested position out of range.")
+
+ struct.pack_into(
+ "8s",
+ self.shm.buf,
+ self._offset_packing_formats + position * 8,
+ fmt_as_str.encode(_encoding)
+ )
+
+ transform_code = self._extract_recreation_code(value)
+ struct.pack_into(
+ "b",
+ self.shm.buf,
+ self._offset_back_transform_codes + position,
+ transform_code
+ )
+
+ def __getitem__(self, position):
+ position = position if position >= 0 else position + self._list_len
+ try:
+ offset = self._offset_data_start + self._allocated_offsets[position]
+ (v,) = struct.unpack_from(
+ self._get_packing_format(position),
+ self.shm.buf,
+ offset
+ )
+ except IndexError:
+ raise IndexError("index out of range")
+
+ back_transform = self._get_back_transform(position)
+ v = back_transform(v)
+
+ return v
+
+ def __setitem__(self, position, value):
+ position = position if position >= 0 else position + self._list_len
+ try:
+ item_offset = self._allocated_offsets[position]
+ offset = self._offset_data_start + item_offset
+ current_format = self._get_packing_format(position)
+ except IndexError:
+ raise IndexError("assignment index out of range")
+
+ if not isinstance(value, (str, bytes)):
+ new_format = self._types_mapping[type(value)]
+ encoded_value = value
+ else:
+ allocated_length = self._allocated_offsets[position + 1] - item_offset
+
+ encoded_value = (value.encode(_encoding)
+ if isinstance(value, str) else value)
+ if len(encoded_value) > allocated_length:
+ raise ValueError("bytes/str item exceeds available storage")
+ if current_format[-1] == "s":
+ new_format = current_format
+ else:
+ new_format = self._types_mapping[str] % (
+ allocated_length,
+ )
+
+ self._set_packing_format_and_transform(
+ position,
+ new_format,
+ value
+ )
+ struct.pack_into(new_format, self.shm.buf, offset, encoded_value)
+
+ def __reduce__(self):
+ return partial(self.__class__, name=self.shm.name), ()
+
+ def __len__(self):
+ return struct.unpack_from("q", self.shm.buf, 0)[0]
+
+ def __repr__(self):
+ return f'{self.__class__.__name__}({list(self)}, name={self.shm.name!r})'
+
+ @property
+ def format(self):
+ "The struct packing format used by all currently stored items."
+ return "".join(
+ self._get_packing_format(i) for i in range(self._list_len)
+ )
+
+ @property
+ def _format_size_metainfo(self):
+ "The struct packing format used for the items' storage offsets."
+ return "q" * (self._list_len + 1)
+
+ @property
+ def _format_packing_metainfo(self):
+ "The struct packing format used for the items' packing formats."
+ return "8s" * self._list_len
+
+ @property
+ def _format_back_transform_codes(self):
+ "The struct packing format used for the items' back transforms."
+ return "b" * self._list_len
+
+ @property
+ def _offset_data_start(self):
+ # - 8 bytes for the list length
+ # - (N + 1) * 8 bytes for the element offsets
+ return (self._list_len + 2) * 8
+
+ @property
+ def _offset_packing_formats(self):
+ return self._offset_data_start + self._allocated_offsets[-1]
+
+ @property
+ def _offset_back_transform_codes(self):
+ return self._offset_packing_formats + self._list_len * 8
+
+ def count(self, value):
+ "L.count(value) -> integer -- return number of occurrences of value."
+
+ return sum(value == entry for entry in self)
+
+ def index(self, value):
+ """L.index(value) -> integer -- return first index of value.
+ Raises ValueError if the value is not present."""
+
+ for position, entry in enumerate(self):
+ if value == entry:
+ return position
+ else:
+ raise ValueError(f"{value!r} not in this container")
+
+ __class_getitem__ = classmethod(types.GenericAlias)
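For orientation — this is a reviewer sketch, not part of the diff — a minimal usage example of the shared_memory module vendored above, assuming the Python 3.9 stdlib API shown in the hunk:

    from multiprocessing import shared_memory

    # One process creates a named block and writes through the memoryview.
    shm = shared_memory.SharedMemory(create=True, size=16)
    shm.buf[:5] = b"hello"

    # Another process attaches to the same block by name.
    peer = shared_memory.SharedMemory(name=shm.name)
    assert bytes(peer.buf[:5]) == b"hello"

    # ShareableList packs fixed-length values into one shared block;
    # items can be replaced, but the length can never change.
    sl = shared_memory.ShareableList([1, 2.0, "three", None])
    sl[0] = 42

    # Every handle calls close(); exactly one process calls unlink().
    peer.close()
    shm.close()
    shm.unlink()
    sl.shm.close()
    sl.shm.unlink()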
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/spawn.py b/contrib/tools/python3/src/Lib/multiprocessing/spawn.py
index f7f8deb246..2ac0b653b6 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/spawn.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/spawn.py
@@ -96,28 +96,28 @@ def spawn_main(pipe_handle, parent_pid=None, tracker_fd=None):
assert is_forking(sys.argv), "Not forking"
if sys.platform == 'win32':
import msvcrt
- import _winapi
-
- if parent_pid is not None:
- source_process = _winapi.OpenProcess(
- _winapi.SYNCHRONIZE | _winapi.PROCESS_DUP_HANDLE,
- False, parent_pid)
- else:
- source_process = None
- new_handle = reduction.duplicate(pipe_handle,
- source_process=source_process)
+ import _winapi
+
+ if parent_pid is not None:
+ source_process = _winapi.OpenProcess(
+ _winapi.SYNCHRONIZE | _winapi.PROCESS_DUP_HANDLE,
+ False, parent_pid)
+ else:
+ source_process = None
+ new_handle = reduction.duplicate(pipe_handle,
+ source_process=source_process)
fd = msvcrt.open_osfhandle(new_handle, os.O_RDONLY)
- parent_sentinel = source_process
+ parent_sentinel = source_process
else:
- from . import resource_tracker
- resource_tracker._resource_tracker._fd = tracker_fd
+ from . import resource_tracker
+ resource_tracker._resource_tracker._fd = tracker_fd
fd = pipe_handle
- parent_sentinel = os.dup(pipe_handle)
- exitcode = _main(fd, parent_sentinel)
+ parent_sentinel = os.dup(pipe_handle)
+ exitcode = _main(fd, parent_sentinel)
sys.exit(exitcode)
-def _main(fd, parent_sentinel):
+def _main(fd, parent_sentinel):
with os.fdopen(fd, 'rb', closefd=True) as from_parent:
process.current_process()._inheriting = True
try:
@@ -126,7 +126,7 @@ def _main(fd, parent_sentinel):
self = reduction.pickle.load(from_parent)
finally:
del process.current_process()._inheriting
- return self._bootstrap(parent_sentinel)
+ return self._bootstrap(parent_sentinel)
def _check_not_importing_main():
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/synchronize.py b/contrib/tools/python3/src/Lib/multiprocessing/synchronize.py
index d0be48f1fd..881c823d28 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/synchronize.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/synchronize.py
@@ -76,16 +76,16 @@ class SemLock(object):
# We only get here if we are on Unix with forking
# disabled. When the object is garbage collected or the
# process shuts down we unlink the semaphore name
- from .resource_tracker import register
- register(self._semlock.name, "semaphore")
+ from .resource_tracker import register
+ register(self._semlock.name, "semaphore")
util.Finalize(self, SemLock._cleanup, (self._semlock.name,),
exitpriority=0)
@staticmethod
def _cleanup(name):
- from .resource_tracker import unregister
+ from .resource_tracker import unregister
sem_unlink(name)
- unregister(name, "semaphore")
+ unregister(name, "semaphore")
def _make_methods(self):
self.acquire = self._semlock.acquire
@@ -270,7 +270,7 @@ class Condition(object):
def notify(self, n=1):
assert self._lock._semlock._is_mine(), 'lock is not owned'
assert not self._wait_semaphore.acquire(
- False), ('notify: Should not have been able to acquire '
+ False), ('notify: Should not have been able to acquire '
+ '_wait_semaphore')
# to take account of timeouts since last notify*() we subtract
diff --git a/contrib/tools/python3/src/Lib/multiprocessing/util.py b/contrib/tools/python3/src/Lib/multiprocessing/util.py
index e94466be8e..fed9ddf6ee 100644
--- a/contrib/tools/python3/src/Lib/multiprocessing/util.py
+++ b/contrib/tools/python3/src/Lib/multiprocessing/util.py
@@ -102,42 +102,42 @@ def log_to_stderr(level=None):
_log_to_stderr = True
return _logger
-
-# Abstract socket support
-
-def _platform_supports_abstract_sockets():
- if sys.platform == "linux":
- return True
- if hasattr(sys, 'getandroidapilevel'):
- return True
- return False
-
-
-def is_abstract_socket_namespace(address):
- if not address:
- return False
- if isinstance(address, bytes):
- return address[0] == 0
- elif isinstance(address, str):
- return address[0] == "\0"
- raise TypeError('address type of {address!r} unrecognized')
-
-
-abstract_sockets_supported = _platform_supports_abstract_sockets()
-
+
+# Abstract socket support
+
+def _platform_supports_abstract_sockets():
+ if sys.platform == "linux":
+ return True
+ if hasattr(sys, 'getandroidapilevel'):
+ return True
+ return False
+
+
+def is_abstract_socket_namespace(address):
+ if not address:
+ return False
+ if isinstance(address, bytes):
+ return address[0] == 0
+ elif isinstance(address, str):
+ return address[0] == "\0"
+ raise TypeError(f'address type of {address!r} unrecognized')
+
+
+abstract_sockets_supported = _platform_supports_abstract_sockets()
+
#
# Function returning a temp directory which will be removed on exit
#
-def _remove_temp_dir(rmtree, tempdir):
- rmtree(tempdir)
-
- current_process = process.current_process()
- # current_process() can be None if the finalizer is called
- # late during Python finalization
- if current_process is not None:
- current_process._config['tempdir'] = None
-
+def _remove_temp_dir(rmtree, tempdir):
+ rmtree(tempdir)
+
+ current_process = process.current_process()
+ # current_process() can be None if the finalizer is called
+ # late during Python finalization
+ if current_process is not None:
+ current_process._config['tempdir'] = None
+
def get_temp_dir():
# get name of a temp directory which will be automatically cleaned up
tempdir = process.current_process()._config.get('tempdir')
@@ -145,10 +145,10 @@ def get_temp_dir():
import shutil, tempfile
tempdir = tempfile.mkdtemp(prefix='pymp-')
info('created temp directory %s', tempdir)
- # keep a strong reference to shutil.rmtree(), since the finalizer
- # can be called late during Python shutdown
- Finalize(None, _remove_temp_dir, args=(shutil.rmtree, tempdir),
- exitpriority=-100)
+ # keep a strong reference to shutil.rmtree(), since the finalizer
+ # can be called late during Python shutdown
+ Finalize(None, _remove_temp_dir, args=(shutil.rmtree, tempdir),
+ exitpriority=-100)
process.current_process()._config['tempdir'] = tempdir
return tempdir
@@ -261,7 +261,7 @@ class Finalize(object):
if self._kwargs:
x += ', kwargs=' + str(self._kwargs)
if self._key[0] is not None:
- x += ', exitpriority=' + str(self._key[0])
+ x += ', exitpriority=' + str(self._key[0])
return x + '>'
@@ -370,11 +370,11 @@ class ForkAwareThreadLock(object):
self._lock = threading.Lock()
self.acquire = self._lock.acquire
self.release = self._lock.release
- register_after_fork(self, ForkAwareThreadLock._at_fork_reinit)
-
- def _at_fork_reinit(self):
- self._lock._at_fork_reinit()
+ register_after_fork(self, ForkAwareThreadLock._at_fork_reinit)
+
+ def _at_fork_reinit(self):
+ self._lock._at_fork_reinit()
def __enter__(self):
return self._lock.__enter__()
@@ -467,38 +467,38 @@ def spawnv_passfds(path, args, passfds):
return _posixsubprocess.fork_exec(
args, [os.fsencode(path)], True, passfds, None, _env_list(),
-1, -1, -1, -1, -1, -1, errpipe_read, errpipe_write,
- False, False, None, None, None, -1, None)
+ False, False, None, None, None, -1, None)
finally:
os.close(errpipe_read)
os.close(errpipe_write)
-
-
-def close_fds(*fds):
- """Close each file descriptor given as an argument"""
- for fd in fds:
- os.close(fd)
-
-
-def _cleanup_tests():
- """Cleanup multiprocessing resources when multiprocessing tests
- completed."""
-
- from test import support
-
- # cleanup multiprocessing
- process._cleanup()
-
- # Stop the ForkServer process if it's running
- from multiprocessing import forkserver
- forkserver._forkserver._stop()
-
- # Stop the ResourceTracker process if it's running
- from multiprocessing import resource_tracker
- resource_tracker._resource_tracker._stop()
-
- # bpo-37421: Explicitly call _run_finalizers() to remove immediately
- # temporary directories created by multiprocessing.util.get_temp_dir().
- _run_finalizers()
- support.gc_collect()
-
- support.reap_children()
+
+
+def close_fds(*fds):
+ """Close each file descriptor given as an argument"""
+ for fd in fds:
+ os.close(fd)
+
+
+def _cleanup_tests():
+ """Cleanup multiprocessing resources when multiprocessing tests
+ completed."""
+
+ from test import support
+
+ # cleanup multiprocessing
+ process._cleanup()
+
+ # Stop the ForkServer process if it's running
+ from multiprocessing import forkserver
+ forkserver._forkserver._stop()
+
+ # Stop the ResourceTracker process if it's running
+ from multiprocessing import resource_tracker
+ resource_tracker._resource_tracker._stop()
+
+ # bpo-37421: Explicitly call _run_finalizers() to remove immediately
+ # temporary directories created by multiprocessing.util.get_temp_dir().
+ _run_finalizers()
+ support.gc_collect()
+
+ support.reap_children()
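The restored is_abstract_socket_namespace() helper classifies an address as belonging to the Linux abstract socket namespace when its first byte (or character) is NUL. A short sketch of that contract, assuming this vendored multiprocessing.util:

    from multiprocessing.util import is_abstract_socket_namespace

    assert is_abstract_socket_namespace(b"\0listener")    # abstract, bytes form
    assert is_abstract_socket_namespace("\0listener")     # abstract, str form
    assert not is_abstract_socket_namespace("/tmp/sock")  # ordinary filesystem path
    assert not is_abstract_socket_namespace(None)         # falsy address -> False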
diff --git a/contrib/tools/python3/src/Lib/nntplib.py b/contrib/tools/python3/src/Lib/nntplib.py
index f6e746e7c9..1508c4b2d3 100644
--- a/contrib/tools/python3/src/Lib/nntplib.py
+++ b/contrib/tools/python3/src/Lib/nntplib.py
@@ -67,7 +67,7 @@ import re
import socket
import collections
import datetime
-import sys
+import sys
try:
import ssl
@@ -293,7 +293,7 @@ if _have_ssl:
# The classes themselves
-class NNTP:
+class NNTP:
# UTF-8 is the character set for all NNTP commands and responses: they
# are automatically encoded (when sending) and decoded (and receiving)
# by this class.
@@ -309,18 +309,18 @@ class NNTP:
encoding = 'utf-8'
errors = 'surrogateescape'
- def __init__(self, host, port=NNTP_PORT, user=None, password=None,
- readermode=None, usenetrc=False,
- timeout=_GLOBAL_DEFAULT_TIMEOUT):
+ def __init__(self, host, port=NNTP_PORT, user=None, password=None,
+ readermode=None, usenetrc=False,
+ timeout=_GLOBAL_DEFAULT_TIMEOUT):
"""Initialize an instance. Arguments:
- - host: hostname to connect to
- - port: port to connect to (default the standard NNTP port)
- - user: username to authenticate with
- - password: password to use with username
+ - host: hostname to connect to
+ - port: port to connect to (default the standard NNTP port)
+ - user: username to authenticate with
+ - password: password to use with username
- readermode: if true, send 'mode reader' command after
connecting.
- - usenetrc: allow loading username and password from ~/.netrc file
- if not specified explicitly
+ - usenetrc: allow loading username and password from ~/.netrc file
+ if not specified explicitly
- timeout: timeout (in seconds) used for socket connections
readermode is sometimes necessary if you are connecting to an
@@ -330,24 +330,24 @@ class NNTP:
readermode.
"""
self.host = host
- self.port = port
- self.sock = self._create_socket(timeout)
- self.file = None
- try:
- self.file = self.sock.makefile("rwb")
- self._base_init(readermode)
- if user or usenetrc:
- self.login(user, password, usenetrc)
- except:
- if self.file:
- self.file.close()
- self.sock.close()
- raise
-
- def _base_init(self, readermode):
- """Partial initialization for the NNTP protocol.
- This instance method is extracted for supporting the test code.
- """
+ self.port = port
+ self.sock = self._create_socket(timeout)
+ self.file = None
+ try:
+ self.file = self.sock.makefile("rwb")
+ self._base_init(readermode)
+ if user or usenetrc:
+ self.login(user, password, usenetrc)
+ except:
+ if self.file:
+ self.file.close()
+ self.sock.close()
+ raise
+
+ def _base_init(self, readermode):
+ """Partial initialization for the NNTP protocol.
+ This instance method is extracted for supporting the test code.
+ """
self.debugging = 0
self.welcome = self._getresp()
@@ -392,12 +392,12 @@ class NNTP:
if is_connected():
self._close()
- def _create_socket(self, timeout):
- if timeout is not None and not timeout:
- raise ValueError('Non-blocking socket (timeout=0) is not supported')
- sys.audit("nntplib.connect", self, self.host, self.port)
- return socket.create_connection((self.host, self.port), timeout)
-
+ def _create_socket(self, timeout):
+ if timeout is not None and not timeout:
+ raise ValueError('Non-blocking socket (timeout=0) is not supported')
+ sys.audit("nntplib.connect", self, self.host, self.port)
+ return socket.create_connection((self.host, self.port), timeout)
+
def getwelcome(self):
"""Get the welcome message from the server
(this is read and squirreled away by __init__()).
@@ -441,7 +441,7 @@ class NNTP:
def _putline(self, line):
"""Internal: send one line to the server, appending CRLF.
The `line` must be a bytes-like object."""
- sys.audit("nntplib.putline", self, line)
+ sys.audit("nntplib.putline", self, line)
line = line + _CRLF
if self.debugging > 1: print('*put*', repr(line))
self.file.write(line)
@@ -916,12 +916,12 @@ class NNTP:
return self._post('IHAVE {0}'.format(message_id), data)
def _close(self):
- try:
- if self.file:
- self.file.close()
- del self.file
- finally:
- self.sock.close()
+ try:
+ if self.file:
+ self.file.close()
+ del self.file
+ finally:
+ self.sock.close()
def quit(self):
"""Process a QUIT command and close the socket. Returns:
@@ -1012,7 +1012,7 @@ class NNTP:
if _have_ssl:
- class NNTP_SSL(NNTP):
+ class NNTP_SSL(NNTP):
def __init__(self, host, port=NNTP_SSL_PORT,
user=None, password=None, ssl_context=None,
@@ -1021,19 +1021,19 @@ if _have_ssl:
"""This works identically to NNTP.__init__, except for the change
in default port and the `ssl_context` argument for SSL connections.
"""
- self.ssl_context = ssl_context
- super().__init__(host, port, user, password, readermode,
- usenetrc, timeout)
-
- def _create_socket(self, timeout):
- sock = super()._create_socket(timeout)
+ self.ssl_context = ssl_context
+ super().__init__(host, port, user, password, readermode,
+ usenetrc, timeout)
+
+ def _create_socket(self, timeout):
+ sock = super()._create_socket(timeout)
try:
- sock = _encrypt_on(sock, self.ssl_context, self.host)
+ sock = _encrypt_on(sock, self.ssl_context, self.host)
except:
- sock.close()
+ sock.close()
raise
- else:
- return sock
+ else:
+ return sock
__all__.append("NNTP_SSL")
@@ -1046,7 +1046,7 @@ if __name__ == '__main__':
nntplib built-in demo - display the latest articles in a newsgroup""")
parser.add_argument('-g', '--group', default='gmane.comp.python.general',
help='group to fetch messages from (default: %(default)s)')
- parser.add_argument('-s', '--server', default='news.gmane.io',
+ parser.add_argument('-s', '--server', default='news.gmane.io',
help='NNTP server hostname (default: %(default)s)')
parser.add_argument('-p', '--port', default=-1, type=int,
help='NNTP port number (default: %s / %s)' % (NNTP_PORT, NNTP_SSL_PORT))
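For context, the reworked NNTP class opens its socket in __init__ and also works as a context manager; a minimal sketch against a placeholder server (news.example.org is illustrative, not a real host):

    import nntplib

    with nntplib.NNTP("news.example.org", readermode=True, timeout=30) as srv:
        print(srv.getwelcome())
        resp, count, first, last, name = srv.group("gmane.comp.python.general")
        print(f"{name}: {count} articles, range {first}-{last}")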
diff --git a/contrib/tools/python3/src/Lib/ntpath.py b/contrib/tools/python3/src/Lib/ntpath.py
index 6f771773a7..2a249fd756 100644
--- a/contrib/tools/python3/src/Lib/ntpath.py
+++ b/contrib/tools/python3/src/Lib/ntpath.py
@@ -46,10 +46,10 @@ def normcase(s):
Makes all characters lowercase and all slashes into backslashes."""
s = os.fspath(s)
- if isinstance(s, bytes):
- return s.replace(b'/', b'\\').lower()
- else:
- return s.replace('/', '\\').lower()
+ if isinstance(s, bytes):
+ return s.replace(b'/', b'\\').lower()
+ else:
+ return s.replace('/', '\\').lower()
# Return whether a path is absolute.
@@ -61,14 +61,14 @@ def normcase(s):
def isabs(s):
"""Test whether a path is absolute"""
s = os.fspath(s)
- # Paths beginning with \\?\ are always absolute, but do not
- # necessarily contain a drive.
- if isinstance(s, bytes):
- if s.replace(b'/', b'\\').startswith(b'\\\\?\\'):
- return True
- else:
- if s.replace('/', '\\').startswith('\\\\?\\'):
- return True
+ # Paths beginning with \\?\ are always absolute, but do not
+ # necessarily contain a drive.
+ if isinstance(s, bytes):
+ if s.replace(b'/', b'\\').startswith(b'\\\\?\\'):
+ return True
+ else:
+ if s.replace('/', '\\').startswith('\\\\?\\'):
+ return True
s = splitdrive(s)[1]
return len(s) > 0 and s[0] in _get_bothseps(s)
@@ -231,7 +231,7 @@ def islink(path):
"""
try:
st = os.lstat(path)
- except (OSError, ValueError, AttributeError):
- except (OSError, ValueError, AttributeError):
+ except (OSError, ValueError, AttributeError):
return False
return stat.S_ISLNK(st.st_mode)
@@ -241,7 +241,7 @@ def lexists(path):
"""Test whether a path exists. Returns True for broken symbolic links"""
try:
st = os.lstat(path)
- except (OSError, ValueError):
+ except (OSError, ValueError):
+ except (OSError, ValueError):
return False
return True
@@ -301,7 +301,7 @@ def expanduser(path):
while i < n and path[i] not in _get_bothseps(path):
i += 1
- if 'USERPROFILE' in os.environ:
+ if 'USERPROFILE' in os.environ:
userhome = os.environ['USERPROFILE']
elif not 'HOMEPATH' in os.environ:
return path
@@ -466,8 +466,8 @@ def normpath(path):
# in the case of paths with these prefixes:
# \\.\ -> device names
# \\?\ -> literal paths
- # do not do any normalization, but return the path
- # unchanged apart from the call to os.fspath()
+ # do not do any normalization, but return the path
+ # unchanged apart from the call to os.fspath()
return path
path = path.replace(altsep, sep)
prefix, path = splitdrive(path)
@@ -528,149 +528,149 @@ else: # use native Windows method on Windows
except (OSError, ValueError):
return _abspath_fallback(path)
-try:
- from nt import _getfinalpathname, readlink as _nt_readlink
-except ImportError:
- # realpath is a no-op on systems without _getfinalpathname support.
- realpath = abspath
-else:
- def _readlink_deep(path):
- # These error codes indicate that we should stop reading links and
- # return the path we currently have.
- # 1: ERROR_INVALID_FUNCTION
- # 2: ERROR_FILE_NOT_FOUND
- # 3: ERROR_DIRECTORY_NOT_FOUND
- # 5: ERROR_ACCESS_DENIED
- # 21: ERROR_NOT_READY (implies drive with no media)
- # 32: ERROR_SHARING_VIOLATION (probably an NTFS paging file)
- # 50: ERROR_NOT_SUPPORTED (implies no support for reparse points)
- # 67: ERROR_BAD_NET_NAME (implies remote server unavailable)
- # 87: ERROR_INVALID_PARAMETER
- # 4390: ERROR_NOT_A_REPARSE_POINT
- # 4392: ERROR_INVALID_REPARSE_DATA
- # 4393: ERROR_REPARSE_TAG_INVALID
- allowed_winerror = 1, 2, 3, 5, 21, 32, 50, 67, 87, 4390, 4392, 4393
-
- seen = set()
- while normcase(path) not in seen:
- seen.add(normcase(path))
- try:
- old_path = path
- path = _nt_readlink(path)
- # Links may be relative, so resolve them against their
- # own location
- if not isabs(path):
- # If it's something other than a symlink, we don't know
- # what it's actually going to be resolved against, so
- # just return the old path.
- if not islink(old_path):
- path = old_path
- break
- path = normpath(join(dirname(old_path), path))
- except OSError as ex:
- if ex.winerror in allowed_winerror:
- break
- raise
- except ValueError:
- # Stop on reparse points that are not symlinks
- break
- return path
-
- def _getfinalpathname_nonstrict(path):
- # These error codes indicate that we should stop resolving the path
- # and return the value we currently have.
- # 1: ERROR_INVALID_FUNCTION
- # 2: ERROR_FILE_NOT_FOUND
- # 3: ERROR_DIRECTORY_NOT_FOUND
- # 5: ERROR_ACCESS_DENIED
- # 21: ERROR_NOT_READY (implies drive with no media)
- # 32: ERROR_SHARING_VIOLATION (probably an NTFS paging file)
- # 50: ERROR_NOT_SUPPORTED
- # 67: ERROR_BAD_NET_NAME (implies remote server unavailable)
- # 87: ERROR_INVALID_PARAMETER
- # 123: ERROR_INVALID_NAME
- # 1920: ERROR_CANT_ACCESS_FILE
- # 1921: ERROR_CANT_RESOLVE_FILENAME (implies unfollowable symlink)
- allowed_winerror = 1, 2, 3, 5, 21, 32, 50, 67, 87, 123, 1920, 1921
-
- # Non-strict algorithm is to find as much of the target directory
- # as we can and join the rest.
- tail = ''
- while path:
- try:
- path = _getfinalpathname(path)
- return join(path, tail) if tail else path
- except OSError as ex:
- if ex.winerror not in allowed_winerror:
- raise
- try:
- # The OS could not resolve this path fully, so we attempt
- # to follow the link ourselves. If we succeed, join the tail
- # and return.
- new_path = _readlink_deep(path)
- if new_path != path:
- return join(new_path, tail) if tail else new_path
- except OSError:
- # If we fail to readlink(), let's keep traversing
- pass
- path, name = split(path)
- # TODO (bpo-38186): Request the real file name from the directory
- # entry using FindFirstFileW. For now, we will return the path
- # as best we have it
- if path and not name:
- return path + tail
- tail = join(name, tail) if tail else name
- return tail
-
- def realpath(path):
- path = normpath(path)
- if isinstance(path, bytes):
- prefix = b'\\\\?\\'
- unc_prefix = b'\\\\?\\UNC\\'
- new_unc_prefix = b'\\\\'
- cwd = os.getcwdb()
- # bpo-38081: Special case for realpath(b'nul')
- if normcase(path) == normcase(os.fsencode(devnull)):
- return b'\\\\.\\NUL'
- else:
- prefix = '\\\\?\\'
- unc_prefix = '\\\\?\\UNC\\'
- new_unc_prefix = '\\\\'
- cwd = os.getcwd()
- # bpo-38081: Special case for realpath('nul')
- if normcase(path) == normcase(devnull):
- return '\\\\.\\NUL'
- had_prefix = path.startswith(prefix)
- if not had_prefix and not isabs(path):
- path = join(cwd, path)
- try:
- path = _getfinalpathname(path)
- initial_winerror = 0
- except OSError as ex:
- initial_winerror = ex.winerror
- path = _getfinalpathname_nonstrict(path)
- # The path returned by _getfinalpathname will always start with \\?\ -
- # strip off that prefix unless it was already provided on the original
- # path.
- if not had_prefix and path.startswith(prefix):
- # For UNC paths, the prefix will actually be \\?\UNC\
- # Handle that case as well.
- if path.startswith(unc_prefix):
- spath = new_unc_prefix + path[len(unc_prefix):]
- else:
- spath = path[len(prefix):]
- # Ensure that the non-prefixed path resolves to the same path
- try:
- if _getfinalpathname(spath) == path:
- path = spath
- except OSError as ex:
- # If the path does not exist and originally did not exist, then
- # strip the prefix anyway.
- if ex.winerror == initial_winerror:
- path = spath
- return path
-
-
+try:
+ from nt import _getfinalpathname, readlink as _nt_readlink
+except ImportError:
+ # realpath is a no-op on systems without _getfinalpathname support.
+ realpath = abspath
+else:
+ def _readlink_deep(path):
+ # These error codes indicate that we should stop reading links and
+ # return the path we currently have.
+ # 1: ERROR_INVALID_FUNCTION
+ # 2: ERROR_FILE_NOT_FOUND
+ # 3: ERROR_DIRECTORY_NOT_FOUND
+ # 5: ERROR_ACCESS_DENIED
+ # 21: ERROR_NOT_READY (implies drive with no media)
+ # 32: ERROR_SHARING_VIOLATION (probably an NTFS paging file)
+ # 50: ERROR_NOT_SUPPORTED (implies no support for reparse points)
+ # 67: ERROR_BAD_NET_NAME (implies remote server unavailable)
+ # 87: ERROR_INVALID_PARAMETER
+ # 4390: ERROR_NOT_A_REPARSE_POINT
+ # 4392: ERROR_INVALID_REPARSE_DATA
+ # 4393: ERROR_REPARSE_TAG_INVALID
+ allowed_winerror = 1, 2, 3, 5, 21, 32, 50, 67, 87, 4390, 4392, 4393
+
+ seen = set()
+ while normcase(path) not in seen:
+ seen.add(normcase(path))
+ try:
+ old_path = path
+ path = _nt_readlink(path)
+ # Links may be relative, so resolve them against their
+ # own location
+ if not isabs(path):
+ # If it's something other than a symlink, we don't know
+ # what it's actually going to be resolved against, so
+ # just return the old path.
+ if not islink(old_path):
+ path = old_path
+ break
+ path = normpath(join(dirname(old_path), path))
+ except OSError as ex:
+ if ex.winerror in allowed_winerror:
+ break
+ raise
+ except ValueError:
+ # Stop on reparse points that are not symlinks
+ break
+ return path
+
+ def _getfinalpathname_nonstrict(path):
+ # These error codes indicate that we should stop resolving the path
+ # and return the value we currently have.
+ # 1: ERROR_INVALID_FUNCTION
+ # 2: ERROR_FILE_NOT_FOUND
+ # 3: ERROR_DIRECTORY_NOT_FOUND
+ # 5: ERROR_ACCESS_DENIED
+ # 21: ERROR_NOT_READY (implies drive with no media)
+ # 32: ERROR_SHARING_VIOLATION (probably an NTFS paging file)
+ # 50: ERROR_NOT_SUPPORTED
+ # 67: ERROR_BAD_NET_NAME (implies remote server unavailable)
+ # 87: ERROR_INVALID_PARAMETER
+ # 123: ERROR_INVALID_NAME
+ # 1920: ERROR_CANT_ACCESS_FILE
+ # 1921: ERROR_CANT_RESOLVE_FILENAME (implies unfollowable symlink)
+ allowed_winerror = 1, 2, 3, 5, 21, 32, 50, 67, 87, 123, 1920, 1921
+
+ # Non-strict algorithm is to find as much of the target directory
+ # as we can and join the rest.
+ tail = ''
+ while path:
+ try:
+ path = _getfinalpathname(path)
+ return join(path, tail) if tail else path
+ except OSError as ex:
+ if ex.winerror not in allowed_winerror:
+ raise
+ try:
+ # The OS could not resolve this path fully, so we attempt
+ # to follow the link ourselves. If we succeed, join the tail
+ # and return.
+ new_path = _readlink_deep(path)
+ if new_path != path:
+ return join(new_path, tail) if tail else new_path
+ except OSError:
+ # If we fail to readlink(), let's keep traversing
+ pass
+ path, name = split(path)
+ # TODO (bpo-38186): Request the real file name from the directory
+ # entry using FindFirstFileW. For now, we will return the path
+ # as best we have it
+ if path and not name:
+ return path + tail
+ tail = join(name, tail) if tail else name
+ return tail
+
+ def realpath(path):
+ path = normpath(path)
+ if isinstance(path, bytes):
+ prefix = b'\\\\?\\'
+ unc_prefix = b'\\\\?\\UNC\\'
+ new_unc_prefix = b'\\\\'
+ cwd = os.getcwdb()
+ # bpo-38081: Special case for realpath(b'nul')
+ if normcase(path) == normcase(os.fsencode(devnull)):
+ return b'\\\\.\\NUL'
+ else:
+ prefix = '\\\\?\\'
+ unc_prefix = '\\\\?\\UNC\\'
+ new_unc_prefix = '\\\\'
+ cwd = os.getcwd()
+ # bpo-38081: Special case for realpath('nul')
+ if normcase(path) == normcase(devnull):
+ return '\\\\.\\NUL'
+ had_prefix = path.startswith(prefix)
+ if not had_prefix and not isabs(path):
+ path = join(cwd, path)
+ try:
+ path = _getfinalpathname(path)
+ initial_winerror = 0
+ except OSError as ex:
+ initial_winerror = ex.winerror
+ path = _getfinalpathname_nonstrict(path)
+ # The path returned by _getfinalpathname will always start with \\?\ -
+ # strip off that prefix unless it was already provided on the original
+ # path.
+ if not had_prefix and path.startswith(prefix):
+ # For UNC paths, the prefix will actually be \\?\UNC\
+ # Handle that case as well.
+ if path.startswith(unc_prefix):
+ spath = new_unc_prefix + path[len(unc_prefix):]
+ else:
+ spath = path[len(prefix):]
+ # Ensure that the non-prefixed path resolves to the same path
+ try:
+ if _getfinalpathname(spath) == path:
+ path = spath
+ except OSError as ex:
+ # If the path does not exist and originally did not exist, then
+ # strip the prefix anyway.
+ if ex.winerror == initial_winerror:
+ path = spath
+ return path
+
+
# Win9x family and earlier have no Unicode filename support.
supports_unicode_filenames = (hasattr(sys, "getwindowsversion") and
sys.getwindowsversion()[3] >= 2)
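The normcase()/isabs() hunks above are whitespace-only, but the behavior they preserve is worth illustrating; both functions are pure string manipulation, so this sketch runs on any platform:

    import ntpath

    # normcase() lowercases and converts forward slashes to backslashes.
    assert ntpath.normcase("C:/Program Files/Python") == "c:\\program files\\python"

    # \\?\-prefixed extended paths are always treated as absolute, even
    # though they need not contain a drive letter.
    assert ntpath.isabs("\\\\?\\Volume{example}\\data")
    assert not ntpath.isabs("reports\\2021")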
diff --git a/contrib/tools/python3/src/Lib/nturl2path.py b/contrib/tools/python3/src/Lib/nturl2path.py
index 61852aff58..90c95a26ff 100644
--- a/contrib/tools/python3/src/Lib/nturl2path.py
+++ b/contrib/tools/python3/src/Lib/nturl2path.py
@@ -50,14 +50,14 @@ def pathname2url(p):
# becomes
# ///C:/foo/bar/spam.foo
import urllib.parse
- # First, clean up some special forms. We are going to sacrifice
- # the additional information anyway
- if p[:4] == '\\\\?\\':
- p = p[4:]
- if p[:4].upper() == 'UNC\\':
- p = '\\' + p[4:]
- elif p[1:2] != ':':
- raise OSError('Bad path: ' + p)
+ # First, clean up some special forms. We are going to sacrifice
+ # the additional information anyway
+ if p[:4] == '\\\\?\\':
+ p = p[4:]
+ if p[:4].upper() == 'UNC\\':
+ p = '\\' + p[4:]
+ elif p[1:2] != ':':
+ raise OSError('Bad path: ' + p)
if not ':' in p:
# No drive specifier, just convert slashes and quote the name
if p[:2] == '\\\\':
@@ -67,7 +67,7 @@ def pathname2url(p):
p = '\\\\' + p
components = p.split('\\')
return urllib.parse.quote('/'.join(components))
- comp = p.split(':', maxsplit=2)
+ comp = p.split(':', maxsplit=2)
if len(comp) != 2 or len(comp[0]) > 1:
error = 'Bad path: ' + p
raise OSError(error)
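The cleanup above strips the \\?\ and \\?\UNC\ special forms before converting, so the docstring's own example holds for both spellings:

    import nturl2path

    assert nturl2path.pathname2url("C:\\foo\\bar\\spam.foo") == "///C:/foo/bar/spam.foo"
    # The extended-length prefix is sacrificed before conversion.
    assert nturl2path.pathname2url("\\\\?\\C:\\foo\\bar\\spam.foo") == "///C:/foo/bar/spam.foo"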
diff --git a/contrib/tools/python3/src/Lib/numbers.py b/contrib/tools/python3/src/Lib/numbers.py
index 5b98e64208..73db7de86c 100644
--- a/contrib/tools/python3/src/Lib/numbers.py
+++ b/contrib/tools/python3/src/Lib/numbers.py
@@ -33,7 +33,7 @@ class Complex(Number):
"""Complex defines the operations that work on the builtin complex type.
In short, those are: a conversion to complex, .real, .imag, +, -,
- *, /, **, abs(), .conjugate, ==, and !=.
+ *, /, **, abs(), .conjugate, ==, and !=.
If it is given heterogeneous arguments, and doesn't have special
knowledge about them, it should fall back to the builtin complex
@@ -292,12 +292,12 @@ class Rational(Real):
class Integral(Rational):
- """Integral adds methods that work on integral numbers.
-
- In short, these are conversion to int, pow with modulus, and the
- bit-string operations.
- """
+ """Integral adds methods that work on integral numbers.
+ In short, these are conversion to int, pow with modulus, and the
+ bit-string operations.
+ """
+
__slots__ = ()
@abstractmethod
diff --git a/contrib/tools/python3/src/Lib/opcode.py b/contrib/tools/python3/src/Lib/opcode.py
index ac1aa535f6..18d59ecada 100644
--- a/contrib/tools/python3/src/Lib/opcode.py
+++ b/contrib/tools/python3/src/Lib/opcode.py
@@ -21,7 +21,7 @@ try:
except ImportError:
pass
-cmp_op = ('<', '<=', '==', '!=', '>', '>=')
+cmp_op = ('<', '<=', '==', '!=', '>', '>=')
hasconst = []
hasname = []
@@ -59,7 +59,7 @@ def_op('ROT_TWO', 2)
def_op('ROT_THREE', 3)
def_op('DUP_TOP', 4)
def_op('DUP_TOP_TWO', 5)
-def_op('ROT_FOUR', 6)
+def_op('ROT_FOUR', 6)
def_op('NOP', 9)
def_op('UNARY_POSITIVE', 10)
@@ -83,13 +83,13 @@ def_op('BINARY_TRUE_DIVIDE', 27)
def_op('INPLACE_FLOOR_DIVIDE', 28)
def_op('INPLACE_TRUE_DIVIDE', 29)
-def_op('RERAISE', 48)
-def_op('WITH_EXCEPT_START', 49)
+def_op('RERAISE', 48)
+def_op('WITH_EXCEPT_START', 49)
def_op('GET_AITER', 50)
def_op('GET_ANEXT', 51)
def_op('BEFORE_ASYNC_WITH', 52)
-
-def_op('END_ASYNC_FOR', 54)
+
+def_op('END_ASYNC_FOR', 54)
def_op('INPLACE_ADD', 55)
def_op('INPLACE_SUBTRACT', 56)
def_op('INPLACE_MULTIPLY', 57)
@@ -110,20 +110,20 @@ def_op('PRINT_EXPR', 70)
def_op('LOAD_BUILD_CLASS', 71)
def_op('YIELD_FROM', 72)
def_op('GET_AWAITABLE', 73)
-def_op('LOAD_ASSERTION_ERROR', 74)
+def_op('LOAD_ASSERTION_ERROR', 74)
def_op('INPLACE_LSHIFT', 75)
def_op('INPLACE_RSHIFT', 76)
def_op('INPLACE_AND', 77)
def_op('INPLACE_XOR', 78)
def_op('INPLACE_OR', 79)
-
-def_op('LIST_TO_TUPLE', 82)
+
+def_op('LIST_TO_TUPLE', 82)
def_op('RETURN_VALUE', 83)
def_op('IMPORT_STAR', 84)
def_op('SETUP_ANNOTATIONS', 85)
def_op('YIELD_VALUE', 86)
def_op('POP_BLOCK', 87)
-
+
def_op('POP_EXCEPT', 89)
HAVE_ARGUMENT = 90 # Opcodes from here have an argument:
@@ -159,11 +159,11 @@ jabs_op('POP_JUMP_IF_TRUE', 115) # ""
name_op('LOAD_GLOBAL', 116) # Index in name list
-def_op('IS_OP', 117)
-def_op('CONTAINS_OP', 118)
-
-jabs_op('JUMP_IF_NOT_EXC_MATCH', 121)
-jrel_op('SETUP_FINALLY', 122) # Distance to target address
+def_op('IS_OP', 117)
+def_op('CONTAINS_OP', 118)
+
+jabs_op('JUMP_IF_NOT_EXC_MATCH', 121)
+jrel_op('SETUP_FINALLY', 122) # Distance to target address
def_op('LOAD_FAST', 124) # Local variable number
haslocal.append(124)
@@ -209,9 +209,9 @@ def_op('BUILD_STRING', 157)
name_op('LOAD_METHOD', 160)
def_op('CALL_METHOD', 161)
-def_op('LIST_EXTEND', 162)
-def_op('SET_UPDATE', 163)
-def_op('DICT_MERGE', 164)
-def_op('DICT_UPDATE', 165)
-
+def_op('LIST_EXTEND', 162)
+def_op('SET_UPDATE', 163)
+def_op('DICT_MERGE', 164)
+def_op('DICT_UPDATE', 165)
+
del def_op, name_op, jrel_op, jabs_op
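The def_op()/name_op()/jabs_op() calls above populate opcode.opmap and opcode.opname; a quick sanity check of the opcodes restored in this hunk (the numbers match the 3.9 table vendored here and differ on other Python versions):

    import opcode

    assert opcode.opmap["IS_OP"] == 117
    assert opcode.opmap["CONTAINS_OP"] == 118
    assert opcode.opname[48] == "RERAISE"
    # Opcodes numbered >= HAVE_ARGUMENT carry an argument in the bytecode.
    assert opcode.opmap["SETUP_FINALLY"] >= opcode.HAVE_ARGUMENT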
diff --git a/contrib/tools/python3/src/Lib/operator.py b/contrib/tools/python3/src/Lib/operator.py
index 241fdbb679..9cc5b89958 100644
--- a/contrib/tools/python3/src/Lib/operator.py
+++ b/contrib/tools/python3/src/Lib/operator.py
@@ -155,10 +155,10 @@ def contains(a, b):
return b in a
def countOf(a, b):
- "Return the number of items in a which are, or which equal, b."
+ "Return the number of items in a which are, or which equal, b."
count = 0
for i in a:
- if i is b or i == b:
+ if i is b or i == b:
count += 1
return count
@@ -173,7 +173,7 @@ def getitem(a, b):
def indexOf(a, b):
"Return the first index of b in a."
for i, j in enumerate(a):
- if j is b or j == b:
+ if j is b or j == b:
return i
else:
raise ValueError('sequence.index(x): x not in sequence')
@@ -302,11 +302,11 @@ class methodcaller:
"""
__slots__ = ('_name', '_args', '_kwargs')
- def __init__(self, name, /, *args, **kwargs):
- self._name = name
+ def __init__(self, name, /, *args, **kwargs):
+ self._name = name
if not isinstance(self._name, str):
raise TypeError('method name must be a string')
- self._args = args
+ self._args = args
self._kwargs = kwargs
def __call__(self, obj):
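countOf() and indexOf() deliberately test identity before equality (i is b or i == b), so an object that is not equal to itself, such as NaN, is still found; methodcaller() freezes a method name plus arguments for later calls. A brief illustration:

    from operator import countOf, indexOf, methodcaller

    nan = float("nan")
    assert nan != nan                          # equality fails for NaN...
    assert countOf([nan, 1.0, nan], nan) == 2  # ...but identity still matches
    assert indexOf([1.0, nan], nan) == 1

    assert methodcaller("upper")("spam") == "SPAM"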
diff --git a/contrib/tools/python3/src/Lib/optparse.py b/contrib/tools/python3/src/Lib/optparse.py
index 1c450c6fcb..0fb36323a7 100644
--- a/contrib/tools/python3/src/Lib/optparse.py
+++ b/contrib/tools/python3/src/Lib/optparse.py
@@ -929,7 +929,7 @@ class OptionContainer:
self.set_description(description)
def _create_option_mappings(self):
- # For use by OptionParser constructor -- create the main
+ # For use by OptionParser constructor -- create the main
+ # For use by OptionParser constructor -- create the main
# option mappings used by this OptionParser and all
# OptionGroups that it owns.
self._short_opt = {} # single letter -> Option instance
diff --git a/contrib/tools/python3/src/Lib/os.py b/contrib/tools/python3/src/Lib/os.py
index b794159f86..28b1c4130e 100644
--- a/contrib/tools/python3/src/Lib/os.py
+++ b/contrib/tools/python3/src/Lib/os.py
@@ -26,10 +26,10 @@ import abc
import sys
import stat as st
-from _collections_abc import _check_methods
-
-GenericAlias = type(list[int])
-
+from _collections_abc import _check_methods
+
+GenericAlias = type(list[int])
+
_names = sys.builtin_module_names
# Note: more names are added to __all__ later.
@@ -304,11 +304,11 @@ def walk(top, topdown=True, onerror=None, followlinks=False):
(e.g., via del or slice assignment), and walk will only recurse into the
subdirectories whose names remain in dirnames; this can be used to prune the
search, or to impose a specific order of visiting. Modifying dirnames when
- topdown is false has no effect on the behavior of os.walk(), since the
- directories in dirnames have already been generated by the time dirnames
- itself is generated. No matter the value of topdown, the list of
- subdirectories is retrieved before the tuples for the directory and its
- subdirectories are generated.
+ topdown is false has no effect on the behavior of os.walk(), since the
+ directories in dirnames have already been generated by the time dirnames
+ itself is generated. No matter the value of topdown, the list of
+ subdirectories is retrieved before the tuples for the directory and its
+ subdirectories are generated.
By default errors from the os.scandir() call are ignored. If
optional arg 'onerror' is specified, it should be a function; it
@@ -332,16 +332,16 @@ def walk(top, topdown=True, onerror=None, followlinks=False):
from os.path import join, getsize
for root, dirs, files in os.walk('python/Lib/email'):
print(root, "consumes", end="")
- print(sum(getsize(join(root, name)) for name in files), end="")
+ print(sum(getsize(join(root, name)) for name in files), end="")
print("bytes in", len(files), "non-directory files")
if 'CVS' in dirs:
dirs.remove('CVS') # don't visit CVS directories
"""
- sys.audit("os.walk", top, topdown, onerror, followlinks)
- return _walk(fspath(top), topdown, onerror, followlinks)
-
-def _walk(top, topdown, onerror, followlinks):
+ sys.audit("os.walk", top, topdown, onerror, followlinks)
+ return _walk(fspath(top), topdown, onerror, followlinks)
+
+def _walk(top, topdown, onerror, followlinks):
dirs = []
nondirs = []
walk_dirs = []
@@ -415,11 +415,11 @@ def _walk(top, topdown, onerror, followlinks):
# the caller can replace the directory entry during the "yield"
# above.
if followlinks or not islink(new_path):
- yield from _walk(new_path, topdown, onerror, followlinks)
+ yield from _walk(new_path, topdown, onerror, followlinks)
else:
# Recurse into sub-directories
for new_path in walk_dirs:
- yield from _walk(new_path, topdown, onerror, followlinks)
+ yield from _walk(new_path, topdown, onerror, followlinks)
# Yield after recursion if going bottom up
yield top, dirs, nondirs
@@ -454,13 +454,13 @@ if {open, stat} <= supports_dir_fd and {scandir, stat} <= supports_fd:
import os
for root, dirs, files, rootfd in os.fwalk('python/Lib/email'):
print(root, "consumes", end="")
- print(sum(os.stat(name, dir_fd=rootfd).st_size for name in files),
+ print(sum(os.stat(name, dir_fd=rootfd).st_size for name in files),
end="")
print("bytes in", len(files), "non-directory files")
if 'CVS' in dirs:
dirs.remove('CVS') # don't visit CVS directories
"""
- sys.audit("os.fwalk", top, topdown, onerror, follow_symlinks, dir_fd)
+ sys.audit("os.fwalk", top, topdown, onerror, follow_symlinks, dir_fd)
if not isinstance(top, int) or not hasattr(top, '__index__'):
top = fspath(top)
# Note: To guard against symlink races, we use the standard
@@ -577,7 +577,7 @@ def execvpe(file, args, env):
"""execvpe(file, args, env)
Execute the executable file (which is searched for along $PATH)
- with argument list args and environment env, replacing the
+ with argument list args and environment env, replacing the
current process.
args may be a list or tuple of strings. """
_execvpe(file, args, env)
@@ -660,11 +660,11 @@ def get_exec_path(env=None):
return path_list.split(pathsep)
-# Change environ to automatically call putenv() and unsetenv()
-from _collections_abc import MutableMapping, Mapping
+# Change environ to automatically call putenv() and unsetenv()
+from _collections_abc import MutableMapping, Mapping
class _Environ(MutableMapping):
- def __init__(self, data, encodekey, decodekey, encodevalue, decodevalue):
+ def __init__(self, data, encodekey, decodekey, encodevalue, decodevalue):
self.encodekey = encodekey
self.decodekey = decodekey
self.encodevalue = encodevalue
@@ -682,12 +682,12 @@ class _Environ(MutableMapping):
def __setitem__(self, key, value):
key = self.encodekey(key)
value = self.encodevalue(value)
- putenv(key, value)
+ putenv(key, value)
self._data[key] = value
def __delitem__(self, key):
encodedkey = self.encodekey(key)
- unsetenv(encodedkey)
+ unsetenv(encodedkey)
try:
del self._data[encodedkey]
except KeyError:
@@ -716,24 +716,24 @@ class _Environ(MutableMapping):
self[key] = value
return self[key]
- def __ior__(self, other):
- self.update(other)
- return self
-
- def __or__(self, other):
- if not isinstance(other, Mapping):
- return NotImplemented
- new = dict(self)
- new.update(other)
- return new
-
- def __ror__(self, other):
- if not isinstance(other, Mapping):
- return NotImplemented
- new = dict(other)
- new.update(self)
- return new
-
+ def __ior__(self, other):
+ self.update(other)
+ return self
+
+ def __or__(self, other):
+ if not isinstance(other, Mapping):
+ return NotImplemented
+ new = dict(self)
+ new.update(other)
+ return new
+
+ def __ror__(self, other):
+ if not isinstance(other, Mapping):
+ return NotImplemented
+ new = dict(other)
+ new.update(self)
+ return new
+
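(Reviewer note, not part of the diff: the three methods above give os.environ the PEP 584 merge operators; | returns a plain dict and leaves the environment alone, while |= updates the process environment through putenv(). A brief sketch:

    import os

    merged = os.environ | {"MY_FLAG": "1"}   # copy; environ is untouched
    assert merged["MY_FLAG"] == "1" and "MY_FLAG" not in os.environ

    os.environ |= {"MY_FLAG": "1"}           # in-place update via putenv()
    assert os.environ["MY_FLAG"] == "1"
    del os.environ["MY_FLAG"]                # removed via unsetenv()
)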
def _createenviron():
if name == 'nt':
# Where Env Var Names Must Be UPPERCASE
@@ -761,7 +761,7 @@ def _createenviron():
data = environ
return _Environ(data,
encodekey, decode,
- encode, decode)
+ encode, decode)
# unicode environ
environ = _createenviron()
@@ -786,7 +786,7 @@ if supports_bytes_environ:
# bytes environ
environb = _Environ(environ._data,
_check_bytes, bytes,
- _check_bytes, bytes)
+ _check_bytes, bytes)
del _check_bytes
def getenvb(key, default=None):
@@ -867,8 +867,8 @@ if _exists("fork") and not _exists("spawnv") and _exists("execv"):
if WIFSTOPPED(sts):
continue
- return waitstatus_to_exitcode(sts)
-
+ return waitstatus_to_exitcode(sts)
+
def spawnv(mode, file, args):
"""spawnv(mode, file, args) -> integer
@@ -1072,45 +1072,45 @@ class PathLike(abc.ABC):
@classmethod
def __subclasshook__(cls, subclass):
- if cls is PathLike:
- return _check_methods(subclass, '__fspath__')
- return NotImplemented
-
- __class_getitem__ = classmethod(GenericAlias)
-
-
-if name == 'nt':
- class _AddedDllDirectory:
- def __init__(self, path, cookie, remove_dll_directory):
- self.path = path
- self._cookie = cookie
- self._remove_dll_directory = remove_dll_directory
- def close(self):
- self._remove_dll_directory(self._cookie)
- self.path = None
- def __enter__(self):
- return self
- def __exit__(self, *args):
- self.close()
- def __repr__(self):
- if self.path:
- return "<AddedDllDirectory({!r})>".format(self.path)
- return "<AddedDllDirectory()>"
-
- def add_dll_directory(path):
- """Add a path to the DLL search path.
-
- This search path is used when resolving dependencies for imported
- extension modules (the module itself is resolved through sys.path),
- and also by ctypes.
-
- Remove the directory by calling close() on the returned object or
- using it in a with statement.
- """
- import nt
- cookie = nt._add_dll_directory(path)
- return _AddedDllDirectory(
- path,
- cookie,
- nt._remove_dll_directory
- )
+ if cls is PathLike:
+ return _check_methods(subclass, '__fspath__')
+ return NotImplemented
+
+ __class_getitem__ = classmethod(GenericAlias)
+
+
+if name == 'nt':
+ class _AddedDllDirectory:
+ def __init__(self, path, cookie, remove_dll_directory):
+ self.path = path
+ self._cookie = cookie
+ self._remove_dll_directory = remove_dll_directory
+ def close(self):
+ self._remove_dll_directory(self._cookie)
+ self.path = None
+ def __enter__(self):
+ return self
+ def __exit__(self, *args):
+ self.close()
+ def __repr__(self):
+ if self.path:
+ return "<AddedDllDirectory({!r})>".format(self.path)
+ return "<AddedDllDirectory()>"
+
+ def add_dll_directory(path):
+ """Add a path to the DLL search path.
+
+ This search path is used when resolving dependencies for imported
+ extension modules (the module itself is resolved through sys.path),
+ and also by ctypes.
+
+ Remove the directory by calling close() on the returned object or
+ using it in a with statement.
+ """
+ import nt
+ cookie = nt._add_dll_directory(path)
+ return _AddedDllDirectory(
+ path,
+ cookie,
+ nt._remove_dll_directory
+ )
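The restored add_dll_directory() returns a handle that doubles as a context manager, per its docstring; a Windows-only sketch using a directory present on stock installs:

    import os
    import sys

    if sys.platform == "win32":
        with os.add_dll_directory(r"C:\Windows\System32") as d:
            # DLL dependencies of extension modules and ctypes loads
            # resolve against this directory while the block is open.
            print(d)  # <AddedDllDirectory('C:\\Windows\\System32')>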
diff --git a/contrib/tools/python3/src/Lib/pathlib.py b/contrib/tools/python3/src/Lib/pathlib.py
index 7aeda14a14..a15b71e548 100644
--- a/contrib/tools/python3/src/Lib/pathlib.py
+++ b/contrib/tools/python3/src/Lib/pathlib.py
@@ -7,7 +7,7 @@ import posixpath
import re
import sys
from _collections_abc import Sequence
-from errno import EINVAL, ENOENT, ENOTDIR, EBADF, ELOOP
+from errno import EINVAL, ENOENT, ENOTDIR, EBADF, ELOOP
from operator import attrgetter
from stat import S_ISDIR, S_ISLNK, S_ISREG, S_ISSOCK, S_ISBLK, S_ISCHR, S_ISFIFO
from urllib.parse import quote_from_bytes as urlquote_from_bytes
@@ -34,13 +34,13 @@ __all__ = [
# Internals
#
-# EBADF - guard against macOS `stat` throwing EBADF
-_IGNORED_ERROS = (ENOENT, ENOTDIR, EBADF, ELOOP)
+# EBADF - guard against macOS `stat` throwing EBADF
+_IGNORED_ERROS = (ENOENT, ENOTDIR, EBADF, ELOOP)
_IGNORED_WINERRORS = (
21, # ERROR_NOT_READY - drive exists but is not accessible
- 123, # ERROR_INVALID_NAME - fix for bpo-35306
- 1921, # ERROR_CANT_RESOLVE_FILENAME - fix for broken symlink pointing to itself
+ 123, # ERROR_INVALID_NAME - fix for bpo-35306
+ 1921, # ERROR_CANT_RESOLVE_FILENAME - fix for broken symlink pointing to itself
)
def _ignore_error(exception):
@@ -132,25 +132,25 @@ class _WindowsFlavour(_Flavour):
ext_namespace_prefix = '\\\\?\\'
reserved_names = (
- {'CON', 'PRN', 'AUX', 'NUL', 'CONIN$', 'CONOUT$'} |
- {'COM%s' % c for c in '123456789\xb9\xb2\xb3'} |
- {'LPT%s' % c for c in '123456789\xb9\xb2\xb3'}
+ {'CON', 'PRN', 'AUX', 'NUL', 'CONIN$', 'CONOUT$'} |
+ {'COM%s' % c for c in '123456789\xb9\xb2\xb3'} |
+ {'LPT%s' % c for c in '123456789\xb9\xb2\xb3'}
)
# Interesting findings about extended paths:
- # * '\\?\c:\a' is an extended path, which bypasses normal Windows API
- # path processing. Thus relative paths are not resolved and slash is not
- # translated to backslash. It has the native NT path limit of 32767
- # characters, but a bit less after resolving device symbolic links,
- # such as '\??\C:' => '\Device\HarddiskVolume2'.
- # * '\\?\c:/a' looks for a device named 'C:/a' because slash is a
- # regular name character in the object namespace.
- # * '\\?\c:\foo/bar' is invalid because '/' is illegal in NT filesystems.
- # The only path separator at the filesystem level is backslash.
- # * '//?/c:\a' and '//?/c:/a' are effectively equivalent to '\\.\c:\a' and
- # thus limited to MAX_PATH.
- # * Prior to Windows 8, ANSI API bytes paths are limited to MAX_PATH,
- # even with the '\\?\' prefix.
+ # * '\\?\c:\a' is an extended path, which bypasses normal Windows API
+ # path processing. Thus relative paths are not resolved and slash is not
+ # translated to backslash. It has the native NT path limit of 32767
+ # characters, but a bit less after resolving device symbolic links,
+ # such as '\??\C:' => '\Device\HarddiskVolume2'.
+ # * '\\?\c:/a' looks for a device named 'C:/a' because slash is a
+ # regular name character in the object namespace.
+ # * '\\?\c:\foo/bar' is invalid because '/' is illegal in NT filesystems.
+ # The only path separator at the filesystem level is backslash.
+ # * '//?/c:\a' and '//?/c:/a' are effectively equivalent to '\\.\c:\a' and
+ # thus limited to MAX_PATH.
+ # * Prior to Windows 8, ANSI API bytes paths are limited to MAX_PATH,
+ # even with the '\\?\' prefix.
def splitroot(self, part, sep=sep):
first = part[0:1]
@@ -197,9 +197,9 @@ class _WindowsFlavour(_Flavour):
def casefold_parts(self, parts):
return [p.lower() for p in parts]
- def compile_pattern(self, pattern):
- return re.compile(fnmatch.translate(pattern), re.IGNORECASE).fullmatch
-
+ def compile_pattern(self, pattern):
+ return re.compile(fnmatch.translate(pattern), re.IGNORECASE).fullmatch
+
def resolve(self, path, strict=False):
s = str(path)
if not s:
@@ -240,16 +240,16 @@ class _WindowsFlavour(_Flavour):
def is_reserved(self, parts):
# NOTE: the rules for reserved names seem somewhat complicated
- # (e.g. r"..\NUL" is reserved but not r"foo\NUL" if "foo" does not
- # exist). We err on the side of caution and return True for paths
- # which are not considered reserved by Windows.
+ # (e.g. r"..\NUL" is reserved but not r"foo\NUL" if "foo" does not
+ # exist). We err on the side of caution and return True for paths
+ # which are not considered reserved by Windows.
if not parts:
return False
if parts[0].startswith('\\\\'):
# UNC paths are never reserved
return False
- name = parts[-1].partition('.')[0].partition(':')[0].rstrip(' ')
- return name.upper() in self.reserved_names
+ name = parts[-1].partition('.')[0].partition(':')[0].rstrip(' ')
+ return name.upper() in self.reserved_names
def make_uri(self, path):
# Under Windows, file URIs use the UTF-8 encoding.
@@ -264,7 +264,7 @@ class _WindowsFlavour(_Flavour):
return 'file:' + urlquote_from_bytes(path.as_posix().encode('utf-8'))
def gethomedir(self, username):
- if 'USERPROFILE' in os.environ:
+ if 'USERPROFILE' in os.environ:
userhome = os.environ['USERPROFILE']
elif 'HOMEPATH' in os.environ:
try:
@@ -321,9 +321,9 @@ class _PosixFlavour(_Flavour):
def casefold_parts(self, parts):
return parts
- def compile_pattern(self, pattern):
- return re.compile(fnmatch.translate(pattern)).fullmatch
-
+ def compile_pattern(self, pattern):
+ return re.compile(fnmatch.translate(pattern)).fullmatch
+
def resolve(self, path, strict=False):
sep = self.sep
accessor = path._accessor
@@ -340,10 +340,10 @@ class _PosixFlavour(_Flavour):
# parent dir
path, _, _ = path.rpartition(sep)
continue
- if path.endswith(sep):
- newpath = path + name
- else:
- newpath = path + sep + name
+ if path.endswith(sep):
+ newpath = path + name
+ else:
+ newpath = path + sep + name
if newpath in seen:
# Already seen this path
path = seen[newpath]
@@ -430,13 +430,13 @@ class _NormalAccessor(_Accessor):
unlink = os.unlink
- if hasattr(os, "link"):
- link_to = os.link
- else:
- @staticmethod
- def link_to(self, target):
- raise NotImplementedError("os.link() not available on this system")
-
+ if hasattr(os, "link"):
+ link_to = os.link
+ else:
+ @staticmethod
+ def link_to(self, target):
+ raise NotImplementedError("os.link() not available on this system")
+
rmdir = os.rmdir
rename = os.rename
@@ -461,21 +461,21 @@ class _NormalAccessor(_Accessor):
def readlink(self, path):
return os.readlink(path)
- def owner(self, path):
- try:
- import pwd
- return pwd.getpwuid(self.stat(path).st_uid).pw_name
- except ImportError:
- raise NotImplementedError("Path.owner() is unsupported on this system")
-
- def group(self, path):
- try:
- import grp
- return grp.getgrgid(self.stat(path).st_gid).gr_name
- except ImportError:
- raise NotImplementedError("Path.group() is unsupported on this system")
-
-
+ def owner(self, path):
+ try:
+ import pwd
+ return pwd.getpwuid(self.stat(path).st_uid).pw_name
+ except ImportError:
+ raise NotImplementedError("Path.owner() is unsupported on this system")
+
+ def group(self, path):
+ try:
+ import grp
+ return grp.getgrgid(self.stat(path).st_gid).gr_name
+ except ImportError:
+ raise NotImplementedError("Path.group() is unsupported on this system")
+
+
_normal_accessor = _NormalAccessor()
@@ -483,7 +483,7 @@ _normal_accessor = _NormalAccessor()
# Globbing helpers
#
-def _make_selector(pattern_parts, flavour):
+def _make_selector(pattern_parts, flavour):
pat = pattern_parts[0]
child_parts = pattern_parts[1:]
if pat == '**':
@@ -494,7 +494,7 @@ def _make_selector(pattern_parts, flavour):
cls = _WildcardSelector
else:
cls = _PreciseSelector
- return cls(pat, child_parts, flavour)
+ return cls(pat, child_parts, flavour)
if hasattr(functools, "lru_cache"):
_make_selector = functools.lru_cache()(_make_selector)
@@ -504,10 +504,10 @@ class _Selector:
"""A selector matches a specific glob pattern part against the children
of a given path."""
- def __init__(self, child_parts, flavour):
+ def __init__(self, child_parts, flavour):
self.child_parts = child_parts
if child_parts:
- self.successor = _make_selector(child_parts, flavour)
+ self.successor = _make_selector(child_parts, flavour)
self.dironly = True
else:
self.successor = _TerminatingSelector()
@@ -533,9 +533,9 @@ class _TerminatingSelector:
class _PreciseSelector(_Selector):
- def __init__(self, name, child_parts, flavour):
+ def __init__(self, name, child_parts, flavour):
self.name = name
- _Selector.__init__(self, child_parts, flavour)
+ _Selector.__init__(self, child_parts, flavour)
def _select_from(self, parent_path, is_dir, exists, scandir):
try:
@@ -549,45 +549,45 @@ class _PreciseSelector(_Selector):
class _WildcardSelector(_Selector):
- def __init__(self, pat, child_parts, flavour):
- self.match = flavour.compile_pattern(pat)
- _Selector.__init__(self, child_parts, flavour)
+ def __init__(self, pat, child_parts, flavour):
+ self.match = flavour.compile_pattern(pat)
+ _Selector.__init__(self, child_parts, flavour)
def _select_from(self, parent_path, is_dir, exists, scandir):
try:
- with scandir(parent_path) as scandir_it:
- entries = list(scandir_it)
+ with scandir(parent_path) as scandir_it:
+ entries = list(scandir_it)
for entry in entries:
- if self.dironly:
- try:
- # "entry.is_dir()" can raise PermissionError
- # in some cases (see bpo-38894), which is not
- # among the errors ignored by _ignore_error()
- if not entry.is_dir():
- continue
- except OSError as e:
- if not _ignore_error(e):
- raise
- continue
- name = entry.name
- if self.match(name):
- path = parent_path._make_child_relpath(name)
- for p in self.successor._select_from(path, is_dir, exists, scandir):
- yield p
+ if self.dironly:
+ try:
+ # "entry.is_dir()" can raise PermissionError
+ # in some cases (see bpo-38894), which is not
+ # among the errors ignored by _ignore_error()
+ if not entry.is_dir():
+ continue
+ except OSError as e:
+ if not _ignore_error(e):
+ raise
+ continue
+ name = entry.name
+ if self.match(name):
+ path = parent_path._make_child_relpath(name)
+ for p in self.successor._select_from(path, is_dir, exists, scandir):
+ yield p
except PermissionError:
return
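The bpo-38894 comment restored above records a subtle point: entry.is_dir() can itself raise PermissionError, which _ignore_error() does not cover, so the selector catches OSError around the check and skips the entry. The same defensive scan in isolation (illustrative sketch):

    import os

    def dirs_only(parent):
        with os.scandir(parent) as it:
            for entry in it:
                try:
                    if not entry.is_dir():
                        continue
                except OSError:
                    # e.g. PermissionError from the underlying stat(); skip it
                    continue
                yield entry.name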
class _RecursiveWildcardSelector(_Selector):
- def __init__(self, pat, child_parts, flavour):
- _Selector.__init__(self, child_parts, flavour)
+ def __init__(self, pat, child_parts, flavour):
+ _Selector.__init__(self, child_parts, flavour)
def _iterate_directories(self, parent_path, is_dir, scandir):
yield parent_path
try:
- with scandir(parent_path) as scandir_it:
- entries = list(scandir_it)
+ with scandir(parent_path) as scandir_it:
+ entries = list(scandir_it)
for entry in entries:
entry_is_dir = False
try:
@@ -813,9 +813,9 @@ class PurePath(object):
return NotImplemented
return self._cparts >= other._cparts
- def __class_getitem__(cls, type):
- return cls
-
+ def __class_getitem__(cls, type):
+ return cls
+
drive = property(attrgetter('_drv'),
doc="""The drive prefix (letter or UNC path), if any.""")
@@ -838,11 +838,11 @@ class PurePath(object):
@property
def suffix(self):
- """
- The final component's last suffix, if any.
-
- This includes the leading period. For example: '.txt'
- """
+ """
+ The final component's last suffix, if any.
+
+ This includes the leading period. For example: '.txt'
+ """
name = self.name
i = name.rfind('.')
if 0 < i < len(name) - 1:
@@ -852,11 +852,11 @@ class PurePath(object):
@property
def suffixes(self):
- """
- A list of the final component's suffixes, if any.
-
- These include the leading periods. For example: ['.tar', '.gz']
- """
+ """
+ A list of the final component's suffixes, if any.
+
+ These include the leading periods. For example: ['.tar', '.gz']
+ """
name = self.name
if name.endswith('.'):
return []
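For reference, how the restored suffix/suffixes docstrings play out (standard pathlib behaviour, shown only as a usage sketch):

    from pathlib import PurePath

    p = PurePath("archive.tar.gz")
    print(p.suffix)                  # '.gz'  -- only the last dot counts
    print(p.suffixes)                # ['.tar', '.gz']
    print(PurePath("noext").suffix)  # ''     -- no dot, no suffix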
@@ -884,10 +884,10 @@ class PurePath(object):
return self._from_parsed_parts(self._drv, self._root,
self._parts[:-1] + [name])
- def with_stem(self, stem):
- """Return a new path with the stem changed."""
- return self.with_name(stem + self.suffix)
-
+ def with_stem(self, stem):
+ """Return a new path with the stem changed."""
+ return self.with_name(stem + self.suffix)
+
def with_suffix(self, suffix):
"""Return a new path with the file suffix changed. If the path
has no suffix, add given suffix. If the given suffix is an empty
@@ -936,21 +936,21 @@ class PurePath(object):
cf = self._flavour.casefold_parts
if (root or drv) if n == 0 else cf(abs_parts[:n]) != cf(to_abs_parts):
formatted = self._format_parsed_parts(to_drv, to_root, to_parts)
- raise ValueError("{!r} is not in the subpath of {!r}"
- " OR one path is relative and the other is absolute."
+ raise ValueError("{!r} is not in the subpath of {!r}"
+ " OR one path is relative and the other is absolute."
.format(str(self), str(formatted)))
return self._from_parsed_parts('', root if n == 1 else '',
abs_parts[n:])
- def is_relative_to(self, *other):
- """Return True if the path is relative to another path or False.
- """
- try:
- self.relative_to(*other)
- return True
- except ValueError:
- return False
-
+ def is_relative_to(self, *other):
+ """Return True if the path is relative to another path or False.
+ """
+ try:
+ self.relative_to(*other)
+ return True
+ except ValueError:
+ return False
+
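is_relative_to() is new in this change (Python 3.9) and is simply a ValueError-to-bool wrapper around relative_to(); a usage sketch:

    from pathlib import PurePosixPath

    p = PurePosixPath("/etc/passwd")
    print(p.is_relative_to("/etc"))  # True
    print(p.is_relative_to("/usr"))  # False -- relative_to() would raise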
@property
def parts(self):
"""An object providing sequence-like access to the
@@ -972,16 +972,16 @@ class PurePath(object):
return self._make_child(args)
def __truediv__(self, key):
- try:
- return self._make_child((key,))
- except TypeError:
- return NotImplemented
+ try:
+ return self._make_child((key,))
+ except TypeError:
+ return NotImplemented
def __rtruediv__(self, key):
- try:
- return self._from_parts([key] + self._parts)
- except TypeError:
- return NotImplemented
+ try:
+ return self._from_parts([key] + self._parts)
+ except TypeError:
+ return NotImplemented
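Returning NotImplemented from __truediv__/__rtruediv__ instead of letting TypeError escape keeps the / operator cooperative with other types. For illustration:

    from pathlib import PurePosixPath

    p = PurePosixPath("/usr") / "local" / "bin"    # __truediv__
    q = "/opt" / PurePosixPath("tools")            # __rtruediv__
    print(p, q)                                    # /usr/local/bin /opt/tools
    print(PurePosixPath("/usr").__truediv__(42))   # NotImplemented, no exception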
@property
def parent(self):
@@ -1105,15 +1105,15 @@ class Path(PurePath):
return self
def __exit__(self, t, v, tb):
- # https://bugs.python.org/issue39682
- # In previous versions of pathlib, this method marked this path as
- # closed; subsequent attempts to perform I/O would raise an IOError.
- # This functionality was never documented, and had the effect of
- # making Path objects mutable, contrary to PEP 428. In Python 3.9 the
- # _closed attribute was removed, and this method was made a no-op.
- # This method and __enter__()/__exit__() should be deprecated and
- # removed in the future.
- pass
+ # https://bugs.python.org/issue39682
+ # In previous versions of pathlib, this method marked this path as
+ # closed; subsequent attempts to perform I/O would raise an IOError.
+ # This functionality was never documented, and had the effect of
+ # making Path objects mutable, contrary to PEP 428. In Python 3.9 the
+ # _closed attribute was removed, and this method was made a no-op.
+ # This method and __enter__()/__exit__() should be deprecated and
+ # removed in the future.
+ pass
def _opener(self, name, flags, mode=0o666):
# A stub for the opener argument to built-in open()
@@ -1150,7 +1150,7 @@ class Path(PurePath):
try:
other_st = other_path.stat()
except AttributeError:
- other_st = self._accessor.stat(other_path)
+ other_st = self._accessor.stat(other_path)
return os.path.samestat(st, other_st)
def iterdir(self):
@@ -1167,13 +1167,13 @@ class Path(PurePath):
"""Iterate over this subtree and yield all existing files (of any
kind, including directories) matching the given relative pattern.
"""
- sys.audit("pathlib.Path.glob", self, pattern)
+ sys.audit("pathlib.Path.glob", self, pattern)
if not pattern:
raise ValueError("Unacceptable pattern: {!r}".format(pattern))
drv, root, pattern_parts = self._flavour.parse_parts((pattern,))
if drv or root:
raise NotImplementedError("Non-relative patterns are unsupported")
- selector = _make_selector(tuple(pattern_parts), self._flavour)
+ selector = _make_selector(tuple(pattern_parts), self._flavour)
for p in selector.select_from(self):
yield p
@@ -1182,11 +1182,11 @@ class Path(PurePath):
directories) matching the given relative pattern, anywhere in
this subtree.
"""
- sys.audit("pathlib.Path.rglob", self, pattern)
+ sys.audit("pathlib.Path.rglob", self, pattern)
drv, root, pattern_parts = self._flavour.parse_parts((pattern,))
if drv or root:
raise NotImplementedError("Non-relative patterns are unsupported")
- selector = _make_selector(("**",) + tuple(pattern_parts), self._flavour)
+ selector = _make_selector(("**",) + tuple(pattern_parts), self._flavour)
for p in selector.select_from(self):
yield p
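glob() and rglob() now raise the pathlib.Path.glob / pathlib.Path.rglob audit events before building the selector; usage itself is unchanged. A sketch:

    from pathlib import Path

    root = Path(".")
    top_level = sorted(root.glob("*.py"))    # one directory level
    recursive = sorted(root.rglob("*.py"))   # equivalent to glob("**/*.py")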
@@ -1235,13 +1235,13 @@ class Path(PurePath):
"""
Return the login name of the file owner.
"""
- return self._accessor.owner(self)
+ return self._accessor.owner(self)
def group(self):
"""
Return the group name of the file gid.
"""
- return self._accessor.group(self)
+ return self._accessor.group(self)
def open(self, mode='r', buffering=-1, encoding=None,
errors=None, newline=None):
@@ -1285,15 +1285,15 @@ class Path(PurePath):
with self.open(mode='w', encoding=encoding, errors=errors) as f:
return f.write(data)
- def readlink(self):
- """
- Return the path to which the symbolic link points.
- """
- path = self._accessor.readlink(self)
- obj = self._from_parts((path,), init=False)
- obj._init(template=self)
- return obj
-
+ def readlink(self):
+ """
+ Return the path to which the symbolic link points.
+ """
+ path = self._accessor.readlink(self)
+ obj = self._from_parts((path,), init=False)
+ obj._init(template=self)
+ return obj
+
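Path.readlink() is new in this change (Python 3.9) and mirrors os.readlink() but returns a Path. A sketch, assuming a POSIX system where unprivileged symlink creation works:

    from pathlib import Path

    Path("target.txt").touch()
    link = Path("alias.txt")
    link.symlink_to("target.txt")
    print(link.readlink())   # PosixPath('target.txt')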
def touch(self, mode=0o666, exist_ok=True):
"""
Create this file with the given access mode, if it doesn't exist.
@@ -1345,16 +1345,16 @@ class Path(PurePath):
"""
self._accessor.lchmod(self, mode)
- def unlink(self, missing_ok=False):
+ def unlink(self, missing_ok=False):
"""
Remove this file or link.
If the path is a directory, use rmdir() instead.
"""
- try:
- self._accessor.unlink(self)
- except FileNotFoundError:
- if not missing_ok:
- raise
+ try:
+ self._accessor.unlink(self)
+ except FileNotFoundError:
+ if not missing_ok:
+ raise
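unlink() gains a missing_ok flag (Python 3.8 semantics) that suppresses only FileNotFoundError; other OSErrors still propagate. For illustration:

    from pathlib import Path

    p = Path("scratch.tmp")
    p.unlink(missing_ok=True)   # no error even if the file never existed
    p.touch()
    p.unlink()                  # default missing_ok=False raises if absent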
def rmdir(self):
"""
@@ -1371,49 +1371,49 @@ class Path(PurePath):
def rename(self, target):
"""
- Rename this path to the target path.
-
- The target path may be absolute or relative. Relative paths are
- interpreted relative to the current working directory, *not* the
- directory of the Path object.
-
- Returns the new Path instance pointing to the target path.
+ Rename this path to the target path.
+
+ The target path may be absolute or relative. Relative paths are
+ interpreted relative to the current working directory, *not* the
+ directory of the Path object.
+
+ Returns the new Path instance pointing to the target path.
"""
self._accessor.rename(self, target)
- return self.__class__(target)
+ return self.__class__(target)
def replace(self, target):
"""
- Rename this path to the target path, overwriting if that path exists.
-
- The target path may be absolute or relative. Relative paths are
- interpreted relative to the current working directory, *not* the
- directory of the Path object.
-
- Returns the new Path instance pointing to the target path.
+ Rename this path to the target path, overwriting if that path exists.
+
+ The target path may be absolute or relative. Relative paths are
+ interpreted relative to the current working directory, *not* the
+ directory of the Path object.
+
+ Returns the new Path instance pointing to the target path.
"""
self._accessor.replace(self, target)
- return self.__class__(target)
+ return self.__class__(target)
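rename() and replace() now return the new Path instance, and the restored docstrings stress that relative targets resolve against the current working directory, not the directory of the Path object. A sketch (filenames hypothetical):

    from pathlib import Path

    src = Path("draft.txt")
    src.write_text("hello")
    dst = src.rename("final.txt")     # returns Path('final.txt')
    dst.replace("final.txt.bak")      # like rename(), but overwrites an
                                      # existing target on every platform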
def symlink_to(self, target, target_is_directory=False):
"""
- Make this path a symlink pointing to the target path.
- Note the order of arguments (link, target) is the reverse of os.symlink.
+ Make this path a symlink pointing to the target path.
+ Note the order of arguments (link, target) is the reverse of os.symlink.
"""
self._accessor.symlink(target, self, target_is_directory)
- def link_to(self, target):
- """
- Make the target path a hard link pointing to this path.
-
- Note this function does not make this path a hard link to *target*,
- despite the implication of the function and argument names. The order
- of arguments (target, link) is the reverse of Path.symlink_to, but
- matches that of os.link.
-
- """
- self._accessor.link_to(self, target)
-
+ def link_to(self, target):
+ """
+ Make the target path a hard link pointing to this path.
+
+ Note this function does not make this path a hard link to *target*,
+ despite the implication of the function and argument names. The order
+ of arguments (target, link) is the reverse of Path.symlink_to, but
+ matches that of os.link.
+
+ """
+ self._accessor.link_to(self, target)
+
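As the restored docstring stresses, link_to() takes its arguments in os.link(src, dst) order, the reverse of symlink_to(); later Python versions replace it with the less confusing hardlink_to(). Sketch (filenames hypothetical):

    from pathlib import Path

    target = Path("data.bin")
    target.write_bytes(b"\x00")
    target.link_to("data-hardlink.bin")   # creates the *new* hard link
                                          # "data-hardlink.bin" to data.bin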
# Convenience functions for querying the stat results
def exists(self):
@@ -1426,9 +1426,9 @@ class Path(PurePath):
if not _ignore_error(e):
raise
return False
- except ValueError:
- # Non-encodable path
- return False
+ except ValueError:
+ # Non-encodable path
+ return False
return True
def is_dir(self):
@@ -1441,11 +1441,11 @@ class Path(PurePath):
if not _ignore_error(e):
raise
# Path doesn't exist or is a broken symlink
- # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ )
- return False
- except ValueError:
- # Non-encodable path
+ # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ )
return False
+ except ValueError:
+ # Non-encodable path
+ return False
def is_file(self):
"""
@@ -1458,11 +1458,11 @@ class Path(PurePath):
if not _ignore_error(e):
raise
# Path doesn't exist or is a broken symlink
- # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ )
- return False
- except ValueError:
- # Non-encodable path
+ # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ )
return False
+ except ValueError:
+ # Non-encodable path
+ return False
def is_mount(self):
"""
@@ -1473,7 +1473,7 @@ class Path(PurePath):
return False
try:
- parent_dev = self.parent.stat().st_dev
+ parent_dev = self.parent.stat().st_dev
except OSError:
return False
@@ -1481,7 +1481,7 @@ class Path(PurePath):
if dev != parent_dev:
return True
ino = self.stat().st_ino
- parent_ino = self.parent.stat().st_ino
+ parent_ino = self.parent.stat().st_ino
return ino == parent_ino
def is_symlink(self):
@@ -1495,9 +1495,9 @@ class Path(PurePath):
raise
# Path doesn't exist
return False
- except ValueError:
- # Non-encodable path
- return False
+ except ValueError:
+ # Non-encodable path
+ return False
def is_block_device(self):
"""
@@ -1509,11 +1509,11 @@ class Path(PurePath):
if not _ignore_error(e):
raise
# Path doesn't exist or is a broken symlink
- # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ )
- return False
- except ValueError:
- # Non-encodable path
+ # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ )
return False
+ except ValueError:
+ # Non-encodable path
+ return False
def is_char_device(self):
"""
@@ -1525,11 +1525,11 @@ class Path(PurePath):
if not _ignore_error(e):
raise
# Path doesn't exist or is a broken symlink
- # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ )
- return False
- except ValueError:
- # Non-encodable path
+ # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ )
return False
+ except ValueError:
+ # Non-encodable path
+ return False
def is_fifo(self):
"""
@@ -1541,11 +1541,11 @@ class Path(PurePath):
if not _ignore_error(e):
raise
# Path doesn't exist or is a broken symlink
- # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ )
- return False
- except ValueError:
- # Non-encodable path
+ # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ )
return False
+ except ValueError:
+ # Non-encodable path
+ return False
def is_socket(self):
"""
@@ -1557,11 +1557,11 @@ class Path(PurePath):
if not _ignore_error(e):
raise
# Path doesn't exist or is a broken symlink
- # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ )
- return False
- except ValueError:
- # Non-encodable path
+ # (see http://web.archive.org/web/20200623061726/https://bitbucket.org/pitrou/pathlib/issues/12/ )
return False
+ except ValueError:
+ # Non-encodable path
+ return False
def expanduser(self):
""" Return a new path with expanded ~ and ~user constructs
diff --git a/contrib/tools/python3/src/Lib/pdb.py b/contrib/tools/python3/src/Lib/pdb.py
index 943211158a..eb1b0d8d4c 100644
--- a/contrib/tools/python3/src/Lib/pdb.py
+++ b/contrib/tools/python3/src/Lib/pdb.py
@@ -68,7 +68,7 @@ Debugger commands
# commands and is appended to __doc__ after the class has been defined.
import os
-import io
+import io
import re
import sys
import cmd
@@ -79,7 +79,7 @@ import glob
import pprint
import signal
import inspect
-import tokenize
+import tokenize
import traceback
import linecache
@@ -94,7 +94,7 @@ __all__ = ["run", "pm", "Pdb", "runeval", "runctx", "runcall", "set_trace",
def find_function(funcname, filename):
cre = re.compile(r'def\s+%s\s*[(]' % re.escape(funcname))
try:
- fp = tokenize.open(filename)
+ fp = tokenize.open(filename)
except OSError:
return None
# consumer of this info expects the first line to be 1
@@ -143,7 +143,7 @@ class Pdb(bdb.Bdb, cmd.Cmd):
nosigint=False, readrc=True):
bdb.Bdb.__init__(self, skip=skip)
cmd.Cmd.__init__(self, completekey, stdin, stdout)
- sys.audit("pdb.Pdb")
+ sys.audit("pdb.Pdb")
if stdout:
self.use_rawinput = 0
self.prompt = '(Pdb) '
@@ -162,15 +162,15 @@ class Pdb(bdb.Bdb, cmd.Cmd):
self.allow_kbdint = False
self.nosigint = nosigint
- # Read ~/.pdbrc and ./.pdbrc
+ # Read ~/.pdbrc and ./.pdbrc
self.rcLines = []
if readrc:
try:
- with open(os.path.expanduser('~/.pdbrc')) as rcFile:
- self.rcLines.extend(rcFile)
- except OSError:
- pass
- try:
+ with open(os.path.expanduser('~/.pdbrc')) as rcFile:
+ self.rcLines.extend(rcFile)
+ except OSError:
+ pass
+ try:
with open(".pdbrc") as rcFile:
self.rcLines.extend(rcFile)
except OSError:
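Pdb reads ~/.pdbrc and then ./.pdbrc, one debugger command per line; the two open() calls are now wrapped separately so a missing home file no longer prevents the local one from loading. A plausible rc file (contents hypothetical):

    # ~/.pdbrc -- commands executed at debugger startup
    alias pl pp locals()
    alias ps pp sorted(%1)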
@@ -342,12 +342,12 @@ class Pdb(bdb.Bdb, cmd.Cmd):
def interaction(self, frame, traceback):
# Restore the previous signal handler at the Pdb prompt.
if Pdb._previous_sigint_handler:
- try:
- signal.signal(signal.SIGINT, Pdb._previous_sigint_handler)
- except ValueError: # ValueError: signal only works in main thread
- pass
- else:
- Pdb._previous_sigint_handler = None
+ try:
+ signal.signal(signal.SIGINT, Pdb._previous_sigint_handler)
+ except ValueError: # ValueError: signal only works in main thread
+ pass
+ else:
+ Pdb._previous_sigint_handler = None
if self.setup(frame, traceback):
# no interaction desired at this time (happens if .pdbrc contains
# a command like "continue")
@@ -384,7 +384,7 @@ class Pdb(bdb.Bdb, cmd.Cmd):
sys.stdin = save_stdin
sys.displayhook = save_displayhook
except:
- self._error_exc()
+ self._error_exc()
def precmd(self, line):
"""Handle alias expansion and ';;' separator."""
@@ -473,7 +473,7 @@ class Pdb(bdb.Bdb, cmd.Cmd):
except Exception:
ret = []
# Then, try to complete file names as well.
- globs = glob.glob(glob.escape(text) + '*')
+ globs = glob.glob(glob.escape(text) + '*')
for fn in globs:
if os.path.isdir(fn):
ret.append(fn + '/')
@@ -495,7 +495,7 @@ class Pdb(bdb.Bdb, cmd.Cmd):
# Collect globals and locals. It is usually not really sensible to also
# complete builtins, and they clutter the namespace quite heavily, so we
# leave them out.
- ns = {**self.curframe.f_globals, **self.curframe_locals}
+ ns = {**self.curframe.f_globals, **self.curframe_locals}
if '.' in text:
# Walk an attribute chain up to the last part, similar to what
# rlcompleter does. This will bail if any of the parts are not
@@ -751,8 +751,8 @@ class Pdb(bdb.Bdb, cmd.Cmd):
"""
# this method should be callable before starting debugging, so default
# to "no globals" if there is no current frame
- frame = getattr(self, 'curframe', None)
- globs = frame.f_globals if frame else None
+ frame = getattr(self, 'curframe', None)
+ globs = frame.f_globals if frame else None
line = linecache.getline(filename, lineno, globs)
if not line:
self.message('End of file')
@@ -893,7 +893,7 @@ class Pdb(bdb.Bdb, cmd.Cmd):
except ValueError:
err = "Invalid line number (%s)" % arg
else:
- bplist = self.get_breaks(filename, lineno)[:]
+ bplist = self.get_breaks(filename, lineno)[:]
err = self.clear_break(filename, lineno)
if err:
self.error(err)
@@ -1026,11 +1026,11 @@ class Pdb(bdb.Bdb, cmd.Cmd):
if arg:
import shlex
argv0 = sys.argv[0:1]
- try:
- sys.argv = shlex.split(arg)
- except ValueError as e:
- self.error('Cannot run %s: %s' % (arg, e))
- return
+ try:
+ sys.argv = shlex.split(arg)
+ except ValueError as e:
+ self.error('Cannot run %s: %s' % (arg, e))
+ return
sys.argv[:0] = argv0
# this is caught in the main debugger loop
raise Restart
@@ -1107,7 +1107,7 @@ class Pdb(bdb.Bdb, cmd.Cmd):
try:
sys.call_tracing(p.run, (arg, globals, locals))
except Exception:
- self._error_exc()
+ self._error_exc()
self.message("LEAVING RECURSIVE DEBUGGER")
sys.settrace(self.trace_dispatch)
self.lastcmd = p.lastcmd
@@ -1140,9 +1140,9 @@ class Pdb(bdb.Bdb, cmd.Cmd):
"""
co = self.curframe.f_code
dict = self.curframe_locals
- n = co.co_argcount + co.co_kwonlyargcount
- if co.co_flags & inspect.CO_VARARGS: n = n+1
- if co.co_flags & inspect.CO_VARKEYWORDS: n = n+1
+ n = co.co_argcount + co.co_kwonlyargcount
+ if co.co_flags & inspect.CO_VARARGS: n = n+1
+ if co.co_flags & inspect.CO_VARKEYWORDS: n = n+1
for i in range(n):
name = co.co_varnames[i]
if name in dict:
@@ -1165,7 +1165,7 @@ class Pdb(bdb.Bdb, cmd.Cmd):
try:
return eval(arg, self.curframe.f_globals, self.curframe_locals)
except:
- self._error_exc()
+ self._error_exc()
raise
def _getval_except(self, arg, frame=None):
@@ -1179,31 +1179,31 @@ class Pdb(bdb.Bdb, cmd.Cmd):
err = traceback.format_exception_only(*exc_info)[-1].strip()
return _rstr('** raised %s **' % err)
- def _error_exc(self):
- exc_info = sys.exc_info()[:2]
- self.error(traceback.format_exception_only(*exc_info)[-1].strip())
-
- def _msg_val_func(self, arg, func):
- try:
- val = self._getval(arg)
- except:
- return # _getval() has displayed the error
- try:
- self.message(func(val))
- except:
- self._error_exc()
-
+ def _error_exc(self):
+ exc_info = sys.exc_info()[:2]
+ self.error(traceback.format_exception_only(*exc_info)[-1].strip())
+
+ def _msg_val_func(self, arg, func):
+ try:
+ val = self._getval(arg)
+ except:
+ return # _getval() has displayed the error
+ try:
+ self.message(func(val))
+ except:
+ self._error_exc()
+
def do_p(self, arg):
"""p expression
Print the value of the expression.
"""
- self._msg_val_func(arg, repr)
+ self._msg_val_func(arg, repr)
def do_pp(self, arg):
"""pp expression
Pretty-print the value of the expression.
"""
- self._msg_val_func(arg, pprint.pformat)
+ self._msg_val_func(arg, pprint.pformat)
complete_print = _complete_expression
complete_p = _complete_expression
@@ -1322,21 +1322,21 @@ class Pdb(bdb.Bdb, cmd.Cmd):
# _getval() already printed the error
return
code = None
- # Is it an instance method?
+ # Is it an instance method?
try:
- code = value.__func__.__code__
+ code = value.__func__.__code__
except Exception:
pass
if code:
- self.message('Method %s' % code.co_name)
+ self.message('Method %s' % code.co_name)
return
- # Is it a function?
+ # Is it a function?
try:
- code = value.__code__
+ code = value.__code__
except Exception:
pass
if code:
- self.message('Function %s' % code.co_name)
+ self.message('Function %s' % code.co_name)
return
# Is it a class?
if value.__class__ is type:
@@ -1391,7 +1391,7 @@ class Pdb(bdb.Bdb, cmd.Cmd):
Start an interactive interpreter whose global namespace
contains all the (global and local) names found in the current scope.
"""
- ns = {**self.curframe.f_globals, **self.curframe_locals}
+ ns = {**self.curframe.f_globals, **self.curframe_locals}
code.interact("*interactive*", local=ns)
def do_alias(self, arg):
@@ -1577,7 +1577,7 @@ class Pdb(bdb.Bdb, cmd.Cmd):
self._wait_for_mainpyfile = True
self.mainpyfile = self.canonic(filename)
self._user_requested_quit = False
- with io.open_code(filename) as fp:
+ with io.open_code(filename) as fp:
statement = "exec(compile(%r, %r, 'exec'))" % \
(fp.read(), self.mainpyfile)
self.run(statement)
@@ -1672,7 +1672,7 @@ To let the script run up to a given line X in the debugged file, use
def main():
import getopt
- opts, args = getopt.getopt(sys.argv[1:], 'mhc:', ['help', 'command='])
+ opts, args = getopt.getopt(sys.argv[1:], 'mhc:', ['help', 'command='])
if not args:
print(_usage)
@@ -1694,19 +1694,19 @@ def main():
print('Error:', mainpyfile, 'does not exist')
sys.exit(1)
- if run_as_module:
- import runpy
- try:
- runpy._get_module_details(mainpyfile)
- except Exception:
- traceback.print_exc()
- sys.exit(1)
-
+ if run_as_module:
+ import runpy
+ try:
+ runpy._get_module_details(mainpyfile)
+ except Exception:
+ traceback.print_exc()
+ sys.exit(1)
+
sys.argv[:] = args # Hide "pdb.py" and pdb options from argument list
if not run_as_module:
- mainpyfile = os.path.realpath(mainpyfile)
- # Replace pdb's dir with script's dir in front of module search path.
+ mainpyfile = os.path.realpath(mainpyfile)
+ # Replace pdb's dir with script's dir in front of module search path.
sys.path[0] = os.path.dirname(mainpyfile)
# Note on saving/restoring sys.argv: it's a good idea when sys.argv was
@@ -1726,7 +1726,7 @@ def main():
print("The program finished and will be restarted")
except Restart:
print("Restarting", mainpyfile, "with arguments:")
- print("\t" + " ".join(sys.argv[1:]))
+ print("\t" + " ".join(sys.argv[1:]))
except SystemExit:
# In most cases SystemExit does not warrant a post-mortem session.
print("The program exited via sys.exit(). Exit status:", end=' ')
diff --git a/contrib/tools/python3/src/Lib/pickle.py b/contrib/tools/python3/src/Lib/pickle.py
index 3d2c75a853..c3aa5f686e 100644
--- a/contrib/tools/python3/src/Lib/pickle.py
+++ b/contrib/tools/python3/src/Lib/pickle.py
@@ -13,7 +13,7 @@ Functions:
dump(object, file)
dumps(object) -> string
load(file) -> object
- loads(bytes) -> object
+ loads(bytes) -> object
Misc variables:
@@ -39,14 +39,14 @@ import _compat_pickle
__all__ = ["PickleError", "PicklingError", "UnpicklingError", "Pickler",
"Unpickler", "dump", "dumps", "load", "loads"]
-try:
- from _pickle import PickleBuffer
- __all__.append("PickleBuffer")
- _HAVE_PICKLE_BUFFER = True
-except ImportError:
- _HAVE_PICKLE_BUFFER = False
-
-
+try:
+ from _pickle import PickleBuffer
+ __all__.append("PickleBuffer")
+ _HAVE_PICKLE_BUFFER = True
+except ImportError:
+ _HAVE_PICKLE_BUFFER = False
+
+
# Shortcut for use in isinstance testing
bytes_types = (bytes, bytearray)
@@ -59,16 +59,16 @@ compatible_formats = ["1.0", # Original protocol 0
"2.0", # Protocol 2
"3.0", # Protocol 3
"4.0", # Protocol 4
- "5.0", # Protocol 5
+ "5.0", # Protocol 5
] # Old format versions we can read
# This is the highest protocol number we know how to read.
-HIGHEST_PROTOCOL = 5
+HIGHEST_PROTOCOL = 5
# The protocol we write by default. May be less than HIGHEST_PROTOCOL.
-# Only bump this if the oldest still supported version of Python already
-# includes it.
-DEFAULT_PROTOCOL = 4
+# Only bump this if the oldest still supported version of Python already
+# includes it.
+DEFAULT_PROTOCOL = 4
class PickleError(Exception):
"""A common base class for the other pickling exceptions."""
@@ -176,7 +176,7 @@ BINBYTES = b'B' # push bytes; counted binary string argument
SHORT_BINBYTES = b'C' # " " ; " " " " < 256 bytes
# Protocol 4
-
+
SHORT_BINUNICODE = b'\x8c' # push short string; UTF-8 length < 256 bytes
BINUNICODE8 = b'\x8d' # push very long string
BINBYTES8 = b'\x8e' # push very long bytes string
@@ -188,12 +188,12 @@ STACK_GLOBAL = b'\x93' # same as GLOBAL but using names on the stacks
MEMOIZE = b'\x94' # store top of the stack in memo
FRAME = b'\x95' # indicate the beginning of a new frame
-# Protocol 5
-
-BYTEARRAY8 = b'\x96' # push bytearray
-NEXT_BUFFER = b'\x97' # push next out-of-band buffer
-READONLY_BUFFER = b'\x98' # make top of stack readonly
-
+# Protocol 5
+
+BYTEARRAY8 = b'\x96' # push bytearray
+NEXT_BUFFER = b'\x97' # push next out-of-band buffer
+READONLY_BUFFER = b'\x98' # make top of stack readonly
+
__all__.extend([x for x in dir() if re.match("[A-Z][A-Z0-9_]+$", x)])
@@ -267,23 +267,23 @@ class _Unframer:
self.file_readline = file_readline
self.current_frame = None
- def readinto(self, buf):
- if self.current_frame:
- n = self.current_frame.readinto(buf)
- if n == 0 and len(buf) != 0:
- self.current_frame = None
- n = len(buf)
- buf[:] = self.file_read(n)
- return n
- if n < len(buf):
- raise UnpicklingError(
- "pickle exhausted before end of frame")
- return n
- else:
- n = len(buf)
- buf[:] = self.file_read(n)
- return n
-
+ def readinto(self, buf):
+ if self.current_frame:
+ n = self.current_frame.readinto(buf)
+ if n == 0 and len(buf) != 0:
+ self.current_frame = None
+ n = len(buf)
+ buf[:] = self.file_read(n)
+ return n
+ if n < len(buf):
+ raise UnpicklingError(
+ "pickle exhausted before end of frame")
+ return n
+ else:
+ n = len(buf)
+ buf[:] = self.file_read(n)
+ return n
+
def read(self, n):
if self.current_frame:
data = self.current_frame.read(n)
@@ -339,10 +339,10 @@ def whichmodule(obj, name):
return module_name
# Protect the iteration by using a list copy of sys.modules against dynamic
# modules that trigger imports of other modules upon calls to getattr.
- for module_name, module in sys.modules.copy().items():
- if (module_name == '__main__'
- or module_name == '__mp_main__' # bpo-42406
- or module is None):
+ for module_name, module in sys.modules.copy().items():
+ if (module_name == '__main__'
+ or module_name == '__mp_main__' # bpo-42406
+ or module is None):
continue
try:
if _getattribute(module, name)[0] is obj:
@@ -406,14 +406,14 @@ def decode_long(data):
class _Pickler:
- def __init__(self, file, protocol=None, *, fix_imports=True,
- buffer_callback=None):
+ def __init__(self, file, protocol=None, *, fix_imports=True,
+ buffer_callback=None):
"""This takes a binary file for writing a pickle data stream.
The optional *protocol* argument tells the pickler to use the
- given protocol; supported protocols are 0, 1, 2, 3, 4 and 5.
- The default protocol is 4. It was introduced in Python 3.4, and
- is incompatible with previous versions.
+ given protocol; supported protocols are 0, 1, 2, 3, 4 and 5.
+ The default protocol is 4. It was introduced in Python 3.4, and
+ is incompatible with previous versions.
Specifying a negative protocol version selects the highest
protocol version supported. The higher the protocol used, the
@@ -429,17 +429,17 @@ class _Pickler:
will try to map the new Python 3 names to the old module names
used in Python 2, so that the pickle data stream is readable
with Python 2.
-
- If *buffer_callback* is None (the default), buffer views are
- serialized into *file* as part of the pickle stream.
-
- If *buffer_callback* is not None, then it can be called any number
- of times with a buffer view. If the callback returns a false value
- (such as None), the given buffer is out-of-band; otherwise the
- buffer is serialized in-band, i.e. inside the pickle stream.
-
- It is an error if *buffer_callback* is not None and *protocol*
- is None or smaller than 5.
+
+ If *buffer_callback* is None (the default), buffer views are
+ serialized into *file* as part of the pickle stream.
+
+ If *buffer_callback* is not None, then it can be called any number
+ of times with a buffer view. If the callback returns a false value
+ (such as None), the given buffer is out-of-band; otherwise the
+ buffer is serialized in-band, i.e. inside the pickle stream.
+
+ It is an error if *buffer_callback* is not None and *protocol*
+ is None or smaller than 5.
"""
if protocol is None:
protocol = DEFAULT_PROTOCOL
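The *buffer_callback* paragraphs above describe the PEP 574 out-of-band protocol: the callback decides, per PickleBuffer, whether the data stays in the stream. A minimal round trip, assuming protocol 5 (Python 3.8+):

    import pickle
    from pickle import PickleBuffer

    payload = bytearray(b"x" * 10_000)
    buffers = []
    blob = pickle.dumps(PickleBuffer(payload), protocol=5,
                        buffer_callback=buffers.append)
    # The callback kept the view out-of-band, so `blob` only references it;
    # hand the buffers back, in order, when loading.
    restored = pickle.loads(blob, buffers=buffers)
    assert bytes(restored) == bytes(payload)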
@@ -447,9 +447,9 @@ class _Pickler:
protocol = HIGHEST_PROTOCOL
elif not 0 <= protocol <= HIGHEST_PROTOCOL:
raise ValueError("pickle protocol must be <= %d" % HIGHEST_PROTOCOL)
- if buffer_callback is not None and protocol < 5:
- raise ValueError("buffer_callback needs protocol >= 5")
- self._buffer_callback = buffer_callback
+ if buffer_callback is not None and protocol < 5:
+ raise ValueError("buffer_callback needs protocol >= 5")
+ self._buffer_callback = buffer_callback
try:
self._file_write = file.write
except AttributeError:
@@ -547,42 +547,42 @@ class _Pickler:
self.write(self.get(x[0]))
return
- rv = NotImplemented
- reduce = getattr(self, "reducer_override", None)
+ rv = NotImplemented
+ reduce = getattr(self, "reducer_override", None)
if reduce is not None:
rv = reduce(obj)
-
- if rv is NotImplemented:
- # Check the type dispatch table
- t = type(obj)
- f = self.dispatch.get(t)
- if f is not None:
- f(self, obj) # Call unbound method with explicit self
+
+ if rv is NotImplemented:
+ # Check the type dispatch table
+ t = type(obj)
+ f = self.dispatch.get(t)
+ if f is not None:
+ f(self, obj) # Call unbound method with explicit self
return
- # Check private dispatch table if any, or else
- # copyreg.dispatch_table
- reduce = getattr(self, 'dispatch_table', dispatch_table).get(t)
+ # Check private dispatch table if any, or else
+ # copyreg.dispatch_table
+ reduce = getattr(self, 'dispatch_table', dispatch_table).get(t)
if reduce is not None:
- rv = reduce(obj)
+ rv = reduce(obj)
else:
- # Check for a class with a custom metaclass; treat as regular
- # class
- if issubclass(t, type):
- self.save_global(obj)
- return
-
- # Check for a __reduce_ex__ method, fall back to __reduce__
- reduce = getattr(obj, "__reduce_ex__", None)
+ # Check for a class with a custom metaclass; treat as regular
+ # class
+ if issubclass(t, type):
+ self.save_global(obj)
+ return
+
+ # Check for a __reduce_ex__ method, fall back to __reduce__
+ reduce = getattr(obj, "__reduce_ex__", None)
if reduce is not None:
- rv = reduce(self.proto)
+ rv = reduce(self.proto)
else:
- reduce = getattr(obj, "__reduce__", None)
- if reduce is not None:
- rv = reduce()
- else:
- raise PicklingError("Can't pickle %r object: %r" %
- (t.__name__, obj))
+ reduce = getattr(obj, "__reduce__", None)
+ if reduce is not None:
+ rv = reduce()
+ else:
+ raise PicklingError("Can't pickle %r object: %r" %
+ (t.__name__, obj))
# Check for string returned by reduce(), meaning "save as global"
if isinstance(rv, str):
@@ -595,9 +595,9 @@ class _Pickler:
# Assert that it returned an appropriately sized tuple
l = len(rv)
- if not (2 <= l <= 6):
+ if not (2 <= l <= 6):
raise PicklingError("Tuple returned by %s must have "
- "two to six elements" % reduce)
+ "two to six elements" % reduce)
# Save the reduce() output and finally memoize the object
self.save_reduce(obj=obj, *rv)
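The restored save() flow consults a reducer_override() hook before the type dispatch table; returning NotImplemented falls through to the normal path. A hedged sketch against the pure-Python _Pickler shown here:

    import io
    import pickle

    class ComplexAsPair(pickle._Pickler):
        def reducer_override(self, obj):
            if isinstance(obj, complex):
                return (complex, (obj.real, obj.imag))  # custom reduction
            return NotImplemented  # everything else: normal dispatch above

    buf = io.BytesIO()
    ComplexAsPair(buf, protocol=5).dump(3 + 4j)
    assert pickle.loads(buf.getvalue()) == 3 + 4j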
@@ -619,7 +619,7 @@ class _Pickler:
"persistent IDs in protocol 0 must be ASCII strings")
def save_reduce(self, func, args, state=None, listitems=None,
- dictitems=None, state_setter=None, obj=None):
+ dictitems=None, state_setter=None, obj=None):
# This API is called by some subclasses
if not isinstance(args, tuple):
@@ -713,25 +713,25 @@ class _Pickler:
self._batch_setitems(dictitems)
if state is not None:
- if state_setter is None:
- save(state)
- write(BUILD)
- else:
- # If a state_setter is specified, call it instead of load_build
- # to update obj's with its previous state.
- # First, push state_setter and its tuple of expected arguments
- # (obj, state) onto the stack.
- save(state_setter)
- save(obj) # simple BINGET opcode as obj is already memoized.
- save(state)
- write(TUPLE2)
- # Trigger a state_setter(obj, state) function call.
- write(REDUCE)
- # The purpose of state_setter is to carry-out an
- # inplace modification of obj. We do not care about what the
- # method might return, so its output is eventually removed from
- # the stack.
- write(POP)
+ if state_setter is None:
+ save(state)
+ write(BUILD)
+ else:
+ # If a state_setter is specified, call it instead of load_build
+ # to update obj's with its previous state.
+ # First, push state_setter and its tuple of expected arguments
+ # (obj, state) onto the stack.
+ save(state_setter)
+ save(obj) # simple BINGET opcode as obj is already memoized.
+ save(state)
+ write(TUPLE2)
+ # Trigger a state_setter(obj, state) function call.
+ write(REDUCE)
+ # The purpose of state_setter is to carry-out an
+ # inplace modification of obj. We do not care about what the
+ # method might return, so its output is eventually removed from
+ # the stack.
+ write(POP)
# Methods below this point are dispatched through the dispatch table
@@ -806,47 +806,47 @@ class _Pickler:
self.memoize(obj)
dispatch[bytes] = save_bytes
- def save_bytearray(self, obj):
- if self.proto < 5:
- if not obj: # bytearray is empty
- self.save_reduce(bytearray, (), obj=obj)
- else:
- self.save_reduce(bytearray, (bytes(obj),), obj=obj)
- return
- n = len(obj)
- if n >= self.framer._FRAME_SIZE_TARGET:
- self._write_large_bytes(BYTEARRAY8 + pack("<Q", n), obj)
- else:
- self.write(BYTEARRAY8 + pack("<Q", n) + obj)
- dispatch[bytearray] = save_bytearray
-
- if _HAVE_PICKLE_BUFFER:
- def save_picklebuffer(self, obj):
- if self.proto < 5:
- raise PicklingError("PickleBuffer can only pickled with "
- "protocol >= 5")
- with obj.raw() as m:
- if not m.contiguous:
- raise PicklingError("PickleBuffer can not be pickled when "
- "pointing to a non-contiguous buffer")
- in_band = True
- if self._buffer_callback is not None:
- in_band = bool(self._buffer_callback(obj))
- if in_band:
- # Write data in-band
- # XXX The C implementation avoids a copy here
- if m.readonly:
- self.save_bytes(m.tobytes())
- else:
- self.save_bytearray(m.tobytes())
- else:
- # Write data out-of-band
- self.write(NEXT_BUFFER)
- if m.readonly:
- self.write(READONLY_BUFFER)
-
- dispatch[PickleBuffer] = save_picklebuffer
-
+ def save_bytearray(self, obj):
+ if self.proto < 5:
+ if not obj: # bytearray is empty
+ self.save_reduce(bytearray, (), obj=obj)
+ else:
+ self.save_reduce(bytearray, (bytes(obj),), obj=obj)
+ return
+ n = len(obj)
+ if n >= self.framer._FRAME_SIZE_TARGET:
+ self._write_large_bytes(BYTEARRAY8 + pack("<Q", n), obj)
+ else:
+ self.write(BYTEARRAY8 + pack("<Q", n) + obj)
+ dispatch[bytearray] = save_bytearray
+
+ if _HAVE_PICKLE_BUFFER:
+ def save_picklebuffer(self, obj):
+ if self.proto < 5:
+ raise PicklingError("PickleBuffer can only pickled with "
+ "protocol >= 5")
+ with obj.raw() as m:
+ if not m.contiguous:
+ raise PicklingError("PickleBuffer can not be pickled when "
+ "pointing to a non-contiguous buffer")
+ in_band = True
+ if self._buffer_callback is not None:
+ in_band = bool(self._buffer_callback(obj))
+ if in_band:
+ # Write data in-band
+ # XXX The C implementation avoids a copy here
+ if m.readonly:
+ self.save_bytes(m.tobytes())
+ else:
+ self.save_bytearray(m.tobytes())
+ else:
+ # Write data out-of-band
+ self.write(NEXT_BUFFER)
+ if m.readonly:
+ self.write(READONLY_BUFFER)
+
+ dispatch[PickleBuffer] = save_picklebuffer
+
def save_str(self, obj):
if self.bin:
encoded = obj.encode('utf-8', 'surrogatepass')
@@ -861,10 +861,10 @@ class _Pickler:
self.write(BINUNICODE + pack("<I", n) + encoded)
else:
obj = obj.replace("\\", "\\u005c")
- obj = obj.replace("\0", "\\u0000")
+ obj = obj.replace("\0", "\\u0000")
obj = obj.replace("\n", "\\u000a")
- obj = obj.replace("\r", "\\u000d")
- obj = obj.replace("\x1a", "\\u001a") # EOF on DOS
+ obj = obj.replace("\r", "\\u000d")
+ obj = obj.replace("\x1a", "\\u001a") # EOF on DOS
self.write(UNICODE + obj.encode('raw-unicode-escape') +
b'\n')
self.memoize(obj)
@@ -1136,7 +1136,7 @@ class _Pickler:
class _Unpickler:
def __init__(self, file, *, fix_imports=True,
- encoding="ASCII", errors="strict", buffers=None):
+ encoding="ASCII", errors="strict", buffers=None):
"""This takes a binary file for reading a pickle data stream.
The protocol version of the pickle is detected automatically, so
@@ -1155,26 +1155,26 @@ class _Unpickler:
reading, a BytesIO object, or any other custom object that
meets this interface.
- If *buffers* is not None, it should be an iterable of buffer-enabled
- objects that is consumed each time the pickle stream references
- an out-of-band buffer view. Such buffers have been given in order
- to the *buffer_callback* of a Pickler object.
-
- If *buffers* is None (the default), then the buffers are taken
- from the pickle stream, assuming they are serialized there.
- It is an error for *buffers* to be None if the pickle stream
- was produced with a non-None *buffer_callback*.
-
- Other optional arguments are *fix_imports*, *encoding* and
+ If *buffers* is not None, it should be an iterable of buffer-enabled
+ objects that is consumed each time the pickle stream references
+ an out-of-band buffer view. Such buffers have been given in order
+ to the *buffer_callback* of a Pickler object.
+
+ If *buffers* is None (the default), then the buffers are taken
+ from the pickle stream, assuming they are serialized there.
+ It is an error for *buffers* to be None if the pickle stream
+ was produced with a non-None *buffer_callback*.
+
+ Other optional arguments are *fix_imports*, *encoding* and
*errors*, which are used to control compatibility support for
pickle stream generated by Python 2. If *fix_imports* is True,
pickle will try to map the old Python 2 names to the new names
used in Python 3. The *encoding* and *errors* tell pickle how
to decode 8-bit string instances pickled by Python 2; these
default to 'ASCII' and 'strict', respectively. *encoding* can be
- 'bytes' to read these 8-bit string instances as bytes objects.
+ 'bytes' to read these 8-bit string instances as bytes objects.
"""
- self._buffers = iter(buffers) if buffers is not None else None
+ self._buffers = iter(buffers) if buffers is not None else None
self._file_readline = file.readline
self._file_read = file.read
self.memo = {}
@@ -1195,7 +1195,7 @@ class _Unpickler:
"%s.__init__()" % (self.__class__.__name__,))
self._unframer = _Unframer(self._file_read, self._file_readline)
self.read = self._unframer.read
- self.readinto = self._unframer.readinto
+ self.readinto = self._unframer.readinto
self.readline = self._unframer.readline
self.metastack = []
self.stack = []
@@ -1382,34 +1382,34 @@ class _Unpickler:
self.append(self.read(len))
dispatch[BINBYTES8[0]] = load_binbytes8
- def load_bytearray8(self):
- len, = unpack('<Q', self.read(8))
- if len > maxsize:
- raise UnpicklingError("BYTEARRAY8 exceeds system's maximum size "
- "of %d bytes" % maxsize)
- b = bytearray(len)
- self.readinto(b)
- self.append(b)
- dispatch[BYTEARRAY8[0]] = load_bytearray8
-
- def load_next_buffer(self):
- if self._buffers is None:
- raise UnpicklingError("pickle stream refers to out-of-band data "
- "but no *buffers* argument was given")
- try:
- buf = next(self._buffers)
- except StopIteration:
- raise UnpicklingError("not enough out-of-band buffers")
- self.append(buf)
- dispatch[NEXT_BUFFER[0]] = load_next_buffer
-
- def load_readonly_buffer(self):
- buf = self.stack[-1]
- with memoryview(buf) as m:
- if not m.readonly:
- self.stack[-1] = m.toreadonly()
- dispatch[READONLY_BUFFER[0]] = load_readonly_buffer
-
+ def load_bytearray8(self):
+ len, = unpack('<Q', self.read(8))
+ if len > maxsize:
+ raise UnpicklingError("BYTEARRAY8 exceeds system's maximum size "
+ "of %d bytes" % maxsize)
+ b = bytearray(len)
+ self.readinto(b)
+ self.append(b)
+ dispatch[BYTEARRAY8[0]] = load_bytearray8
+
+ def load_next_buffer(self):
+ if self._buffers is None:
+ raise UnpicklingError("pickle stream refers to out-of-band data "
+ "but no *buffers* argument was given")
+ try:
+ buf = next(self._buffers)
+ except StopIteration:
+ raise UnpicklingError("not enough out-of-band buffers")
+ self.append(buf)
+ dispatch[NEXT_BUFFER[0]] = load_next_buffer
+
+ def load_readonly_buffer(self):
+ buf = self.stack[-1]
+ with memoryview(buf) as m:
+ if not m.readonly:
+ self.stack[-1] = m.toreadonly()
+ dispatch[READONLY_BUFFER[0]] = load_readonly_buffer
+
def load_short_binstring(self):
len = self.read(1)[0]
data = self.read(len)
@@ -1570,7 +1570,7 @@ class _Unpickler:
def find_class(self, module, name):
# Subclasses may override this.
- sys.audit('pickle.find_class', module, name)
+ sys.audit('pickle.find_class', module, name)
if self.proto < 3 and self.fix_imports:
if (module, name) in _compat_pickle.NAME_MAPPING:
module, name = _compat_pickle.NAME_MAPPING[(module, name)]
@@ -1606,29 +1606,29 @@ class _Unpickler:
def load_get(self):
i = int(self.readline()[:-1])
- try:
- self.append(self.memo[i])
- except KeyError:
- msg = f'Memo value not found at index {i}'
- raise UnpicklingError(msg) from None
+ try:
+ self.append(self.memo[i])
+ except KeyError:
+ msg = f'Memo value not found at index {i}'
+ raise UnpicklingError(msg) from None
dispatch[GET[0]] = load_get
def load_binget(self):
i = self.read(1)[0]
- try:
- self.append(self.memo[i])
- except KeyError as exc:
- msg = f'Memo value not found at index {i}'
- raise UnpicklingError(msg) from None
+ try:
+ self.append(self.memo[i])
+ except KeyError as exc:
+ msg = f'Memo value not found at index {i}'
+ raise UnpicklingError(msg) from None
dispatch[BINGET[0]] = load_binget
def load_long_binget(self):
i, = unpack('<I', self.read(4))
- try:
- self.append(self.memo[i])
- except KeyError as exc:
- msg = f'Memo value not found at index {i}'
- raise UnpicklingError(msg) from None
+ try:
+ self.append(self.memo[i])
+ except KeyError as exc:
+ msg = f'Memo value not found at index {i}'
+ raise UnpicklingError(msg) from None
dispatch[LONG_BINGET[0]] = load_long_binget
def load_put(self):
@@ -1746,29 +1746,29 @@ class _Unpickler:
# Shorthands
-def _dump(obj, file, protocol=None, *, fix_imports=True, buffer_callback=None):
- _Pickler(file, protocol, fix_imports=fix_imports,
- buffer_callback=buffer_callback).dump(obj)
+def _dump(obj, file, protocol=None, *, fix_imports=True, buffer_callback=None):
+ _Pickler(file, protocol, fix_imports=fix_imports,
+ buffer_callback=buffer_callback).dump(obj)
-def _dumps(obj, protocol=None, *, fix_imports=True, buffer_callback=None):
+def _dumps(obj, protocol=None, *, fix_imports=True, buffer_callback=None):
f = io.BytesIO()
- _Pickler(f, protocol, fix_imports=fix_imports,
- buffer_callback=buffer_callback).dump(obj)
+ _Pickler(f, protocol, fix_imports=fix_imports,
+ buffer_callback=buffer_callback).dump(obj)
res = f.getvalue()
assert isinstance(res, bytes_types)
return res
-def _load(file, *, fix_imports=True, encoding="ASCII", errors="strict",
- buffers=None):
- return _Unpickler(file, fix_imports=fix_imports, buffers=buffers,
+def _load(file, *, fix_imports=True, encoding="ASCII", errors="strict",
+ buffers=None):
+ return _Unpickler(file, fix_imports=fix_imports, buffers=buffers,
encoding=encoding, errors=errors).load()
-def _loads(s, /, *, fix_imports=True, encoding="ASCII", errors="strict",
- buffers=None):
+def _loads(s, /, *, fix_imports=True, encoding="ASCII", errors="strict",
+ buffers=None):
if isinstance(s, str):
raise TypeError("Can't load pickle from unicode string")
file = io.BytesIO(s)
- return _Unpickler(file, fix_imports=fix_imports, buffers=buffers,
+ return _Unpickler(file, fix_imports=fix_imports, buffers=buffers,
encoding=encoding, errors=errors).load()
# Use the faster _pickle if possible
diff --git a/contrib/tools/python3/src/Lib/pickletools.py b/contrib/tools/python3/src/Lib/pickletools.py
index 95706e746c..565b8ceeae 100644
--- a/contrib/tools/python3/src/Lib/pickletools.py
+++ b/contrib/tools/python3/src/Lib/pickletools.py
@@ -565,41 +565,41 @@ bytes8 = ArgumentDescriptor(
the number of bytes, and the second argument is that many bytes.
""")
-
-def read_bytearray8(f):
- r"""
- >>> import io, struct, sys
- >>> read_bytearray8(io.BytesIO(b"\x00\x00\x00\x00\x00\x00\x00\x00abc"))
- bytearray(b'')
- >>> read_bytearray8(io.BytesIO(b"\x03\x00\x00\x00\x00\x00\x00\x00abcdef"))
- bytearray(b'abc')
- >>> bigsize8 = struct.pack("<Q", sys.maxsize//3)
- >>> read_bytearray8(io.BytesIO(bigsize8 + b"abcdef")) #doctest: +ELLIPSIS
- Traceback (most recent call last):
- ...
- ValueError: expected ... bytes in a bytearray8, but only 6 remain
- """
-
- n = read_uint8(f)
- assert n >= 0
- if n > sys.maxsize:
- raise ValueError("bytearray8 byte count > sys.maxsize: %d" % n)
- data = f.read(n)
- if len(data) == n:
- return bytearray(data)
- raise ValueError("expected %d bytes in a bytearray8, but only %d remain" %
- (n, len(data)))
-
-bytearray8 = ArgumentDescriptor(
- name="bytearray8",
- n=TAKEN_FROM_ARGUMENT8U,
- reader=read_bytearray8,
- doc="""A counted bytearray.
-
- The first argument is an 8-byte little-endian unsigned int giving
- the number of bytes, and the second argument is that many bytes.
- """)
-
+
+def read_bytearray8(f):
+ r"""
+ >>> import io, struct, sys
+ >>> read_bytearray8(io.BytesIO(b"\x00\x00\x00\x00\x00\x00\x00\x00abc"))
+ bytearray(b'')
+ >>> read_bytearray8(io.BytesIO(b"\x03\x00\x00\x00\x00\x00\x00\x00abcdef"))
+ bytearray(b'abc')
+ >>> bigsize8 = struct.pack("<Q", sys.maxsize//3)
+ >>> read_bytearray8(io.BytesIO(bigsize8 + b"abcdef")) #doctest: +ELLIPSIS
+ Traceback (most recent call last):
+ ...
+ ValueError: expected ... bytes in a bytearray8, but only 6 remain
+ """
+
+ n = read_uint8(f)
+ assert n >= 0
+ if n > sys.maxsize:
+ raise ValueError("bytearray8 byte count > sys.maxsize: %d" % n)
+ data = f.read(n)
+ if len(data) == n:
+ return bytearray(data)
+ raise ValueError("expected %d bytes in a bytearray8, but only %d remain" %
+ (n, len(data)))
+
+bytearray8 = ArgumentDescriptor(
+ name="bytearray8",
+ n=TAKEN_FROM_ARGUMENT8U,
+ reader=read_bytearray8,
+ doc="""A counted bytearray.
+
+ The first argument is an 8-byte little-endian unsigned int giving
+ the number of bytes, and the second argument is that many bytes.
+ """)
+
def read_unicodestringnl(f):
r"""
>>> import io
@@ -1005,11 +1005,11 @@ pybytes = StackObject(
obtype=bytes,
doc="A Python bytes object.")
-pybytearray = StackObject(
- name='bytearray',
- obtype=bytearray,
- doc="A Python bytearray object.")
-
+pybytearray = StackObject(
+ name='bytearray',
+ obtype=bytearray,
+ doc="A Python bytearray object.")
+
pyunicode = StackObject(
name='str',
obtype=str,
@@ -1045,11 +1045,11 @@ pyfrozenset = StackObject(
obtype=set,
doc="A Python frozenset object.")
-pybuffer = StackObject(
- name='buffer',
- obtype=object,
- doc="A Python buffer-like object.")
-
+pybuffer = StackObject(
+ name='buffer',
+ obtype=object,
+ doc="A Python buffer-like object.")
+
anyobject = StackObject(
name='any',
obtype=object,
@@ -1310,7 +1310,7 @@ opcodes = [
object instead.
"""),
- # Bytes (protocol 3 and higher)
+ # Bytes (protocol 3 and higher)
I(name='BINBYTES',
code='B',
@@ -1351,39 +1351,39 @@ opcodes = [
which are taken literally as the string content.
"""),
- # Bytearray (protocol 5 and higher)
-
- I(name='BYTEARRAY8',
- code='\x96',
- arg=bytearray8,
- stack_before=[],
- stack_after=[pybytearray],
- proto=5,
- doc="""Push a Python bytearray object.
-
- There are two arguments: the first is an 8-byte unsigned int giving
- the number of bytes in the bytearray, and the second is that many bytes,
- which are taken literally as the bytearray content.
- """),
-
- # Out-of-band buffer (protocol 5 and higher)
-
- I(name='NEXT_BUFFER',
- code='\x97',
- arg=None,
- stack_before=[],
- stack_after=[pybuffer],
- proto=5,
- doc="Push an out-of-band buffer object."),
-
- I(name='READONLY_BUFFER',
- code='\x98',
- arg=None,
- stack_before=[pybuffer],
- stack_after=[pybuffer],
- proto=5,
- doc="Make an out-of-band buffer object read-only."),
-
+ # Bytearray (protocol 5 and higher)
+
+ I(name='BYTEARRAY8',
+ code='\x96',
+ arg=bytearray8,
+ stack_before=[],
+ stack_after=[pybytearray],
+ proto=5,
+ doc="""Push a Python bytearray object.
+
+ There are two arguments: the first is an 8-byte unsigned int giving
+ the number of bytes in the bytearray, and the second is that many bytes,
+ which are taken literally as the bytearray content.
+ """),
+
+ # Out-of-band buffer (protocol 5 and higher)
+
+ I(name='NEXT_BUFFER',
+ code='\x97',
+ arg=None,
+ stack_before=[],
+ stack_after=[pybuffer],
+ proto=5,
+ doc="Push an out-of-band buffer object."),
+
+ I(name='READONLY_BUFFER',
+ code='\x98',
+ arg=None,
+ stack_before=[pybuffer],
+ stack_after=[pybuffer],
+ proto=5,
+ doc="Make an out-of-band buffer object read-only."),
+
# Ways to spell None.
I(name='NONE',
diff --git a/contrib/tools/python3/src/Lib/pkgutil.py b/contrib/tools/python3/src/Lib/pkgutil.py
index 9608d0e0ed..c1e5e9239c 100644
--- a/contrib/tools/python3/src/Lib/pkgutil.py
+++ b/contrib/tools/python3/src/Lib/pkgutil.py
@@ -7,7 +7,7 @@ import importlib.util
import importlib.machinery
import os
import os.path
-import re
+import re
import sys
from types import ModuleType
import warnings
@@ -412,7 +412,7 @@ def get_importer(path_item):
The cache (or part of it) can be cleared manually if a
rescan of sys.path_hooks is necessary.
"""
- path_item = os.fsdecode(path_item)
+ path_item = os.fsdecode(path_item)
try:
importer = sys.path_importer_cache[path_item]
except KeyError:
@@ -637,72 +637,72 @@ def get_data(package, resource):
parts.insert(0, os.path.dirname(mod.__file__))
resource_name = os.path.join(*parts)
return loader.get_data(resource_name)
-
-
-_DOTTED_WORDS = r'(?!\d)(\w+)(\.(?!\d)(\w+))*'
-_NAME_PATTERN = re.compile(f'^(?P<pkg>{_DOTTED_WORDS})(?P<cln>:(?P<obj>{_DOTTED_WORDS})?)?$', re.U)
-del _DOTTED_WORDS
-
-def resolve_name(name):
- """
- Resolve a name to an object.
-
- It is expected that `name` will be a string in one of the following
- formats, where W is shorthand for a valid Python identifier and dot stands
- for a literal period in these pseudo-regexes:
-
- W(.W)*
- W(.W)*:(W(.W)*)?
-
- The first form is intended for backward compatibility only. It assumes that
- some part of the dotted name is a package, and the rest is an object
- somewhere within that package, possibly nested inside other objects.
- Because the place where the package stops and the object hierarchy starts
- can't be inferred by inspection, repeated attempts to import must be done
- with this form.
-
- In the second form, the caller makes the division point clear through the
- provision of a single colon: the dotted name to the left of the colon is a
- package to be imported, and the dotted name to the right is the object
- hierarchy within that package. Only one import is needed in this form. If
- it ends with the colon, then a module object is returned.
-
- The function will return an object (which might be a module), or raise one
- of the following exceptions:
-
- ValueError - if `name` isn't in a recognised format
- ImportError - if an import failed when it shouldn't have
- AttributeError - if a failure occurred when traversing the object hierarchy
- within the imported package to get to the desired object.
- """
- m = _NAME_PATTERN.match(name)
- if not m:
- raise ValueError(f'invalid format: {name!r}')
- gd = m.groupdict()
- if gd.get('cln'):
- # there is a colon - a one-step import is all that's needed
- mod = importlib.import_module(gd['pkg'])
- parts = gd.get('obj')
- parts = parts.split('.') if parts else []
- else:
- # no colon - have to iterate to find the package boundary
- parts = name.split('.')
- modname = parts.pop(0)
- # first part *must* be a module/package.
- mod = importlib.import_module(modname)
- while parts:
- p = parts[0]
- s = f'{modname}.{p}'
- try:
- mod = importlib.import_module(s)
- parts.pop(0)
- modname = s
- except ImportError:
- break
- # if we reach this point, mod is the module, already imported, and
- # parts is the list of parts in the object hierarchy to be traversed, or
- # an empty list if just the module is wanted.
- result = mod
- for p in parts:
- result = getattr(result, p)
- return result
+
+
+_DOTTED_WORDS = r'(?!\d)(\w+)(\.(?!\d)(\w+))*'
+_NAME_PATTERN = re.compile(f'^(?P<pkg>{_DOTTED_WORDS})(?P<cln>:(?P<obj>{_DOTTED_WORDS})?)?$', re.U)
+del _DOTTED_WORDS
+
+def resolve_name(name):
+ """
+ Resolve a name to an object.
+
+ It is expected that `name` will be a string in one of the following
+ formats, where W is shorthand for a valid Python identifier and dot stands
+ for a literal period in these pseudo-regexes:
+
+ W(.W)*
+ W(.W)*:(W(.W)*)?
+
+ The first form is intended for backward compatibility only. It assumes that
+ some part of the dotted name is a package, and the rest is an object
+ somewhere within that package, possibly nested inside other objects.
+ Because the place where the package stops and the object hierarchy starts
+ can't be inferred by inspection, repeated attempts to import must be done
+ with this form.
+
+ In the second form, the caller makes the division point clear through the
+ provision of a single colon: the dotted name to the left of the colon is a
+ package to be imported, and the dotted name to the right is the object
+ hierarchy within that package. Only one import is needed in this form. If
+ it ends with the colon, then a module object is returned.
+
+ The function will return an object (which might be a module), or raise one
+ of the following exceptions:
+
+ ValueError - if `name` isn't in a recognised format
+ ImportError - if an import failed when it shouldn't have
+ AttributeError - if a failure occurred when traversing the object hierarchy
+ within the imported package to get to the desired object.
+ """
+ m = _NAME_PATTERN.match(name)
+ if not m:
+ raise ValueError(f'invalid format: {name!r}')
+ gd = m.groupdict()
+ if gd.get('cln'):
+ # there is a colon - a one-step import is all that's needed
+ mod = importlib.import_module(gd['pkg'])
+ parts = gd.get('obj')
+ parts = parts.split('.') if parts else []
+ else:
+ # no colon - have to iterate to find the package boundary
+ parts = name.split('.')
+ modname = parts.pop(0)
+ # first part *must* be a module/package.
+ mod = importlib.import_module(modname)
+ while parts:
+ p = parts[0]
+ s = f'{modname}.{p}'
+ try:
+ mod = importlib.import_module(s)
+ parts.pop(0)
+ modname = s
+ except ImportError:
+ break
+ # if we reach this point, mod is the module, already imported, and
+ # parts is the list of parts in the object hierarchy to be traversed, or
+ # an empty list if just the module is wanted.
+ result = mod
+ for p in parts:
+ result = getattr(result, p)
+ return result
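
A minimal usage sketch of the two accepted formats, assuming this function is
exposed as pkgutil.resolve_name() as in CPython 3.9+:

    import pkgutil

    # Colon form: 'os.path' is imported once, then 'join' is found by
    # attribute traversal.
    join = pkgutil.resolve_name('os.path:join')

    # Legacy dotted form: repeated import attempts locate the package
    # boundary before falling back to getattr().
    assert pkgutil.resolve_name('os.path.join') is join

    # A trailing colon returns the module object itself.
    mod = pkgutil.resolve_name('os.path:')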
diff --git a/contrib/tools/python3/src/Lib/platform.py b/contrib/tools/python3/src/Lib/platform.py
index d6412e169b..4eb936017c 100644
--- a/contrib/tools/python3/src/Lib/platform.py
+++ b/contrib/tools/python3/src/Lib/platform.py
@@ -72,7 +72,7 @@
# type information
# 0.4.0 - added win32_ver() and modified the platform() output for WinXX
# 0.3.4 - fixed a bug in _follow_symlinks()
-# 0.3.3 - fixed popen() and "file" command invocation bugs
+# 0.3.3 - fixed popen() and "file" command invocation bugs
# 0.3.2 - added architecture() API and support for it in platform()
# 0.3.1 - fixed syscmd_ver() RE to support Windows NT
# 0.3.0 - added system alias support
@@ -113,12 +113,12 @@ __copyright__ = """
__version__ = '1.0.8'
import collections
-import os
-import re
-import sys
-import subprocess
-import functools
-import itertools
+import os
+import re
+import sys
+import subprocess
+import functools
+import itertools
### Globals & Constants
@@ -159,7 +159,7 @@ _libc_search = re.compile(b'(__libc_init)'
b'|'
br'(libc(_\w+)?\.so(?:\.(\d[0-9.]*))?)', re.ASCII)
-def libc_ver(executable=None, lib='', version='', chunksize=16384):
+def libc_ver(executable=None, lib='', version='', chunksize=16384):
""" Tries to determine the libc version that the file executable
(which defaults to the Python interpreter) is linked against.
@@ -174,19 +174,19 @@ def libc_ver(executable=None, lib='', version='', chunksize=16384):
The file is read and scanned in chunks of chunksize bytes.
"""
- if executable is None:
- try:
- ver = os.confstr('CS_GNU_LIBC_VERSION')
- # parse 'glibc 2.28' as ('glibc', '2.28')
- parts = ver.split(maxsplit=1)
- if len(parts) == 2:
- return tuple(parts)
- except (AttributeError, ValueError, OSError):
- # os.confstr() or CS_GNU_LIBC_VERSION value not available
- pass
-
- executable = sys.executable
-
+ if executable is None:
+ try:
+ ver = os.confstr('CS_GNU_LIBC_VERSION')
+ # parse 'glibc 2.28' as ('glibc', '2.28')
+ parts = ver.split(maxsplit=1)
+ if len(parts) == 2:
+ return tuple(parts)
+ except (AttributeError, ValueError, OSError):
+ # os.confstr() or CS_GNU_LIBC_VERSION value not available
+ pass
+
+ executable = sys.executable
+
V = _comparable_version
if hasattr(os.path, 'realpath'):
# Python 2.2 introduced os.path.realpath(); it is used
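
Extracted as a standalone sketch, the fast path added above asks the C library
for its own version string before falling back to scanning the binary; the
helper name here is hypothetical:

    import os

    def _libc_from_confstr():
        try:
            # glibc reports e.g. 'glibc 2.28'
            ver = os.confstr('CS_GNU_LIBC_VERSION')
            parts = ver.split(maxsplit=1)
            if len(parts) == 2:
                return tuple(parts)          # ('glibc', '2.28')
        except (AttributeError, ValueError, OSError):
            pass                             # not available: fall back to a file scan
        return None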
@@ -239,7 +239,7 @@ def _norm_version(version, build=''):
if build:
l.append(build)
try:
- strings = list(map(str, map(int, l)))
+ strings = list(map(str, map(int, l)))
except ValueError:
strings = l
version = '.'.join(strings[:3])
@@ -276,16 +276,16 @@ def _syscmd_ver(system='', release='', version='',
return system, release, version
# Try some common cmd strings
- import subprocess
+ import subprocess
for cmd in ('ver', 'command /c ver', 'cmd /c ver'):
try:
- info = subprocess.check_output(cmd,
- stdin=subprocess.DEVNULL,
- stderr=subprocess.DEVNULL,
- text=True,
- shell=True)
- except (OSError, subprocess.CalledProcessError) as why:
- #print('Command %s failed: %s' % (cmd, why))
+ info = subprocess.check_output(cmd,
+ stdin=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ text=True,
+ shell=True)
+ except (OSError, subprocess.CalledProcessError) as why:
+ #print('Command %s failed: %s' % (cmd, why))
continue
else:
break
@@ -336,27 +336,27 @@ _WIN32_SERVER_RELEASES = {
(6, None): "post2012ServerR2",
}
-def win32_is_iot():
- return win32_edition() in ('IoTUAP', 'NanoServer', 'WindowsCoreHeadless', 'IoTEdgeOS')
-
-def win32_edition():
- try:
- try:
- import winreg
- except ImportError:
- import _winreg as winreg
- except ImportError:
- pass
- else:
- try:
- cvkey = r'SOFTWARE\Microsoft\Windows NT\CurrentVersion'
- with winreg.OpenKeyEx(winreg.HKEY_LOCAL_MACHINE, cvkey) as key:
- return winreg.QueryValueEx(key, 'EditionId')[0]
- except OSError:
- pass
-
- return None
-
+def win32_is_iot():
+ return win32_edition() in ('IoTUAP', 'NanoServer', 'WindowsCoreHeadless', 'IoTEdgeOS')
+
+def win32_edition():
+ try:
+ try:
+ import winreg
+ except ImportError:
+ import _winreg as winreg
+ except ImportError:
+ pass
+ else:
+ try:
+ cvkey = r'SOFTWARE\Microsoft\Windows NT\CurrentVersion'
+ with winreg.OpenKeyEx(winreg.HKEY_LOCAL_MACHINE, cvkey) as key:
+ return winreg.QueryValueEx(key, 'EditionId')[0]
+ except OSError:
+ pass
+
+ return None
+
def win32_ver(release='', version='', csd='', ptype=''):
try:
from sys import getwindowsversion
@@ -364,20 +364,20 @@ def win32_ver(release='', version='', csd='', ptype=''):
return release, version, csd, ptype
winver = getwindowsversion()
- try:
- major, minor, build = map(int, _syscmd_ver()[2].split('.'))
- except ValueError:
- major, minor, build = winver.platform_version or winver[:3]
- version = '{0}.{1}.{2}'.format(major, minor, build)
-
- release = (_WIN32_CLIENT_RELEASES.get((major, minor)) or
- _WIN32_CLIENT_RELEASES.get((major, None)) or
+ try:
+ major, minor, build = map(int, _syscmd_ver()[2].split('.'))
+ except ValueError:
+ major, minor, build = winver.platform_version or winver[:3]
+ version = '{0}.{1}.{2}'.format(major, minor, build)
+
+ release = (_WIN32_CLIENT_RELEASES.get((major, minor)) or
+ _WIN32_CLIENT_RELEASES.get((major, None)) or
release)
# getwindowsversion() reflect the compatibility mode Python is
# running under, and so the service pack value is only going to be
# valid if the versions match.
- if winver[:2] == (major, minor):
+ if winver[:2] == (major, minor):
try:
csd = 'SP{}'.format(winver.service_pack_major)
except AttributeError:
@@ -386,24 +386,24 @@ def win32_ver(release='', version='', csd='', ptype=''):
# VER_NT_SERVER = 3
if getattr(winver, 'product_type', None) == 3:
- release = (_WIN32_SERVER_RELEASES.get((major, minor)) or
- _WIN32_SERVER_RELEASES.get((major, None)) or
+ release = (_WIN32_SERVER_RELEASES.get((major, minor)) or
+ _WIN32_SERVER_RELEASES.get((major, None)) or
release)
try:
- try:
- import winreg
- except ImportError:
- import _winreg as winreg
- except ImportError:
+ try:
+ import winreg
+ except ImportError:
+ import _winreg as winreg
+ except ImportError:
pass
- else:
- try:
- cvkey = r'SOFTWARE\Microsoft\Windows NT\CurrentVersion'
- with winreg.OpenKeyEx(winreg.HKEY_LOCAL_MACHINE, cvkey) as key:
- ptype = winreg.QueryValueEx(key, 'CurrentType')[0]
- except OSError:
- pass
+ else:
+ try:
+ cvkey = r'SOFTWARE\Microsoft\Windows NT\CurrentVersion'
+ with winreg.OpenKeyEx(winreg.HKEY_LOCAL_MACHINE, cvkey) as key:
+ ptype = winreg.QueryValueEx(key, 'CurrentType')[0]
+ except OSError:
+ pass
return release, version, csd, ptype
@@ -432,7 +432,7 @@ def _mac_ver_xml():
def mac_ver(release='', versioninfo=('', '', ''), machine=''):
- """ Get macOS version information and return it as tuple (release,
+ """ Get macOS version information and return it as tuple (release,
versioninfo, machine) with versioninfo being a tuple (version,
dev_stage, non_release_version).
@@ -504,7 +504,7 @@ def system_alias(system, release, version):
where it would otherwise cause confusion.
"""
- if system == 'SunOS':
+ if system == 'SunOS':
# Sun's OS
if release < '5':
# These releases use the old name SunOS
@@ -540,9 +540,9 @@ def system_alias(system, release, version):
# In case one of the other tricks
system = 'Windows'
- # bpo-35516: Don't replace Darwin with macOS since input release and
- # version arguments can be different than the currently running version.
-
+ # bpo-35516: Don't replace Darwin with macOS since input release and
+ # version arguments can be different than the currently running version.
+
return system, release, version
### Various internal helpers
@@ -618,24 +618,24 @@ def _syscmd_file(target, default=''):
if sys.platform in ('dos', 'win32', 'win16'):
# XXX Others too ?
return default
-
- import subprocess
+
+ import subprocess
target = _follow_symlinks(target)
- # "file" output is locale dependent: force the usage of the C locale
- # to get deterministic behavior.
- env = dict(os.environ, LC_ALL='C')
+ # "file" output is locale dependent: force the usage of the C locale
+ # to get deterministic behavior.
+ env = dict(os.environ, LC_ALL='C')
try:
- # -b: do not prepend filenames to output lines (brief mode)
- output = subprocess.check_output(['file', '-b', target],
- stderr=subprocess.DEVNULL,
- env=env)
- except (OSError, subprocess.CalledProcessError):
+ # -b: do not prepend filenames to output lines (brief mode)
+ output = subprocess.check_output(['file', '-b', target],
+ stderr=subprocess.DEVNULL,
+ env=env)
+ except (OSError, subprocess.CalledProcessError):
return default
- if not output:
+ if not output:
return default
- # With the C locale, the output should be mostly ASCII-compatible.
- # Decode from Latin-1 to prevent Unicode decode error.
- return output.decode('latin-1')
+ # With the C locale, the output should be mostly ASCII-compatible.
+ # Decode from Latin-1 to prevent Unicode decode error.
+ return output.decode('latin-1')
### Information about the used architecture
@@ -672,8 +672,8 @@ def architecture(executable=sys.executable, bits='', linkage=''):
# else is given as default.
if not bits:
import struct
- size = struct.calcsize('P')
- bits = str(size * 8) + 'bit'
+ size = struct.calcsize('P')
+ bits = str(size * 8) + 'bit'
# Get data from the 'file' system command
if executable:
@@ -693,7 +693,7 @@ def architecture(executable=sys.executable, bits='', linkage=''):
linkage = l
return bits, linkage
- if 'executable' not in fileout and 'shared object' not in fileout:
+ if 'executable' not in fileout and 'shared object' not in fileout:
# Format not supported
return bits, linkage
@@ -725,103 +725,103 @@ def architecture(executable=sys.executable, bits='', linkage=''):
return bits, linkage
-
-def _get_machine_win32():
- # Try to use the PROCESSOR_* environment variables
- # available on Win XP and later; see
- # http://support.microsoft.com/kb/888731 and
- # http://www.geocities.com/rick_lively/MANUALS/ENV/MSWIN/PROCESSI.HTM
-
- # WOW64 processes mask the native architecture
- return (
- os.environ.get('PROCESSOR_ARCHITEW6432', '') or
- os.environ.get('PROCESSOR_ARCHITECTURE', '')
- )
-
-
-class _Processor:
- @classmethod
- def get(cls):
- func = getattr(cls, f'get_{sys.platform}', cls.from_subprocess)
- return func() or ''
-
- def get_win32():
- return os.environ.get('PROCESSOR_IDENTIFIER', _get_machine_win32())
-
- def get_OpenVMS():
- try:
- import vms_lib
- except ImportError:
- pass
- else:
- csid, cpu_number = vms_lib.getsyi('SYI$_CPU', 0)
- return 'Alpha' if cpu_number >= 128 else 'VAX'
-
- def from_subprocess():
- """
- Fall back to `uname -p`
- """
- try:
- return subprocess.check_output(
- ['uname', '-p'],
- stderr=subprocess.DEVNULL,
- text=True,
- ).strip()
- except (OSError, subprocess.CalledProcessError):
- pass
-
-
-def _unknown_as_blank(val):
- return '' if val == 'unknown' else val
-
-
+
+def _get_machine_win32():
+ # Try to use the PROCESSOR_* environment variables
+ # available on Win XP and later; see
+ # http://support.microsoft.com/kb/888731 and
+ # http://www.geocities.com/rick_lively/MANUALS/ENV/MSWIN/PROCESSI.HTM
+
+ # WOW64 processes mask the native architecture
+ return (
+ os.environ.get('PROCESSOR_ARCHITEW6432', '') or
+ os.environ.get('PROCESSOR_ARCHITECTURE', '')
+ )
+
+
+class _Processor:
+ @classmethod
+ def get(cls):
+ func = getattr(cls, f'get_{sys.platform}', cls.from_subprocess)
+ return func() or ''
+
+ def get_win32():
+ return os.environ.get('PROCESSOR_IDENTIFIER', _get_machine_win32())
+
+ def get_OpenVMS():
+ try:
+ import vms_lib
+ except ImportError:
+ pass
+ else:
+ csid, cpu_number = vms_lib.getsyi('SYI$_CPU', 0)
+ return 'Alpha' if cpu_number >= 128 else 'VAX'
+
+ def from_subprocess():
+ """
+ Fall back to `uname -p`
+ """
+ try:
+ return subprocess.check_output(
+ ['uname', '-p'],
+ stderr=subprocess.DEVNULL,
+ text=True,
+ ).strip()
+ except (OSError, subprocess.CalledProcessError):
+ pass
+
+
+def _unknown_as_blank(val):
+ return '' if val == 'unknown' else val
+
+
### Portable uname() interface
-class uname_result(
- collections.namedtuple(
- "uname_result_base",
- "system node release version machine")
- ):
- """
- A uname_result that's largely compatible with a
- simple namedtuple except that 'processor' is
- resolved late and cached to avoid calling "uname"
- except when needed.
- """
-
- @functools.cached_property
- def processor(self):
- return _unknown_as_blank(_Processor.get())
-
- def __iter__(self):
- return itertools.chain(
- super().__iter__(),
- (self.processor,)
- )
-
- @classmethod
- def _make(cls, iterable):
- # override factory to affect length check
- num_fields = len(cls._fields)
- result = cls.__new__(cls, *iterable)
- if len(result) != num_fields + 1:
- msg = f'Expected {num_fields} arguments, got {len(result)}'
- raise TypeError(msg)
- return result
-
- def __getitem__(self, key):
- return tuple(self)[key]
-
- def __len__(self):
- return len(tuple(iter(self)))
-
- def __reduce__(self):
- return uname_result, tuple(self)[:len(self._fields)]
-
-
+class uname_result(
+ collections.namedtuple(
+ "uname_result_base",
+ "system node release version machine")
+ ):
+ """
+ A uname_result that's largely compatible with a
+ simple namedtuple except that 'processor' is
+ resolved late and cached to avoid calling "uname"
+ except when needed.
+ """
+
+ @functools.cached_property
+ def processor(self):
+ return _unknown_as_blank(_Processor.get())
+
+ def __iter__(self):
+ return itertools.chain(
+ super().__iter__(),
+ (self.processor,)
+ )
+
+ @classmethod
+ def _make(cls, iterable):
+ # override factory to affect length check
+ num_fields = len(cls._fields)
+ result = cls.__new__(cls, *iterable)
+ if len(result) != num_fields + 1:
+ msg = f'Expected {num_fields} arguments, got {len(result)}'
+ raise TypeError(msg)
+ return result
+
+ def __getitem__(self, key):
+ return tuple(self)[key]
+
+ def __len__(self):
+ return len(tuple(iter(self)))
+
+ def __reduce__(self):
+ return uname_result, tuple(self)[:len(self._fields)]
+
+
_uname_cache = None
-
+
def uname():
""" Fairly portable uname interface. Returns a tuple
@@ -841,24 +841,24 @@ def uname():
# Get some infos from the builtin os.uname API...
try:
- system, node, release, version, machine = infos = os.uname()
+ system, node, release, version, machine = infos = os.uname()
except AttributeError:
- system = sys.platform
- node = _node()
- release = version = machine = ''
- infos = ()
+ system = sys.platform
+ node = _node()
+ release = version = machine = ''
+ infos = ()
- if not any(infos):
- # uname is not available
+ if not any(infos):
+ # uname is not available
# Try win32_ver() on win32 platforms
if system == 'win32':
release, version, csd, ptype = win32_ver()
- machine = machine or _get_machine_win32()
+ machine = machine or _get_machine_win32()
# Try the 'ver' system command available on some
# platforms
- if not (release and version):
+ if not (release and version):
system, release, version = _syscmd_ver(system)
# Normalize system to what win32_ver() normally returns
# (_syscmd_ver() tends to return the vendor name as well)
@@ -904,9 +904,9 @@ def uname():
system = 'Windows'
release = 'Vista'
- vals = system, node, release, version, machine
- # Replace 'unknown' values with the more portable ''
- _uname_cache = uname_result(*map(_unknown_as_blank, vals))
+ vals = system, node, release, version, machine
+ # Replace 'unknown' values with the more portable ''
+ _uname_cache = uname_result(*map(_unknown_as_blank, vals))
return _uname_cache
### Direct interfaces to some of the uname() return values
@@ -1215,13 +1215,13 @@ def platform(aliased=0, terse=0):
if aliased:
system, release, version = system_alias(system, release, version)
- if system == 'Darwin':
- # macOS (darwin kernel)
- macos_release = mac_ver()[0]
- if macos_release:
- system = 'macOS'
- release = macos_release
-
+ if system == 'Darwin':
+ # macOS (darwin kernel)
+ macos_release = mac_ver()[0]
+ if macos_release:
+ system = 'macOS'
+ release = macos_release
+
if system == 'Windows':
# MS platforms
rel, vers, csd, ptype = win32_ver(version)
@@ -1231,11 +1231,11 @@ def platform(aliased=0, terse=0):
platform = _platform(system, release, version, csd)
elif system in ('Linux',):
- # check for libc vs. glibc
- libcname, libcversion = libc_ver()
- platform = _platform(system, release, machine, processor,
- 'with',
- libcname+libcversion)
+ # check for libc vs. glibc
+ libcname, libcversion = libc_ver()
+ platform = _platform(system, release, machine, processor,
+ 'with',
+ libcname+libcversion)
elif system == 'Java':
# Java platforms
r, v, vminfo, (os_name, os_version, os_arch) = java_ver()
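
A short sketch of what the uname_result rework above buys in this version:
'processor' is a cached property resolved on first access, so plain attribute
access never spawns a subprocess (on most POSIX systems the lookup shells out
to `uname -p`):

    import platform

    info = platform.uname()
    system, machine = info.system, info.machine   # no subprocess spawned
    print(info.processor)                         # first access runs (and caches) the lookup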
diff --git a/contrib/tools/python3/src/Lib/plistlib.py b/contrib/tools/python3/src/Lib/plistlib.py
index 2eeebe4c9a..21180eea4a 100644
--- a/contrib/tools/python3/src/Lib/plistlib.py
+++ b/contrib/tools/python3/src/Lib/plistlib.py
@@ -46,7 +46,7 @@ Parse Plist example:
print(pl["aKey"])
"""
__all__ = [
- "InvalidFileException", "FMT_XML", "FMT_BINARY", "load", "dump", "loads", "dumps", "UID"
+ "InvalidFileException", "FMT_XML", "FMT_BINARY", "load", "dump", "loads", "dumps", "UID"
]
import binascii
@@ -65,33 +65,33 @@ PlistFormat = enum.Enum('PlistFormat', 'FMT_XML FMT_BINARY', module=__name__)
globals().update(PlistFormat.__members__)
-class UID:
- def __init__(self, data):
- if not isinstance(data, int):
- raise TypeError("data must be an int")
- if data >= 1 << 64:
- raise ValueError("UIDs cannot be >= 2**64")
- if data < 0:
- raise ValueError("UIDs must be positive")
- self.data = data
-
- def __index__(self):
- return self.data
-
- def __repr__(self):
- return "%s(%s)" % (self.__class__.__name__, repr(self.data))
-
- def __reduce__(self):
- return self.__class__, (self.data,)
-
- def __eq__(self, other):
- if not isinstance(other, UID):
- return NotImplemented
- return self.data == other.data
-
- def __hash__(self):
- return hash(self.data)
-
+class UID:
+ def __init__(self, data):
+ if not isinstance(data, int):
+ raise TypeError("data must be an int")
+ if data >= 1 << 64:
+ raise ValueError("UIDs cannot be >= 2**64")
+ if data < 0:
+ raise ValueError("UIDs must be positive")
+ self.data = data
+
+ def __index__(self):
+ return self.data
+
+ def __repr__(self):
+ return "%s(%s)" % (self.__class__.__name__, repr(self.data))
+
+ def __reduce__(self):
+ return self.__class__, (self.data,)
+
+ def __eq__(self, other):
+ if not isinstance(other, UID):
+ return NotImplemented
+ return self.data == other.data
+
+ def __hash__(self):
+ return hash(self.data)
+
#
# XML support
#
@@ -162,7 +162,7 @@ def _escape(text):
return text
class _PlistParser:
- def __init__(self, dict_type):
+ def __init__(self, dict_type):
self.stack = []
self.current_key = None
self.root = None
@@ -173,16 +173,16 @@ class _PlistParser:
self.parser.StartElementHandler = self.handle_begin_element
self.parser.EndElementHandler = self.handle_end_element
self.parser.CharacterDataHandler = self.handle_data
- self.parser.EntityDeclHandler = self.handle_entity_decl
+ self.parser.EntityDeclHandler = self.handle_entity_decl
self.parser.ParseFile(fileobj)
return self.root
- def handle_entity_decl(self, entity_name, is_parameter_entity, value, base, system_id, public_id, notation_name):
-    # Reject plist files with entity declarations to avoid XML vulnerabilities in expat.
-    # Regular plist files don't contain those declarations, and Apple's plutil tool does not
- # accept them either.
- raise InvalidFileException("XML entity declarations are not supported in plist files")
-
+ def handle_entity_decl(self, entity_name, is_parameter_entity, value, base, system_id, public_id, notation_name):
+    # Reject plist files with entity declarations to avoid XML vulnerabilities in expat.
+    # Regular plist files don't contain those declarations, and Apple's plutil tool does not
+ # accept them either.
+ raise InvalidFileException("XML entity declarations are not supported in plist files")
+
def handle_begin_element(self, element, attrs):
self.data = []
handler = getattr(self, "begin_" + element, None)
@@ -252,11 +252,11 @@ class _PlistParser:
self.add_object(False)
def end_integer(self):
- raw = self.get_data()
- if raw.startswith('0x') or raw.startswith('0X'):
- self.add_object(int(raw, 16))
- else:
- self.add_object(int(raw))
+ raw = self.get_data()
+ if raw.startswith('0x') or raw.startswith('0X'):
+ self.add_object(int(raw, 16))
+ else:
+ self.add_object(int(raw))
def end_real(self):
self.add_object(float(self.get_data()))
@@ -265,7 +265,7 @@ class _PlistParser:
self.add_object(self.get_data())
def end_data(self):
- self.add_object(_decode_base64(self.get_data()))
+ self.add_object(_decode_base64(self.get_data()))
def end_date(self):
self.add_object(_date_from_string(self.get_data()))
@@ -452,7 +452,7 @@ class _BinaryPlistParser:
see also: http://opensource.apple.com/source/CF/CF-744.18/CFBinaryPList.c
"""
- def __init__(self, dict_type):
+ def __init__(self, dict_type):
self._dict_type = dict_type
def parse(self, fp):
@@ -477,7 +477,7 @@ class _BinaryPlistParser:
return self._read_object(top_object)
except (OSError, IndexError, struct.error, OverflowError,
- ValueError):
+ ValueError):
raise InvalidFileException()
def _get_size(self, tokenL):
@@ -493,7 +493,7 @@ class _BinaryPlistParser:
def _read_ints(self, n, size):
data = self._fp.read(size * n)
if size in _BINARY_FORMAT:
- return struct.unpack(f'>{n}{_BINARY_FORMAT[size]}', data)
+ return struct.unpack(f'>{n}{_BINARY_FORMAT[size]}', data)
else:
if not size or len(data) != size * n:
raise InvalidFileException()
@@ -552,27 +552,27 @@ class _BinaryPlistParser:
elif tokenH == 0x40: # data
s = self._get_size(tokenL)
- result = self._fp.read(s)
- if len(result) != s:
- raise InvalidFileException()
+ result = self._fp.read(s)
+ if len(result) != s:
+ raise InvalidFileException()
elif tokenH == 0x50: # ascii string
s = self._get_size(tokenL)
- data = self._fp.read(s)
- if len(data) != s:
- raise InvalidFileException()
- result = data.decode('ascii')
+ data = self._fp.read(s)
+ if len(data) != s:
+ raise InvalidFileException()
+ result = data.decode('ascii')
elif tokenH == 0x60: # unicode string
- s = self._get_size(tokenL) * 2
- data = self._fp.read(s)
- if len(data) != s:
- raise InvalidFileException()
- result = data.decode('utf-16be')
+ s = self._get_size(tokenL) * 2
+ data = self._fp.read(s)
+ if len(data) != s:
+ raise InvalidFileException()
+ result = data.decode('utf-16be')
- elif tokenH == 0x80: # UID
- # used by Key-Archiver plist files
- result = UID(int.from_bytes(self._fp.read(1 + tokenL), 'big'))
+ elif tokenH == 0x80: # UID
+ # used by Key-Archiver plist files
+ result = UID(int.from_bytes(self._fp.read(1 + tokenL), 'big'))
elif tokenH == 0xA0: # array
s = self._get_size(tokenL)
@@ -593,11 +593,11 @@ class _BinaryPlistParser:
obj_refs = self._read_refs(s)
result = self._dict_type()
self._objects[ref] = result
- try:
- for k, o in zip(key_refs, obj_refs):
- result[self._read_object(k)] = self._read_object(o)
- except TypeError:
- raise InvalidFileException()
+ try:
+ for k, o in zip(key_refs, obj_refs):
+ result[self._read_object(k)] = self._read_object(o)
+ except TypeError:
+ raise InvalidFileException()
else:
raise InvalidFileException()
@@ -611,7 +611,7 @@ def _count_to_size(count):
elif count < 1 << 16:
return 2
- elif count < 1 << 32:
+ elif count < 1 << 32:
return 4
else:
@@ -786,20 +786,20 @@ class _BinaryPlistWriter (object):
self._fp.write(t)
- elif isinstance(value, UID):
- if value.data < 0:
- raise ValueError("UIDs must be positive")
- elif value.data < 1 << 8:
- self._fp.write(struct.pack('>BB', 0x80, value))
- elif value.data < 1 << 16:
- self._fp.write(struct.pack('>BH', 0x81, value))
- elif value.data < 1 << 32:
- self._fp.write(struct.pack('>BL', 0x83, value))
- elif value.data < 1 << 64:
- self._fp.write(struct.pack('>BQ', 0x87, value))
- else:
- raise OverflowError(value)
-
+ elif isinstance(value, UID):
+ if value.data < 0:
+ raise ValueError("UIDs must be positive")
+ elif value.data < 1 << 8:
+ self._fp.write(struct.pack('>BB', 0x80, value))
+ elif value.data < 1 << 16:
+ self._fp.write(struct.pack('>BH', 0x81, value))
+ elif value.data < 1 << 32:
+ self._fp.write(struct.pack('>BL', 0x83, value))
+ elif value.data < 1 << 64:
+ self._fp.write(struct.pack('>BQ', 0x87, value))
+ else:
+ raise OverflowError(value)
+
elif isinstance(value, (list, tuple)):
refs = [self._getrefnum(o) for o in value]
s = len(refs)
@@ -853,8 +853,8 @@ _FORMATS={
}
-def load(fp, *, fmt=None, dict_type=dict):
- """Read a .plist file. 'fp' should be a readable and binary file object.
+def load(fp, *, fmt=None, dict_type=dict):
+ """Read a .plist file. 'fp' should be a readable and binary file object.
Return the unpacked root object (which usually is a dictionary).
"""
if fmt is None:
@@ -871,21 +871,21 @@ def load(fp, *, fmt=None, dict_type=dict):
else:
P = _FORMATS[fmt]['parser']
- p = P(dict_type=dict_type)
+ p = P(dict_type=dict_type)
return p.parse(fp)
-def loads(value, *, fmt=None, dict_type=dict):
+def loads(value, *, fmt=None, dict_type=dict):
"""Read a .plist file from a bytes object.
Return the unpacked root object (which usually is a dictionary).
"""
fp = BytesIO(value)
- return load(fp, fmt=fmt, dict_type=dict_type)
+ return load(fp, fmt=fmt, dict_type=dict_type)
def dump(value, fp, *, fmt=FMT_XML, sort_keys=True, skipkeys=False):
- """Write 'value' to a .plist file. 'fp' should be a writable,
- binary file object.
+ """Write 'value' to a .plist file. 'fp' should be a writable,
+ binary file object.
"""
if fmt not in _FORMATS:
raise ValueError("Unsupported format: %r"%(fmt,))
diff --git a/contrib/tools/python3/src/Lib/poplib.py b/contrib/tools/python3/src/Lib/poplib.py
index 0f8587317c..bc66e1c7e2 100644
--- a/contrib/tools/python3/src/Lib/poplib.py
+++ b/contrib/tools/python3/src/Lib/poplib.py
@@ -16,7 +16,7 @@ Based on the J. Myers POP3 draft, Jan. 96
import errno
import re
import socket
-import sys
+import sys
try:
import ssl
@@ -100,20 +100,20 @@ class POP3:
self.host = host
self.port = port
self._tls_established = False
- sys.audit("poplib.connect", self, host, port)
+ sys.audit("poplib.connect", self, host, port)
self.sock = self._create_socket(timeout)
self.file = self.sock.makefile('rb')
self._debugging = 0
self.welcome = self._getresp()
def _create_socket(self, timeout):
- if timeout is not None and not timeout:
- raise ValueError('Non-blocking socket (timeout=0) is not supported')
+ if timeout is not None and not timeout:
+ raise ValueError('Non-blocking socket (timeout=0) is not supported')
return socket.create_connection((self.host, self.port), timeout)
def _putline(self, line):
if self._debugging > 1: print('*put*', repr(line))
- sys.audit("poplib.putline", self, line)
+ sys.audit("poplib.putline", self, line)
self.sock.sendall(line + CRLF)
@@ -387,7 +387,7 @@ class POP3:
for capline in rawcaps:
capnm, capargs = _parsecap(capline)
caps[capnm] = capargs
- except error_proto:
+ except error_proto:
raise error_proto('-ERR CAPA not supported by server')
return caps
diff --git a/contrib/tools/python3/src/Lib/posixpath.py b/contrib/tools/python3/src/Lib/posixpath.py
index af2814bdb0..632fe004d4 100644
--- a/contrib/tools/python3/src/Lib/posixpath.py
+++ b/contrib/tools/python3/src/Lib/posixpath.py
@@ -2,9 +2,9 @@
Instead of importing this module directly, import os and refer to
this module as os.path. The "os.path" name is an alias for this
-module on Posix systems; on other systems (e.g. Windows),
+module on Posix systems; on other systems (e.g. Windows),
os.path provides the same operations in a manner specific to that
-platform, and is an alias to another module (e.g. ntpath).
+platform, and is an alias to another module (e.g. ntpath).
Some of this can actually be useful on non-Posix systems too, e.g.
for manipulation of the pathname component of URLs.
@@ -18,7 +18,7 @@ pardir = '..'
extsep = '.'
sep = '/'
pathsep = ':'
-defpath = '/bin:/usr/bin'
+defpath = '/bin:/usr/bin'
altsep = None
devnull = '/dev/null'
@@ -51,7 +51,7 @@ def _get_sep(path):
def normcase(s):
"""Normalize case of pathname. Has no effect under Posix"""
- return os.fspath(s)
+ return os.fspath(s)
# Return whether a path is absolute.
@@ -165,7 +165,7 @@ def islink(path):
"""Test whether a path is a symbolic link"""
try:
st = os.lstat(path)
- except (OSError, ValueError, AttributeError):
+ except (OSError, ValueError, AttributeError):
return False
return stat.S_ISLNK(st.st_mode)
@@ -175,7 +175,7 @@ def lexists(path):
"""Test whether a path exists. Returns True for broken symbolic links"""
try:
os.lstat(path)
- except (OSError, ValueError):
+ except (OSError, ValueError):
return False
return True
@@ -187,7 +187,7 @@ def ismount(path):
"""Test whether a path is a mount point"""
try:
s1 = os.lstat(path)
- except (OSError, ValueError):
+ except (OSError, ValueError):
# It doesn't exist -- so not a mount point. :-)
return False
else:
@@ -202,7 +202,7 @@ def ismount(path):
parent = realpath(parent)
try:
s2 = os.lstat(parent)
- except (OSError, ValueError):
+ except (OSError, ValueError):
return False
dev1 = s1.st_dev
@@ -349,7 +349,7 @@ def normpath(path):
initial_slashes = path.startswith(sep)
# POSIX allows one or two initial slashes, but treats three or more
# as single slash.
- # (see http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap04.html#tag_04_13)
+ # (see http://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap04.html#tag_04_13)
if (initial_slashes and
path.startswith(sep*2) and not path.startswith(sep*3)):
initial_slashes = 2
diff --git a/contrib/tools/python3/src/Lib/pprint.py b/contrib/tools/python3/src/Lib/pprint.py
index 7c1118a484..acd514bc03 100644
--- a/contrib/tools/python3/src/Lib/pprint.py
+++ b/contrib/tools/python3/src/Lib/pprint.py
@@ -41,38 +41,38 @@ import types as _types
from io import StringIO as _StringIO
__all__ = ["pprint","pformat","isreadable","isrecursive","saferepr",
- "PrettyPrinter", "pp"]
+ "PrettyPrinter", "pp"]
def pprint(object, stream=None, indent=1, width=80, depth=None, *,
- compact=False, sort_dicts=True):
+ compact=False, sort_dicts=True):
"""Pretty-print a Python object to a stream [default is sys.stdout]."""
printer = PrettyPrinter(
stream=stream, indent=indent, width=width, depth=depth,
- compact=compact, sort_dicts=sort_dicts)
+ compact=compact, sort_dicts=sort_dicts)
printer.pprint(object)
-def pformat(object, indent=1, width=80, depth=None, *,
- compact=False, sort_dicts=True):
+def pformat(object, indent=1, width=80, depth=None, *,
+ compact=False, sort_dicts=True):
"""Format a Python object into a pretty-printed representation."""
return PrettyPrinter(indent=indent, width=width, depth=depth,
- compact=compact, sort_dicts=sort_dicts).pformat(object)
-
-def pp(object, *args, sort_dicts=False, **kwargs):
- """Pretty-print a Python object"""
- pprint(object, *args, sort_dicts=sort_dicts, **kwargs)
+ compact=compact, sort_dicts=sort_dicts).pformat(object)
+def pp(object, *args, sort_dicts=False, **kwargs):
+ """Pretty-print a Python object"""
+ pprint(object, *args, sort_dicts=sort_dicts, **kwargs)
+
def saferepr(object):
"""Version of repr() which can handle recursive data structures."""
- return _safe_repr(object, {}, None, 0, True)[0]
+ return _safe_repr(object, {}, None, 0, True)[0]
def isreadable(object):
"""Determine if saferepr(object) is readable by eval()."""
- return _safe_repr(object, {}, None, 0, True)[1]
+ return _safe_repr(object, {}, None, 0, True)[1]
def isrecursive(object):
"""Determine if object requires a recursive representation."""
- return _safe_repr(object, {}, None, 0, True)[2]
+ return _safe_repr(object, {}, None, 0, True)[2]
class _safe_key:
"""Helper function for key functions when sorting unorderable objects.
@@ -102,7 +102,7 @@ def _safe_tuple(t):
class PrettyPrinter:
def __init__(self, indent=1, width=80, depth=None, stream=None, *,
- compact=False, sort_dicts=True):
+ compact=False, sort_dicts=True):
"""Handle pretty printing operations onto a stream using a set of
configured parameters.
@@ -122,9 +122,9 @@ class PrettyPrinter:
compact
If true, several items will be combined in one line.
- sort_dicts
- If true, dict keys are sorted.
-
+ sort_dicts
+ If true, dict keys are sorted.
+
"""
indent = int(indent)
width = int(width)
@@ -142,7 +142,7 @@ class PrettyPrinter:
else:
self._stream = _sys.stdout
self._compact = bool(compact)
- self._sort_dicts = sort_dicts
+ self._sort_dicts = sort_dicts
def pprint(self, object):
self._format(object, self._stream, 0, 0, {}, 0)
@@ -193,10 +193,10 @@ class PrettyPrinter:
write((self._indent_per_level - 1) * ' ')
length = len(object)
if length:
- if self._sort_dicts:
- items = sorted(object.items(), key=_safe_tuple)
- else:
- items = object.items()
+ if self._sort_dicts:
+ items = sorted(object.items(), key=_safe_tuple)
+ else:
+ items = object.items()
self._format_dict_items(items, stream, indent, allowance + 1,
context, level)
write('}')
@@ -342,33 +342,33 @@ class PrettyPrinter:
_dispatch[_types.MappingProxyType.__repr__] = _pprint_mappingproxy
- def _pprint_simplenamespace(self, object, stream, indent, allowance, context, level):
- if type(object) is _types.SimpleNamespace:
- # The SimpleNamespace repr is "namespace" instead of the class
- # name, so we do the same here. For subclasses; use the class name.
- cls_name = 'namespace'
- else:
- cls_name = object.__class__.__name__
- indent += len(cls_name) + 1
- delimnl = ',\n' + ' ' * indent
- items = object.__dict__.items()
- last_index = len(items) - 1
-
- stream.write(cls_name + '(')
- for i, (key, ent) in enumerate(items):
- stream.write(key)
- stream.write('=')
-
- last = i == last_index
- self._format(ent, stream, indent + len(key) + 1,
- allowance if last else 1,
- context, level)
- if not last:
- stream.write(delimnl)
- stream.write(')')
-
- _dispatch[_types.SimpleNamespace.__repr__] = _pprint_simplenamespace
-
+ def _pprint_simplenamespace(self, object, stream, indent, allowance, context, level):
+ if type(object) is _types.SimpleNamespace:
+ # The SimpleNamespace repr is "namespace" instead of the class
+ # name, so we do the same here. For subclasses; use the class name.
+ cls_name = 'namespace'
+ else:
+ cls_name = object.__class__.__name__
+ indent += len(cls_name) + 1
+ delimnl = ',\n' + ' ' * indent
+ items = object.__dict__.items()
+ last_index = len(items) - 1
+
+ stream.write(cls_name + '(')
+ for i, (key, ent) in enumerate(items):
+ stream.write(key)
+ stream.write('=')
+
+ last = i == last_index
+ self._format(ent, stream, indent + len(key) + 1,
+ allowance if last else 1,
+ context, level)
+ if not last:
+ stream.write(delimnl)
+ stream.write(')')
+
+ _dispatch[_types.SimpleNamespace.__repr__] = _pprint_simplenamespace
+
def _format_dict_items(self, items, stream, indent, allowance, context,
level):
write = stream.write
@@ -441,7 +441,7 @@ class PrettyPrinter:
and flags indicating whether the representation is 'readable'
and whether the object represents a recursive construct.
"""
- return _safe_repr(object, context, maxlevels, level, self._sort_dicts)
+ return _safe_repr(object, context, maxlevels, level, self._sort_dicts)
def _pprint_default_dict(self, object, stream, indent, allowance, context, level):
if not len(object):
@@ -526,7 +526,7 @@ class PrettyPrinter:
# Return triple (repr_string, isreadable, isrecursive).
-def _safe_repr(object, context, maxlevels, level, sort_dicts):
+def _safe_repr(object, context, maxlevels, level, sort_dicts):
typ = type(object)
if typ in _builtin_scalars:
return repr(object), True, False
@@ -546,13 +546,13 @@ def _safe_repr(object, context, maxlevels, level, sort_dicts):
components = []
append = components.append
level += 1
- if sort_dicts:
- items = sorted(object.items(), key=_safe_tuple)
- else:
- items = object.items()
+ if sort_dicts:
+ items = sorted(object.items(), key=_safe_tuple)
+ else:
+ items = object.items()
for k, v in items:
- krepr, kreadable, krecur = _safe_repr(k, context, maxlevels, level, sort_dicts)
- vrepr, vreadable, vrecur = _safe_repr(v, context, maxlevels, level, sort_dicts)
+ krepr, kreadable, krecur = _safe_repr(k, context, maxlevels, level, sort_dicts)
+ vrepr, vreadable, vrecur = _safe_repr(v, context, maxlevels, level, sort_dicts)
append("%s: %s" % (krepr, vrepr))
readable = readable and kreadable and vreadable
if krecur or vrecur:
@@ -584,7 +584,7 @@ def _safe_repr(object, context, maxlevels, level, sort_dicts):
append = components.append
level += 1
for o in object:
- orepr, oreadable, orecur = _safe_repr(o, context, maxlevels, level, sort_dicts)
+ orepr, oreadable, orecur = _safe_repr(o, context, maxlevels, level, sort_dicts)
append(orepr)
if not oreadable:
readable = False
@@ -610,7 +610,7 @@ def _perfcheck(object=None):
object = [("string", (1, 2), [3, 4], {5: 6, 7: 8})] * 100000
p = PrettyPrinter()
t1 = time.perf_counter()
- _safe_repr(object, {}, None, 0, True)
+ _safe_repr(object, {}, None, 0, True)
t2 = time.perf_counter()
p.pformat(object)
t3 = time.perf_counter()
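
The sort_dicts knob threaded through above defaults to True for pprint() and
pformat(), while the new pp() helper keeps insertion order; a quick
illustration:

    import pprint

    d = {'b': 1, 'a': 2}
    print(pprint.pformat(d))                    # {'a': 2, 'b': 1}
    print(pprint.pformat(d, sort_dicts=False))  # {'b': 1, 'a': 2}
    pprint.pp(d)                                # {'b': 1, 'a': 2}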
diff --git a/contrib/tools/python3/src/Lib/profile.py b/contrib/tools/python3/src/Lib/profile.py
index d8599fb4ee..6670400f1a 100644
--- a/contrib/tools/python3/src/Lib/profile.py
+++ b/contrib/tools/python3/src/Lib/profile.py
@@ -425,7 +425,7 @@ class Profile:
return self
# This method is more useful to profile a single function call.
- def runcall(self, func, /, *args, **kw):
+ def runcall(self, func, /, *args, **kw):
self.set_cmd(repr(func))
sys.setprofile(self.dispatcher)
try:
@@ -553,13 +553,13 @@ def main():
import os
from optparse import OptionParser
- usage = "profile.py [-o output_file_path] [-s sort] [-m module | scriptfile] [arg] ..."
+ usage = "profile.py [-o output_file_path] [-s sort] [-m module | scriptfile] [arg] ..."
parser = OptionParser(usage=usage)
parser.allow_interspersed_args = False
parser.add_option('-o', '--outfile', dest="outfile",
help="Save stats to <outfile>", default=None)
- parser.add_option('-m', dest="module", action="store_true",
- help="Profile a library module.", default=False)
+ parser.add_option('-m', dest="module", action="store_true",
+ help="Profile a library module.", default=False)
parser.add_option('-s', '--sort', dest="sort",
help="Sort order when printing to stdout, based on pstats.Stats class",
default=-1)
@@ -571,36 +571,36 @@ def main():
(options, args) = parser.parse_args()
sys.argv[:] = args
- # The script that we're profiling may chdir, so capture the absolute path
- # to the output file at startup.
- if options.outfile is not None:
- options.outfile = os.path.abspath(options.outfile)
-
+ # The script that we're profiling may chdir, so capture the absolute path
+ # to the output file at startup.
+ if options.outfile is not None:
+ options.outfile = os.path.abspath(options.outfile)
+
if len(args) > 0:
- if options.module:
- import runpy
- code = "run_module(modname, run_name='__main__')"
- globs = {
- 'run_module': runpy.run_module,
- 'modname': args[0]
- }
- else:
- progname = args[0]
- sys.path.insert(0, os.path.dirname(progname))
- with open(progname, 'rb') as fp:
- code = compile(fp.read(), progname, 'exec')
- globs = {
- '__file__': progname,
- '__name__': '__main__',
- '__package__': None,
- '__cached__': None,
- }
- try:
- runctx(code, globs, None, options.outfile, options.sort)
- except BrokenPipeError as exc:
- # Prevent "Exception ignored" during interpreter shutdown.
- sys.stdout = None
- sys.exit(exc.errno)
+ if options.module:
+ import runpy
+ code = "run_module(modname, run_name='__main__')"
+ globs = {
+ 'run_module': runpy.run_module,
+ 'modname': args[0]
+ }
+ else:
+ progname = args[0]
+ sys.path.insert(0, os.path.dirname(progname))
+ with open(progname, 'rb') as fp:
+ code = compile(fp.read(), progname, 'exec')
+ globs = {
+ '__file__': progname,
+ '__name__': '__main__',
+ '__package__': None,
+ '__cached__': None,
+ }
+ try:
+ runctx(code, globs, None, options.outfile, options.sort)
+ except BrokenPipeError as exc:
+ # Prevent "Exception ignored" during interpreter shutdown.
+ sys.stdout = None
+ sys.exit(exc.errno)
else:
parser.print_usage()
return parser
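
A hedged sketch of the two entry points touched above: runcall() profiles a
single call (with func now positional-only), and the new -m flag profiles a
library module from the command line, e.g. `python -m profile -m calendar`:

    import profile

    def work(n):
        return sum(i * i for i in range(n))

    p = profile.Profile()
    result = p.runcall(work, 100_000)   # profiles just this one call
    p.print_stats()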
diff --git a/contrib/tools/python3/src/Lib/pstats.py b/contrib/tools/python3/src/Lib/pstats.py
index 0f93ae02c9..5a42507732 100644
--- a/contrib/tools/python3/src/Lib/pstats.py
+++ b/contrib/tools/python3/src/Lib/pstats.py
@@ -25,13 +25,13 @@ import os
import time
import marshal
import re
-
+
from enum import Enum
from functools import cmp_to_key
-from dataclasses import dataclass
-from typing import Dict
+from dataclasses import dataclass
+from typing import Dict
-__all__ = ["Stats", "SortKey", "FunctionProfile", "StatsProfile"]
+__all__ = ["Stats", "SortKey", "FunctionProfile", "StatsProfile"]
class SortKey(str, Enum):
CALLS = 'calls', 'ncalls'
@@ -45,31 +45,31 @@ class SortKey(str, Enum):
TIME = 'time', 'tottime'
def __new__(cls, *values):
- value = values[0]
- obj = str.__new__(cls, value)
- obj._value_ = value
+ value = values[0]
+ obj = str.__new__(cls, value)
+ obj._value_ = value
for other_value in values[1:]:
cls._value2member_map_[other_value] = obj
obj._all_values = values
return obj
-@dataclass(unsafe_hash=True)
-class FunctionProfile:
- ncalls: int
- tottime: float
- percall_tottime: float
- cumtime: float
- percall_cumtime: float
- file_name: str
- line_number: int
-
-@dataclass(unsafe_hash=True)
-class StatsProfile:
-    '''Class for keeping track of the statistics of an entire profile run.'''
- total_tt: float
- func_profiles: Dict[str, FunctionProfile]
-
+@dataclass(unsafe_hash=True)
+class FunctionProfile:
+ ncalls: int
+ tottime: float
+ percall_tottime: float
+ cumtime: float
+ percall_cumtime: float
+ file_name: str
+ line_number: int
+
+@dataclass(unsafe_hash=True)
+class StatsProfile:
+    '''Class for keeping track of the statistics of an entire profile run.'''
+ total_tt: float
+ func_profiles: Dict[str, FunctionProfile]
+
class Stats:
"""This class is used for creating reports from data generated by the
Profile class. It is a "friend" of that class, and imports data either
@@ -351,41 +351,41 @@ class Stats:
return new_list, msg
- def get_stats_profile(self):
- """This method returns an instance of StatsProfile, which contains a mapping
- of function names to instances of FunctionProfile. Each FunctionProfile
- instance holds information related to the function's profile such as how
- long the function took to run, how many times it was called, etc...
- """
- func_list = self.fcn_list[:] if self.fcn_list else list(self.stats.keys())
- if not func_list:
- return StatsProfile(0, {})
-
- total_tt = float(f8(self.total_tt))
- func_profiles = {}
- stats_profile = StatsProfile(total_tt, func_profiles)
-
- for func in func_list:
- cc, nc, tt, ct, callers = self.stats[func]
- file_name, line_number, func_name = func
- ncalls = str(nc) if nc == cc else (str(nc) + '/' + str(cc))
- tottime = float(f8(tt))
- percall_tottime = -1 if nc == 0 else float(f8(tt/nc))
- cumtime = float(f8(ct))
- percall_cumtime = -1 if cc == 0 else float(f8(ct/cc))
- func_profile = FunctionProfile(
- ncalls,
- tottime, # time spent in this function alone
- percall_tottime,
- cumtime, # time spent in the function plus all functions that this function called,
- percall_cumtime,
- file_name,
- line_number
- )
- func_profiles[func_name] = func_profile
-
- return stats_profile
-
+ def get_stats_profile(self):
+ """This method returns an instance of StatsProfile, which contains a mapping
+ of function names to instances of FunctionProfile. Each FunctionProfile
+ instance holds information related to the function's profile such as how
+ long the function took to run, how many times it was called, etc...
+ """
+ func_list = self.fcn_list[:] if self.fcn_list else list(self.stats.keys())
+ if not func_list:
+ return StatsProfile(0, {})
+
+ total_tt = float(f8(self.total_tt))
+ func_profiles = {}
+ stats_profile = StatsProfile(total_tt, func_profiles)
+
+ for func in func_list:
+ cc, nc, tt, ct, callers = self.stats[func]
+ file_name, line_number, func_name = func
+ ncalls = str(nc) if nc == cc else (str(nc) + '/' + str(cc))
+ tottime = float(f8(tt))
+ percall_tottime = -1 if nc == 0 else float(f8(tt/nc))
+ cumtime = float(f8(ct))
+ percall_cumtime = -1 if cc == 0 else float(f8(ct/cc))
+ func_profile = FunctionProfile(
+ ncalls,
+ tottime, # time spent in this function alone
+ percall_tottime,
+ cumtime, # time spent in the function plus all functions that this function called,
+ percall_cumtime,
+ file_name,
+ line_number
+ )
+ func_profiles[func_name] = func_profile
+
+ return stats_profile
+
def get_print_list(self, sel_list):
width = self.max_name_len
if self.fcn_list:
@@ -562,7 +562,7 @@ def func_std_string(func_name): # match what old profile produced
return "%s:%d(%s)" % func_name
#**************************************************************************
-# The following functions combine statistics for pairs of functions.
+# The following functions combine statistics for pairs of functions.
# The bulk of the processing involves correctly handling "call" lists,
# such as callers and callees.
#**************************************************************************
@@ -685,12 +685,12 @@ if __name__ == '__main__':
print("", file=self.stream)
return 1
def help_EOF(self):
- print("Leave the profile browser.", file=self.stream)
+ print("Leave the profile browser.", file=self.stream)
def do_quit(self, line):
return 1
def help_quit(self):
- print("Leave the profile browser.", file=self.stream)
+ print("Leave the profile browser.", file=self.stream)
def do_read(self, line):
if line:
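
A sketch of the new structured API restored above: get_stats_profile() returns
a StatsProfile dataclass keyed by function name instead of printing a report:

    import cProfile
    import pstats

    pr = cProfile.Profile()
    pr.enable()
    sorted(range(100_000), key=str)
    pr.disable()

    sp = pstats.Stats(pr).get_stats_profile()
    print(sp.total_tt)                        # total runtime in seconds
    for name, fp in sp.func_profiles.items():
        print(name, fp.ncalls, fp.cumtime)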
diff --git a/contrib/tools/python3/src/Lib/pty.py b/contrib/tools/python3/src/Lib/pty.py
index a32432041f..d6bc65b09e 100644
--- a/contrib/tools/python3/src/Lib/pty.py
+++ b/contrib/tools/python3/src/Lib/pty.py
@@ -8,7 +8,7 @@
from select import select
import os
-import sys
+import sys
import tty
__all__ = ["openpty","fork","spawn"]
@@ -152,7 +152,7 @@ def spawn(argv, master_read=_read, stdin_read=_read):
"""Create a spawned process."""
if type(argv) == type(''):
argv = (argv,)
- sys.audit('pty.spawn', argv)
+ sys.audit('pty.spawn', argv)
pid, master_fd = fork()
if pid == CHILD:
os.execlp(argv[0], *argv)
diff --git a/contrib/tools/python3/src/Lib/py_compile.py b/contrib/tools/python3/src/Lib/py_compile.py
index a81f493731..c5caa06f26 100644
--- a/contrib/tools/python3/src/Lib/py_compile.py
+++ b/contrib/tools/python3/src/Lib/py_compile.py
@@ -77,7 +77,7 @@ def _get_default_invalidation_mode():
def compile(file, cfile=None, dfile=None, doraise=False, optimize=-1,
- invalidation_mode=None, quiet=0):
+ invalidation_mode=None, quiet=0):
"""Byte-compile one Python source file to Python bytecode.
:param file: The source file name.
@@ -95,8 +95,8 @@ def compile(file, cfile=None, dfile=None, doraise=False, optimize=-1,
are -1, 0, 1 and 2. A value of -1 means to use the optimization
level of the current interpreter, as given by -O command line options.
:param invalidation_mode:
- :param quiet: Return full output with False or 0, errors only with 1,
- and no output with 2.
+ :param quiet: Return full output with False or 0, errors only with 1,
+ and no output with 2.
:return: Path to the resulting byte compiled file.
@@ -145,12 +145,12 @@ def compile(file, cfile=None, dfile=None, doraise=False, optimize=-1,
_optimize=optimize)
except Exception as err:
py_exc = PyCompileError(err.__class__, err, dfile or file)
- if quiet < 2:
- if doraise:
- raise py_exc
- else:
- sys.stderr.write(py_exc.msg + '\n')
- return
+ if quiet < 2:
+ if doraise:
+ raise py_exc
+ else:
+ sys.stderr.write(py_exc.msg + '\n')
+ return
try:
dirname = os.path.dirname(cfile)
if dirname:
@@ -197,10 +197,10 @@ def main(args=None):
compile(filename, doraise=True)
except PyCompileError as error:
rv = 1
- sys.stderr.write("%s\n" % error.msg)
+ sys.stderr.write("%s\n" % error.msg)
except OSError as error:
rv = 1
- sys.stderr.write("%s\n" % error)
+ sys.stderr.write("%s\n" % error)
else:
for filename in args:
try:
@@ -208,7 +208,7 @@ def main(args=None):
except PyCompileError as error:
# return value to indicate at least one failure
rv = 1
- sys.stderr.write("%s\n" % error.msg)
+ sys.stderr.write("%s\n" % error.msg)
return rv
if __name__ == "__main__":
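
A minimal sketch of the quiet levels documented above (the file names are
hypothetical): 0 reports everything, 1 reports errors only, 2 suppresses
output entirely:

    import py_compile

    path = py_compile.compile('good.py', quiet=0)    # returns the .pyc path
    bad = py_compile.compile('broken.py', quiet=2)   # on error: returns None, silently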
diff --git a/contrib/tools/python3/src/Lib/pyclbr.py b/contrib/tools/python3/src/Lib/pyclbr.py
index 99a17343fb..2df08ef330 100644
--- a/contrib/tools/python3/src/Lib/pyclbr.py
+++ b/contrib/tools/python3/src/Lib/pyclbr.py
@@ -50,7 +50,7 @@ _modules = {} # Initialize cache of modules we've seen.
class _Object:
- "Information about Python class or function."
+ "Information about Python class or function."
def __init__(self, module, name, file, lineno, parent):
self.module = module
self.name = name
@@ -160,8 +160,8 @@ def _readmodule(module, path, inpackage=None):
else:
search_path = path + sys.path
spec = importlib.util._find_spec_from_path(fullmodule, search_path)
- if spec is None:
- raise ModuleNotFoundError(f"no module named {fullmodule!r}", name=fullmodule)
+ if spec is None:
+ raise ModuleNotFoundError(f"no module named {fullmodule!r}", name=fullmodule)
_modules[fullmodule] = tree
# Is module a package?
if spec.submodule_search_locations is not None:
@@ -171,9 +171,9 @@ def _readmodule(module, path, inpackage=None):
except (AttributeError, ImportError):
# If module is not Python source, we cannot do anything.
return tree
- else:
- if source is None:
- return tree
+ else:
+ if source is None:
+ return tree
fname = spec.loader.get_filename(fullmodule)
return _create_tree(fullmodule, path, fname, source, tree, inpackage)
diff --git a/contrib/tools/python3/src/Lib/pydoc.py b/contrib/tools/python3/src/Lib/pydoc.py
index 4f9d227ff4..52d60d4980 100644
--- a/contrib/tools/python3/src/Lib/pydoc.py
+++ b/contrib/tools/python3/src/Lib/pydoc.py
@@ -66,7 +66,7 @@ import pkgutil
import platform
import re
import sys
-import sysconfig
+import sysconfig
import time
import tokenize
import urllib.parse
@@ -90,101 +90,101 @@ def pathdirs():
normdirs.append(normdir)
return dirs
-def _findclass(func):
- cls = sys.modules.get(func.__module__)
- if cls is None:
- return None
- for name in func.__qualname__.split('.')[:-1]:
- cls = getattr(cls, name)
- if not inspect.isclass(cls):
- return None
- return cls
-
-def _finddoc(obj):
- if inspect.ismethod(obj):
- name = obj.__func__.__name__
- self = obj.__self__
- if (inspect.isclass(self) and
- getattr(getattr(self, name, None), '__func__') is obj.__func__):
- # classmethod
- cls = self
- else:
- cls = self.__class__
- elif inspect.isfunction(obj):
- name = obj.__name__
- cls = _findclass(obj)
- if cls is None or getattr(cls, name) is not obj:
- return None
- elif inspect.isbuiltin(obj):
- name = obj.__name__
- self = obj.__self__
- if (inspect.isclass(self) and
- self.__qualname__ + '.' + name == obj.__qualname__):
- # classmethod
- cls = self
- else:
- cls = self.__class__
- # Should be tested before isdatadescriptor().
- elif isinstance(obj, property):
- func = obj.fget
- name = func.__name__
- cls = _findclass(func)
- if cls is None or getattr(cls, name) is not obj:
- return None
- elif inspect.ismethoddescriptor(obj) or inspect.isdatadescriptor(obj):
- name = obj.__name__
- cls = obj.__objclass__
- if getattr(cls, name) is not obj:
- return None
- if inspect.ismemberdescriptor(obj):
- slots = getattr(cls, '__slots__', None)
- if isinstance(slots, dict) and name in slots:
- return slots[name]
- else:
- return None
- for base in cls.__mro__:
- try:
- doc = _getowndoc(getattr(base, name))
- except AttributeError:
- continue
- if doc is not None:
- return doc
- return None
-
-def _getowndoc(obj):
- """Get the documentation string for an object if it is not
- inherited from its class."""
- try:
- doc = object.__getattribute__(obj, '__doc__')
- if doc is None:
- return None
- if obj is not type:
- typedoc = type(obj).__doc__
- if isinstance(typedoc, str) and typedoc == doc:
- return None
- return doc
- except AttributeError:
- return None
-
-def _getdoc(object):
- """Get the documentation string for an object.
-
- All tabs are expanded to spaces. To clean up docstrings that are
-    indented to line up with blocks of code, any whitespace that can be
- uniformly removed from the second line onwards is removed."""
- doc = _getowndoc(object)
- if doc is None:
- try:
- doc = _finddoc(object)
- except (AttributeError, TypeError):
- return None
- if not isinstance(doc, str):
- return None
- return inspect.cleandoc(doc)
-
+def _findclass(func):
+ cls = sys.modules.get(func.__module__)
+ if cls is None:
+ return None
+ for name in func.__qualname__.split('.')[:-1]:
+ cls = getattr(cls, name)
+ if not inspect.isclass(cls):
+ return None
+ return cls
+
+def _finddoc(obj):
+ if inspect.ismethod(obj):
+ name = obj.__func__.__name__
+ self = obj.__self__
+ if (inspect.isclass(self) and
+ getattr(getattr(self, name, None), '__func__') is obj.__func__):
+ # classmethod
+ cls = self
+ else:
+ cls = self.__class__
+ elif inspect.isfunction(obj):
+ name = obj.__name__
+ cls = _findclass(obj)
+ if cls is None or getattr(cls, name) is not obj:
+ return None
+ elif inspect.isbuiltin(obj):
+ name = obj.__name__
+ self = obj.__self__
+ if (inspect.isclass(self) and
+ self.__qualname__ + '.' + name == obj.__qualname__):
+ # classmethod
+ cls = self
+ else:
+ cls = self.__class__
+ # Should be tested before isdatadescriptor().
+ elif isinstance(obj, property):
+ func = obj.fget
+ name = func.__name__
+ cls = _findclass(func)
+ if cls is None or getattr(cls, name) is not obj:
+ return None
+ elif inspect.ismethoddescriptor(obj) or inspect.isdatadescriptor(obj):
+ name = obj.__name__
+ cls = obj.__objclass__
+ if getattr(cls, name) is not obj:
+ return None
+ if inspect.ismemberdescriptor(obj):
+ slots = getattr(cls, '__slots__', None)
+ if isinstance(slots, dict) and name in slots:
+ return slots[name]
+ else:
+ return None
+ for base in cls.__mro__:
+ try:
+ doc = _getowndoc(getattr(base, name))
+ except AttributeError:
+ continue
+ if doc is not None:
+ return doc
+ return None
+
+def _getowndoc(obj):
+ """Get the documentation string for an object if it is not
+ inherited from its class."""
+ try:
+ doc = object.__getattribute__(obj, '__doc__')
+ if doc is None:
+ return None
+ if obj is not type:
+ typedoc = type(obj).__doc__
+ if isinstance(typedoc, str) and typedoc == doc:
+ return None
+ return doc
+ except AttributeError:
+ return None
+
+def _getdoc(object):
+ """Get the documentation string for an object.
+
+ All tabs are expanded to spaces. To clean up docstrings that are
+    indented to line up with blocks of code, any whitespace that can be
+ uniformly removed from the second line onwards is removed."""
+ doc = _getowndoc(object)
+ if doc is None:
+ try:
+ doc = _finddoc(object)
+ except (AttributeError, TypeError):
+ return None
+ if not isinstance(doc, str):
+ return None
+ return inspect.cleandoc(doc)
+
def getdoc(object):
"""Get the doc string or comments for an object."""
- result = _getdoc(object) or inspect.getcomments(object)
+ result = _getdoc(object) or inspect.getcomments(object)
return result and re.sub('^ *\n', '', result.rstrip()) or ''
def splitdoc(doc):
@@ -245,7 +245,7 @@ def _is_bound_method(fn):
def allmethods(cl):
methods = {}
- for key, value in inspect.getmembers(cl, inspect.isroutine):
+ for key, value in inspect.getmembers(cl, inspect.isroutine):
methods[key] = 1
for base in cl.__bases__:
methods.update(allmethods(base)) # all your base are belong to us
@@ -296,8 +296,8 @@ def classify_class_attrs(object):
for (name, kind, cls, value) in inspect.classify_class_attrs(object):
if inspect.isdatadescriptor(value):
kind = 'data descriptor'
- if isinstance(value, property) and value.fset is None:
- kind = 'readonly property'
+ if isinstance(value, property) and value.fset is None:
+ kind = 'readonly property'
results.append((name, kind, cls, value))
return results
@@ -474,7 +474,7 @@ class Doc:
if inspect.isroutine(object): return self.docroutine(*args)
except AttributeError:
pass
- if inspect.isdatadescriptor(object): return self.docdata(*args)
+ if inspect.isdatadescriptor(object): return self.docdata(*args)
return self.docother(*args)
def fail(self, object, name=None, *args):
@@ -485,7 +485,7 @@ class Doc:
docmodule = docclass = docroutine = docother = docproperty = docdata = fail
- def getdocloc(self, object, basedir=sysconfig.get_path('stdlib')):
+ def getdocloc(self, object, basedir=sysconfig.get_path('stdlib')):
"""Return the location of module docs or None"""
try:
@@ -676,7 +676,7 @@ class HTMLDoc(Doc):
escape = escape or self.escape
results = []
here = 0
- pattern = re.compile(r'\b((http|https|ftp)://\S+[\w/]|'
+ pattern = re.compile(r'\b((http|https|ftp)://\S+[\w/]|'
r'RFC[- ]?(\d+)|'
r'PEP[- ]?(\d+)|'
r'(self\.)?(\w+))')
@@ -694,7 +694,7 @@ class HTMLDoc(Doc):
url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif pep:
- url = 'https://www.python.org/dev/peps/pep-%04d/' % int(pep)
+ url = 'https://www.python.org/dev/peps/pep-%04d/' % int(pep)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif selfdot:
# Create a link for methods like 'self.method(...)'
@@ -894,7 +894,7 @@ class HTMLDoc(Doc):
except Exception:
# Some descriptors may meet a failure in their __get__.
# (bug #1785)
- push(self.docdata(value, name, mod))
+ push(self.docdata(value, name, mod))
else:
push(self.document(value, name, mod,
funcs, classes, mdict, object))
@@ -907,7 +907,7 @@ class HTMLDoc(Doc):
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
- push(self.docdata(value, name, mod))
+ push(self.docdata(value, name, mod))
return attrs
def spilldata(msg, attrs, predicate):
@@ -917,8 +917,8 @@ class HTMLDoc(Doc):
push(msg)
for name, kind, homecls, value in ok:
base = self.docother(getattr(object, name), name, mod)
- doc = getdoc(value)
- if not doc:
+ doc = getdoc(value)
+ if not doc:
push('<dl><dt>%s</dl>\n' % base)
else:
doc = self.markup(getdoc(value), self.preformat,
@@ -955,7 +955,7 @@ class HTMLDoc(Doc):
thisclass = attrs[0][2]
attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)
- if object is not builtins.object and thisclass is builtins.object:
+ if object is not builtins.object and thisclass is builtins.object:
attrs = inherited
continue
elif thisclass is object:
@@ -974,8 +974,8 @@ class HTMLDoc(Doc):
lambda t: t[1] == 'class method')
attrs = spill('Static methods %s' % tag, attrs,
lambda t: t[1] == 'static method')
- attrs = spilldescriptors("Readonly properties %s" % tag, attrs,
- lambda t: t[1] == 'readonly property')
+ attrs = spilldescriptors("Readonly properties %s" % tag, attrs,
+ lambda t: t[1] == 'readonly property')
attrs = spilldescriptors('Data descriptors %s' % tag, attrs,
lambda t: t[1] == 'data descriptor')
attrs = spilldata('Data and other attributes %s' % tag, attrs,
@@ -1039,12 +1039,12 @@ class HTMLDoc(Doc):
else:
note = ' unbound %s method' % self.classlink(imclass,mod)
- if (inspect.iscoroutinefunction(object) or
- inspect.isasyncgenfunction(object)):
- asyncqualifier = 'async '
- else:
- asyncqualifier = ''
-
+ if (inspect.iscoroutinefunction(object) or
+ inspect.isasyncgenfunction(object)):
+ asyncqualifier = 'async '
+ else:
+ asyncqualifier = ''
+
if name == realname:
title = '<a name="%s"><strong>%s</strong></a>' % (anchor, realname)
else:
@@ -1073,8 +1073,8 @@ class HTMLDoc(Doc):
if not argspec:
argspec = '(...)'
- decl = asyncqualifier + title + self.escape(argspec) + (note and
- self.grey('<font face="helvetica, arial">%s</font>' % note))
+ decl = asyncqualifier + title + self.escape(argspec) + (note and
+ self.grey('<font face="helvetica, arial">%s</font>' % note))
if skipdocs:
return '<dl><dt>%s</dt></dl>\n' % decl
@@ -1084,21 +1084,21 @@ class HTMLDoc(Doc):
doc = doc and '<dd><tt>%s</tt></dd>' % doc
return '<dl><dt>%s</dt>%s</dl>\n' % (decl, doc)
- def docdata(self, object, name=None, mod=None, cl=None):
- """Produce html documentation for a data descriptor."""
+ def docdata(self, object, name=None, mod=None, cl=None):
+ """Produce html documentation for a data descriptor."""
results = []
push = results.append
if name:
push('<dl><dt><strong>%s</strong></dt>\n' % name)
- doc = self.markup(getdoc(object), self.preformat)
- if doc:
+ doc = self.markup(getdoc(object), self.preformat)
+ if doc:
push('<dd><tt>%s</tt></dd>\n' % doc)
push('</dl>\n')
return ''.join(results)
- docproperty = docdata
+ docproperty = docdata
def docother(self, object, name=None, mod=None, *ignored):
"""Produce HTML documentation for a data object."""
@@ -1338,24 +1338,24 @@ location listed above.
push(' ' + makename(base))
push('')
- # List the built-in subclasses, if any:
- subclasses = sorted(
- (str(cls.__name__) for cls in type.__subclasses__(object)
- if not cls.__name__.startswith("_") and cls.__module__ == "builtins"),
- key=str.lower
- )
- no_of_subclasses = len(subclasses)
- MAX_SUBCLASSES_TO_DISPLAY = 4
- if subclasses:
- push("Built-in subclasses:")
- for subclassname in subclasses[:MAX_SUBCLASSES_TO_DISPLAY]:
- push(' ' + subclassname)
- if no_of_subclasses > MAX_SUBCLASSES_TO_DISPLAY:
- push(' ... and ' +
- str(no_of_subclasses - MAX_SUBCLASSES_TO_DISPLAY) +
- ' other subclasses')
- push('')
-
+ # List the built-in subclasses, if any:
+ subclasses = sorted(
+ (str(cls.__name__) for cls in type.__subclasses__(object)
+ if not cls.__name__.startswith("_") and cls.__module__ == "builtins"),
+ key=str.lower
+ )
+ no_of_subclasses = len(subclasses)
+ MAX_SUBCLASSES_TO_DISPLAY = 4
+ if subclasses:
+ push("Built-in subclasses:")
+ for subclassname in subclasses[:MAX_SUBCLASSES_TO_DISPLAY]:
+ push(' ' + subclassname)
+ if no_of_subclasses > MAX_SUBCLASSES_TO_DISPLAY:
+ push(' ... and ' +
+ str(no_of_subclasses - MAX_SUBCLASSES_TO_DISPLAY) +
+ ' other subclasses')
+ push('')
+
# Cute little class to pump out a horizontal rule between sections.
class HorizontalRule:
def __init__(self):
@@ -1377,7 +1377,7 @@ location listed above.
except Exception:
# Some descriptors may meet a failure in their __get__.
# (bug #1785)
- push(self.docdata(value, name, mod))
+ push(self.docdata(value, name, mod))
else:
push(self.document(value,
name, mod, object))
@@ -1389,7 +1389,7 @@ location listed above.
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
- push(self.docdata(value, name, mod))
+ push(self.docdata(value, name, mod))
return attrs
def spilldata(msg, attrs, predicate):
@@ -1398,7 +1398,7 @@ location listed above.
hr.maybe()
push(msg)
for name, kind, homecls, value in ok:
- doc = getdoc(value)
+ doc = getdoc(value)
try:
obj = getattr(object, name)
except AttributeError:
@@ -1418,7 +1418,7 @@ location listed above.
thisclass = attrs[0][2]
attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)
- if object is not builtins.object and thisclass is builtins.object:
+ if object is not builtins.object and thisclass is builtins.object:
attrs = inherited
continue
elif thisclass is object:
@@ -1436,8 +1436,8 @@ location listed above.
lambda t: t[1] == 'class method')
attrs = spill("Static methods %s:\n" % tag, attrs,
lambda t: t[1] == 'static method')
- attrs = spilldescriptors("Readonly properties %s:\n" % tag, attrs,
- lambda t: t[1] == 'readonly property')
+ attrs = spilldescriptors("Readonly properties %s:\n" % tag, attrs,
+ lambda t: t[1] == 'readonly property')
attrs = spilldescriptors("Data descriptors %s:\n" % tag, attrs,
lambda t: t[1] == 'data descriptor')
attrs = spilldata("Data and other attributes %s:\n" % tag, attrs,
@@ -1473,12 +1473,12 @@ location listed above.
else:
note = ' unbound %s method' % classname(imclass,mod)
- if (inspect.iscoroutinefunction(object) or
- inspect.isasyncgenfunction(object)):
- asyncqualifier = 'async '
- else:
- asyncqualifier = ''
-
+ if (inspect.iscoroutinefunction(object) or
+ inspect.isasyncgenfunction(object)):
+ asyncqualifier = 'async '
+ else:
+ asyncqualifier = ''
+
if name == realname:
title = self.bold(realname)
else:
@@ -1502,7 +1502,7 @@ location listed above.
argspec = argspec[1:-1] # remove parentheses
if not argspec:
argspec = '(...)'
- decl = asyncqualifier + title + argspec + note
+ decl = asyncqualifier + title + argspec + note
if skipdocs:
return decl + '\n'
@@ -1510,21 +1510,21 @@ location listed above.
doc = getdoc(object) or ''
return decl + '\n' + (doc and self.indent(doc).rstrip() + '\n')
- def docdata(self, object, name=None, mod=None, cl=None):
- """Produce text documentation for a data descriptor."""
+ def docdata(self, object, name=None, mod=None, cl=None):
+ """Produce text documentation for a data descriptor."""
results = []
push = results.append
if name:
push(self.bold(name))
push('\n')
- doc = getdoc(object) or ''
+ doc = getdoc(object) or ''
if doc:
push(self.indent(doc))
push('\n')
return ''.join(results)
- docproperty = docdata
+ docproperty = docdata
def docother(self, object, name=None, mod=None, parent=None, maxlen=None, doc=None):
"""Produce text documentation for a data object."""
@@ -1534,10 +1534,10 @@ location listed above.
chop = maxlen - len(line)
if chop < 0: repr = repr[:chop] + '...'
line = (name and self.bold(name) + ' = ' or '') + repr
- if not doc:
- doc = getdoc(object)
- if doc:
- line += '\n' + self.indent(str(doc)) + '\n'
+ if not doc:
+ doc = getdoc(object)
+ if doc:
+ line += '\n' + self.indent(str(doc)) + '\n'
return line
class _PlainTextDoc(TextDoc):
@@ -1617,13 +1617,13 @@ def pipepager(text, cmd):
def tempfilepager(text, cmd):
"""Page through text by invoking a program on a temporary file."""
import tempfile
- with tempfile.TemporaryDirectory() as tempdir:
- filename = os.path.join(tempdir, 'pydoc.out')
- with open(filename, 'w', errors='backslashreplace',
- encoding=os.device_encoding(0) if
- sys.platform == 'win32' else None
- ) as file:
- file.write(text)
+ with tempfile.TemporaryDirectory() as tempdir:
+ filename = os.path.join(tempdir, 'pydoc.out')
+ with open(filename, 'w', errors='backslashreplace',
+ encoding=os.device_encoding(0) if
+ sys.platform == 'win32' else None
+ ) as file:
+ file.write(text)
os.system(cmd + ' "' + filename + '"')
def _escape_stdout(text):
@@ -1761,15 +1761,15 @@ def render_doc(thing, title='Python Library Documentation: %s', forceload=0,
if not (inspect.ismodule(object) or
inspect.isclass(object) or
inspect.isroutine(object) or
- inspect.isdatadescriptor(object) or
- _getdoc(object)):
+ inspect.isdatadescriptor(object) or
+ _getdoc(object)):
# If the passed object is a piece of data or an instance,
# document its available methods instead of its value.
- if hasattr(object, '__origin__'):
- object = object.__origin__
- else:
- object = type(object)
- desc += ' object'
+ if hasattr(object, '__origin__'):
+ object = object.__origin__
+ else:
+ object = type(object)
+ desc += ' object'
return title % desc + '\n\n' + renderer.document(object, name)
def doc(thing, title='Python Library Documentation: %s', forceload=0,
@@ -1818,7 +1818,7 @@ class Helper:
'False': '',
'None': '',
'True': '',
- '__peg_parser__': '',
+ '__peg_parser__': '',
'and': 'BOOLEAN',
'as': 'with',
'assert': ('assert', ''),
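
For reference, a minimal sketch of the observable behaviour of the helpers added above (`_getowndoc` is a private pydoc helper and is called directly here only to make its docstring concrete):

    import pydoc

    class C:
        """class docstring"""

    print(pydoc.getdoc(C))        # 'class docstring' -- C owns its docstring
    print(pydoc._getowndoc(C()))  # None -- the instance merely inherits C.__doc__,
                                  # so help(C()) documents type(C()) instead
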
diff --git a/contrib/tools/python3/src/Lib/pydoc_data/topics.py b/contrib/tools/python3/src/Lib/pydoc_data/topics.py
index 67a51977cf..929833791f 100644
--- a/contrib/tools/python3/src/Lib/pydoc_data/topics.py
+++ b/contrib/tools/python3/src/Lib/pydoc_data/topics.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Autogenerated by Sphinx on Thu Jan 13 21:46:32 2022
+# Autogenerated by Sphinx on Thu Jan 13 21:46:32 2022
topics = {'assert': 'The "assert" statement\n'
'**********************\n'
'\n'
@@ -99,26 +99,26 @@ topics = {'assert': 'The "assert" statement\n'
'assigned,\n'
' from left to right, to the corresponding targets.\n'
'\n'
- ' * If the target list contains one target prefixed with an '
- 'asterisk,\n'
- ' called a “starred” target: The object must be an iterable '
- 'with at\n'
- ' least as many items as there are targets in the target '
- 'list, minus\n'
- ' one. The first items of the iterable are assigned, from '
- 'left to\n'
- ' right, to the targets before the starred target. The '
- 'final items\n'
- ' of the iterable are assigned to the targets after the '
+ ' * If the target list contains one target prefixed with an '
+ 'asterisk,\n'
+ ' called a “starred” target: The object must be an iterable '
+ 'with at\n'
+ ' least as many items as there are targets in the target '
+ 'list, minus\n'
+ ' one. The first items of the iterable are assigned, from '
+ 'left to\n'
+ ' right, to the targets before the starred target. The '
+ 'final items\n'
+ ' of the iterable are assigned to the targets after the '
'starred\n'
- ' target. A list of the remaining items in the iterable is '
- 'then\n'
- ' assigned to the starred target (the list can be empty).\n'
+ ' target. A list of the remaining items in the iterable is '
+ 'then\n'
+ ' assigned to the starred target (the list can be empty).\n'
'\n'
' * Else: The object must be an iterable with the same number '
- 'of items\n'
- ' as there are targets in the target list, and the items '
- 'are\n'
+ 'of items\n'
+ ' as there are targets in the target list, and the items '
+ 'are\n'
' assigned, from left to right, to the corresponding '
'targets.\n'
'\n'
@@ -134,10 +134,10 @@ topics = {'assert': 'The "assert" statement\n'
'in the\n'
' current local namespace.\n'
'\n'
- ' * Otherwise: the name is bound to the object in the global '
- 'namespace\n'
- ' or the outer namespace determined by "nonlocal", '
- 'respectively.\n'
+ ' * Otherwise: the name is bound to the object in the global '
+ 'namespace\n'
+ ' or the outer namespace determined by "nonlocal", '
+ 'respectively.\n'
'\n'
' The name is rebound if it was already bound. This may cause '
'the\n'
@@ -161,21 +161,21 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
' Note: If the object is a class instance and the attribute '
'reference\n'
- ' occurs on both sides of the assignment operator, the '
- 'right-hand side\n'
- ' expression, "a.x" can access either an instance attribute or '
- '(if no\n'
- ' instance attribute exists) a class attribute. The left-hand '
- 'side\n'
- ' target "a.x" is always set as an instance attribute, '
- 'creating it if\n'
- ' necessary. Thus, the two occurrences of "a.x" do not '
- 'necessarily\n'
- ' refer to the same attribute: if the right-hand side '
- 'expression\n'
- ' refers to a class attribute, the left-hand side creates a '
- 'new\n'
- ' instance attribute as the target of the assignment:\n'
+ ' occurs on both sides of the assignment operator, the '
+ 'right-hand side\n'
+ ' expression, "a.x" can access either an instance attribute or '
+ '(if no\n'
+ ' instance attribute exists) a class attribute. The left-hand '
+ 'side\n'
+ ' target "a.x" is always set as an instance attribute, '
+ 'creating it if\n'
+ ' necessary. Thus, the two occurrences of "a.x" do not '
+ 'necessarily\n'
+ ' refer to the same attribute: if the right-hand side '
+ 'expression\n'
+ ' refers to a class attribute, the left-hand side creates a '
+ 'new\n'
+ ' instance attribute as the target of the assignment:\n'
'\n'
' class Cls:\n'
' x = 3 # class variable\n'
@@ -224,27 +224,27 @@ topics = {'assert': 'The "assert" statement\n'
'called with\n'
' appropriate arguments.\n'
'\n'
- '* If the target is a slicing: The primary expression in the '
- 'reference\n'
- ' is evaluated. It should yield a mutable sequence object '
- '(such as a\n'
- ' list). The assigned object should be a sequence object of '
- 'the same\n'
- ' type. Next, the lower and upper bound expressions are '
- 'evaluated,\n'
- ' insofar they are present; defaults are zero and the '
- 'sequence’s\n'
- ' length. The bounds should evaluate to integers. If either '
- 'bound is\n'
- ' negative, the sequence’s length is added to it. The '
- 'resulting\n'
- ' bounds are clipped to lie between zero and the sequence’s '
- 'length,\n'
- ' inclusive. Finally, the sequence object is asked to replace '
+ '* If the target is a slicing: The primary expression in the '
+ 'reference\n'
+ ' is evaluated. It should yield a mutable sequence object '
+ '(such as a\n'
+ ' list). The assigned object should be a sequence object of '
+ 'the same\n'
+ ' type. Next, the lower and upper bound expressions are '
+ 'evaluated,\n'
+                              ' insofar as they are present; defaults are zero and the '
+ 'sequence’s\n'
+ ' length. The bounds should evaluate to integers. If either '
+ 'bound is\n'
+ ' negative, the sequence’s length is added to it. The '
+ 'resulting\n'
+ ' bounds are clipped to lie between zero and the sequence’s '
+ 'length,\n'
+ ' inclusive. Finally, the sequence object is asked to replace '
'the\n'
- ' slice with the items of the assigned sequence. The length '
- 'of the\n'
- ' slice may be different from the length of the assigned '
+ ' slice with the items of the assigned sequence. The length '
+ 'of the\n'
+ ' slice may be different from the length of the assigned '
'sequence,\n'
' thus changing the length of the target sequence, if the '
'target\n'
@@ -357,13 +357,13 @@ topics = {'assert': 'The "assert" statement\n'
'a variable or attribute annotation and an optional assignment\n'
'statement:\n'
'\n'
- ' annotated_assignment_stmt ::= augtarget ":" expression\n'
- ' ["=" (starred_expression | '
- 'yield_expression)]\n'
+ ' annotated_assignment_stmt ::= augtarget ":" expression\n'
+ ' ["=" (starred_expression | '
+ 'yield_expression)]\n'
'\n'
'The difference from normal Assignment statements is that only '
'a single\n'
- 'target is allowed.\n'
+ 'target is allowed.\n'
'\n'
'For simple names as assignment targets, if in class or module '
'scope,\n'
@@ -410,14 +410,14 @@ topics = {'assert': 'The "assert" statement\n'
'standard\n'
' syntax for type annotations that can be used in static '
'analysis\n'
- ' tools and IDEs.\n'
- '\n'
- 'Changed in version 3.8: Now annotated assignments allow same\n'
- 'expressions in the right hand side as the regular '
- 'assignments.\n'
- 'Previously, some expressions (like un-parenthesized tuple '
- 'expressions)\n'
- 'caused a syntax error.\n',
+ ' tools and IDEs.\n'
+ '\n'
+                'Changed in version 3.8: Now annotated assignments allow the same\n'
+                'expressions in the right hand side as regular '
+ 'assignments.\n'
+ 'Previously, some expressions (like un-parenthesized tuple '
+ 'expressions)\n'
+ 'caused a syntax error.\n',
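
For instance, both annotated assignments below are accepted today, while the bare-tuple right-hand side was one of the expressions that raised a syntax error before Python 3.8:

    count: int = 5             # single target with annotation and value
    point: tuple = 1.0, 2.0    # un-parenthesized tuple RHS; SyntaxError before 3.8
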
'async': 'Coroutines\n'
'**********\n'
'\n'
@@ -459,35 +459,35 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
' async_for_stmt ::= "async" for_stmt\n'
'\n'
- 'An *asynchronous iterable* provides an "__aiter__" method that\n'
- 'directly returns an *asynchronous iterator*, which can call\n'
- 'asynchronous code in its "__anext__" method.\n'
+ 'An *asynchronous iterable* provides an "__aiter__" method that\n'
+ 'directly returns an *asynchronous iterator*, which can call\n'
+ 'asynchronous code in its "__anext__" method.\n'
'\n'
'The "async for" statement allows convenient iteration over\n'
- 'asynchronous iterables.\n'
+ 'asynchronous iterables.\n'
'\n'
'The following code:\n'
'\n'
' async for TARGET in ITER:\n'
- ' SUITE\n'
+ ' SUITE\n'
' else:\n'
- ' SUITE2\n'
+ ' SUITE2\n'
'\n'
'Is semantically equivalent to:\n'
'\n'
' iter = (ITER)\n'
' iter = type(iter).__aiter__(iter)\n'
' running = True\n'
- '\n'
+ '\n'
' while running:\n'
' try:\n'
' TARGET = await type(iter).__anext__(iter)\n'
' except StopAsyncIteration:\n'
' running = False\n'
' else:\n'
- ' SUITE\n'
+ ' SUITE\n'
' else:\n'
- ' SUITE2\n'
+ ' SUITE2\n'
'\n'
'See also "__aiter__()" and "__anext__()" for details.\n'
'\n'
@@ -507,27 +507,27 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'The following code:\n'
'\n'
- ' async with EXPRESSION as TARGET:\n'
- ' SUITE\n'
+ ' async with EXPRESSION as TARGET:\n'
+ ' SUITE\n'
'\n'
- 'is semantically equivalent to:\n'
+ 'is semantically equivalent to:\n'
'\n'
- ' manager = (EXPRESSION)\n'
- ' aenter = type(manager).__aenter__\n'
- ' aexit = type(manager).__aexit__\n'
- ' value = await aenter(manager)\n'
- ' hit_except = False\n'
+ ' manager = (EXPRESSION)\n'
+ ' aenter = type(manager).__aenter__\n'
+ ' aexit = type(manager).__aexit__\n'
+ ' value = await aenter(manager)\n'
+ ' hit_except = False\n'
'\n'
' try:\n'
- ' TARGET = value\n'
- ' SUITE\n'
+ ' TARGET = value\n'
+ ' SUITE\n'
' except:\n'
- ' hit_except = True\n'
- ' if not await aexit(manager, *sys.exc_info()):\n'
+ ' hit_except = True\n'
+ ' if not await aexit(manager, *sys.exc_info()):\n'
' raise\n'
- ' finally:\n'
- ' if not hit_except:\n'
- ' await aexit(manager, None, None, None)\n'
+ ' finally:\n'
+ ' if not hit_except:\n'
+ ' await aexit(manager, None, None, None)\n'
'\n'
'See also "__aenter__()" and "__aexit__()" for details.\n'
'\n'
@@ -543,17 +543,17 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'-[ Footnotes ]-\n'
'\n'
- '[1] The exception is propagated to the invocation stack unless '
- 'there\n'
- ' is a "finally" clause which happens to raise another '
- 'exception.\n'
- ' That new exception causes the old one to be lost.\n'
+ '[1] The exception is propagated to the invocation stack unless '
+ 'there\n'
+ ' is a "finally" clause which happens to raise another '
+ 'exception.\n'
+ ' That new exception causes the old one to be lost.\n'
'\n'
- '[2] A string literal appearing as the first statement in the '
- 'function\n'
- ' body is transformed into the function’s "__doc__" attribute '
- 'and\n'
- ' therefore the function’s *docstring*.\n'
+ '[2] A string literal appearing as the first statement in the '
+ 'function\n'
+ ' body is transformed into the function’s "__doc__" attribute '
+ 'and\n'
+ ' therefore the function’s *docstring*.\n'
'\n'
'[3] A string literal appearing as the first statement in the class\n'
' body is transformed into the namespace’s "__doc__" item and\n'
@@ -691,19 +691,19 @@ topics = {'assert': 'The "assert" statement\n'
'needs, for\n'
' example, "object.__getattribute__(self, name)".\n'
'\n'
- ' Note:\n'
- '\n'
- ' This method may still be bypassed when looking up '
- 'special methods\n'
- ' as the result of implicit invocation via language '
- 'syntax or\n'
- ' built-in functions. See Special method lookup.\n'
- '\n'
- ' For certain sensitive attribute accesses, raises an '
- 'auditing event\n'
- ' "object.__getattr__" with arguments "obj" and '
- '"name".\n'
+ ' Note:\n'
'\n'
+ ' This method may still be bypassed when looking up '
+ 'special methods\n'
+ ' as the result of implicit invocation via language '
+ 'syntax or\n'
+ ' built-in functions. See Special method lookup.\n'
+ '\n'
+ ' For certain sensitive attribute accesses, raises an '
+ 'auditing event\n'
+ ' "object.__getattr__" with arguments "obj" and '
+ '"name".\n'
+ '\n'
'object.__setattr__(self, name, value)\n'
'\n'
' Called when an attribute assignment is attempted. '
@@ -720,11 +720,11 @@ topics = {'assert': 'The "assert" statement\n'
'for example,\n'
' "object.__setattr__(self, name, value)".\n'
'\n'
- ' For certain sensitive attribute assignments, raises '
- 'an auditing\n'
- ' event "object.__setattr__" with arguments "obj", '
- '"name", "value".\n'
- '\n'
+ ' For certain sensitive attribute assignments, raises '
+ 'an auditing\n'
+ ' event "object.__setattr__" with arguments "obj", '
+ '"name", "value".\n'
+ '\n'
'object.__delattr__(self, name)\n'
'\n'
' Like "__setattr__()" but for attribute deletion '
@@ -733,11 +733,11 @@ topics = {'assert': 'The "assert" statement\n'
'obj.name" is\n'
' meaningful for the object.\n'
'\n'
- ' For certain sensitive attribute deletions, raises an '
- 'auditing event\n'
- ' "object.__delattr__" with arguments "obj" and '
- '"name".\n'
- '\n'
+ ' For certain sensitive attribute deletions, raises an '
+ 'auditing event\n'
+ ' "object.__delattr__" with arguments "obj" and '
+ '"name".\n'
+ '\n'
'object.__dir__(self)\n'
'\n'
' Called when "dir()" is called on the object. A '
@@ -769,11 +769,11 @@ topics = {'assert': 'The "assert" statement\n'
'returned.\n'
'\n'
'The "__dir__" function should accept no arguments, and '
- 'return a\n'
- 'sequence of strings that represents the names accessible '
- 'on module. If\n'
- 'present, this function overrides the standard "dir()" '
- 'search on a\n'
+ 'return a\n'
+ 'sequence of strings that represents the names accessible '
+                 'on the module. If\n'
+ 'present, this function overrides the standard "dir()" '
+ 'search on a\n'
'module.\n'
'\n'
'For a more fine grained customization of the module '
@@ -796,17 +796,17 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
' sys.modules[__name__].__class__ = VerboseModule\n'
'\n'
- 'Note:\n'
- '\n'
- ' Defining module "__getattr__" and setting module '
- '"__class__" only\n'
- ' affect lookups made using the attribute access syntax '
- '– directly\n'
- ' accessing the module globals (whether by code within '
- 'the module, or\n'
- ' via a reference to the module’s globals dictionary) is '
- 'unaffected.\n'
+ 'Note:\n'
'\n'
+ ' Defining module "__getattr__" and setting module '
+ '"__class__" only\n'
+ ' affect lookups made using the attribute access syntax '
+ '– directly\n'
+ ' accessing the module globals (whether by code within '
+ 'the module, or\n'
+ ' via a reference to the module’s globals dictionary) is '
+ 'unaffected.\n'
+ '\n'
'Changed in version 3.5: "__class__" module attribute is '
'now writable.\n'
'\n'
@@ -835,47 +835,47 @@ topics = {'assert': 'The "assert" statement\n'
'whose name is\n'
'the key of the property in the owner class’ "__dict__".\n'
'\n'
- 'object.__get__(self, instance, owner=None)\n'
+ 'object.__get__(self, instance, owner=None)\n'
'\n'
' Called to get the attribute of the owner class (class '
'attribute\n'
' access) or of an instance of that class (instance '
'attribute\n'
- ' access). The optional *owner* argument is the owner '
- 'class, while\n'
- ' *instance* is the instance that the attribute was '
- 'accessed through,\n'
- ' or "None" when the attribute is accessed through the '
- '*owner*.\n'
- '\n'
- ' This method should return the computed attribute '
- 'value or raise an\n'
- ' "AttributeError" exception.\n'
- '\n'
- ' **PEP 252** specifies that "__get__()" is callable '
- 'with one or two\n'
- ' arguments. Python’s own built-in descriptors support '
- 'this\n'
- ' specification; however, it is likely that some '
- 'third-party tools\n'
- ' have descriptors that require both arguments. '
- 'Python’s own\n'
- ' "__getattribute__()" implementation always passes in '
- 'both arguments\n'
- ' whether they are required or not.\n'
+ ' access). The optional *owner* argument is the owner '
+ 'class, while\n'
+ ' *instance* is the instance that the attribute was '
+ 'accessed through,\n'
+ ' or "None" when the attribute is accessed through the '
+ '*owner*.\n'
'\n'
+ ' This method should return the computed attribute '
+ 'value or raise an\n'
+ ' "AttributeError" exception.\n'
+ '\n'
+ ' **PEP 252** specifies that "__get__()" is callable '
+ 'with one or two\n'
+ ' arguments. Python’s own built-in descriptors support '
+ 'this\n'
+ ' specification; however, it is likely that some '
+ 'third-party tools\n'
+ ' have descriptors that require both arguments. '
+ 'Python’s own\n'
+ ' "__getattribute__()" implementation always passes in '
+ 'both arguments\n'
+ ' whether they are required or not.\n'
+ '\n'
'object.__set__(self, instance, value)\n'
'\n'
' Called to set the attribute on an instance *instance* '
'of the owner\n'
' class to a new value, *value*.\n'
'\n'
- ' Note, adding "__set__()" or "__delete__()" changes '
- 'the kind of\n'
- ' descriptor to a “data descriptor”. See Invoking '
- 'Descriptors for\n'
- ' more details.\n'
- '\n'
+ ' Note, adding "__set__()" or "__delete__()" changes '
+ 'the kind of\n'
+ ' descriptor to a “data descriptor”. See Invoking '
+ 'Descriptors for\n'
+ ' more details.\n'
+ '\n'
'object.__delete__(self, instance)\n'
'\n'
' Called to delete the attribute on an instance '
@@ -888,24 +888,24 @@ topics = {'assert': 'The "assert" statement\n'
'created. The\n'
' descriptor has been assigned to *name*.\n'
'\n'
- ' Note:\n'
- '\n'
- ' "__set_name__()" is only called implicitly as part '
- 'of the "type"\n'
- ' constructor, so it will need to be called '
- 'explicitly with the\n'
- ' appropriate parameters when a descriptor is added '
- 'to a class\n'
- ' after initial creation:\n'
- '\n'
- ' class A:\n'
- ' pass\n'
- ' descr = custom_descriptor()\n'
- ' A.attr = descr\n'
- " descr.__set_name__(A, 'attr')\n"
- '\n'
- ' See Creating the class object for more details.\n'
- '\n'
+ ' Note:\n'
+ '\n'
+ ' "__set_name__()" is only called implicitly as part '
+ 'of the "type"\n'
+ ' constructor, so it will need to be called '
+ 'explicitly with the\n'
+ ' appropriate parameters when a descriptor is added '
+ 'to a class\n'
+ ' after initial creation:\n'
+ '\n'
+ ' class A:\n'
+ ' pass\n'
+ ' descr = custom_descriptor()\n'
+ ' A.attr = descr\n'
+ " descr.__set_name__(A, 'attr')\n"
+ '\n'
+ ' See Creating the class object for more details.\n'
+ '\n'
' New in version 3.6.\n'
'\n'
'The attribute "__objclass__" is interpreted by the '
@@ -979,16 +979,16 @@ topics = {'assert': 'The "assert" statement\n'
'"super(B,\n'
' obj).m()" searches "obj.__class__.__mro__" for the '
'base class "A"\n'
- ' immediately following "B" and then invokes the '
+ ' immediately following "B" and then invokes the '
'descriptor with the\n'
' call: "A.__dict__[\'m\'].__get__(obj, '
'obj.__class__)".\n'
'\n'
'For instance bindings, the precedence of descriptor '
'invocation depends\n'
- 'on which descriptor methods are defined. A descriptor '
- 'can define any\n'
- 'combination of "__get__()", "__set__()" and '
+ 'on which descriptor methods are defined. A descriptor '
+ 'can define any\n'
+ 'combination of "__get__()", "__set__()" and '
'"__delete__()". If it\n'
'does not define "__get__()", then accessing the '
'attribute will return\n'
@@ -1002,23 +1002,23 @@ topics = {'assert': 'The "assert" statement\n'
'define both\n'
'"__get__()" and "__set__()", while non-data descriptors '
'have just the\n'
- '"__get__()" method. Data descriptors with "__get__()" '
- 'and "__set__()"\n'
- '(and/or "__delete__()") defined always override a '
- 'redefinition in an\n'
- 'instance dictionary. In contrast, non-data descriptors '
- 'can be\n'
- 'overridden by instances.\n'
+ '"__get__()" method. Data descriptors with "__get__()" '
+ 'and "__set__()"\n'
+ '(and/or "__delete__()") defined always override a '
+ 'redefinition in an\n'
+ 'instance dictionary. In contrast, non-data descriptors '
+ 'can be\n'
+ 'overridden by instances.\n'
'\n'
- 'Python methods (including those decorated with '
- '"@staticmethod" and\n'
- '"@classmethod") are implemented as non-data '
- 'descriptors. Accordingly,\n'
- 'instances can redefine and override methods. This '
- 'allows individual\n'
- 'instances to acquire behaviors that differ from other '
- 'instances of the\n'
- 'same class.\n'
+ 'Python methods (including those decorated with '
+ '"@staticmethod" and\n'
+ '"@classmethod") are implemented as non-data '
+ 'descriptors. Accordingly,\n'
+ 'instances can redefine and override methods. This '
+ 'allows individual\n'
+ 'instances to acquire behaviors that differ from other '
+ 'instances of the\n'
+ 'same class.\n'
'\n'
'The "property()" function is implemented as a data '
'descriptor.\n'
@@ -1031,12 +1031,12 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'*__slots__* allow us to explicitly declare data members '
'(like\n'
- 'properties) and deny the creation of "__dict__" and '
+ 'properties) and deny the creation of "__dict__" and '
'*__weakref__*\n'
'(unless explicitly declared in *__slots__* or available '
'in a parent.)\n'
'\n'
- 'The space saved over using "__dict__" can be '
+ 'The space saved over using "__dict__" can be '
'significant. Attribute\n'
'lookup speed can be significantly improved as well.\n'
'\n'
@@ -1048,7 +1048,7 @@ topics = {'assert': 'The "assert" statement\n'
'*__slots__*\n'
' reserves space for the declared variables and '
'prevents the\n'
- ' automatic creation of "__dict__" and *__weakref__* '
+ ' automatic creation of "__dict__" and *__weakref__* '
'for each\n'
' instance.\n'
'\n'
@@ -1057,11 +1057,11 @@ topics = {'assert': 'The "assert" statement\n'
'--------------------------\n'
'\n'
'* When inheriting from a class without *__slots__*, the '
- '"__dict__" and\n'
- ' *__weakref__* attribute of the instances will always '
- 'be accessible.\n'
+ '"__dict__" and\n'
+ ' *__weakref__* attribute of the instances will always '
+ 'be accessible.\n'
'\n'
- '* Without a "__dict__" variable, instances cannot be '
+ '* Without a "__dict__" variable, instances cannot be '
'assigned new\n'
' variables not listed in the *__slots__* definition. '
'Attempts to\n'
@@ -1074,41 +1074,41 @@ topics = {'assert': 'The "assert" statement\n'
' declaration.\n'
'\n'
'* Without a *__weakref__* variable for each instance, '
- 'classes defining\n'
- ' *__slots__* do not support "weak references" to its '
- 'instances. If\n'
- ' weak reference support is needed, then add '
- '"\'__weakref__\'" to the\n'
- ' sequence of strings in the *__slots__* declaration.\n'
+ 'classes defining\n'
+                 ' *__slots__* do not support "weak references" to their '
+ 'instances. If\n'
+ ' weak reference support is needed, then add '
+ '"\'__weakref__\'" to the\n'
+ ' sequence of strings in the *__slots__* declaration.\n'
'\n'
'* *__slots__* are implemented at the class level by '
'creating\n'
- ' descriptors for each variable name. As a result, '
- 'class attributes\n'
- ' cannot be used to set default values for instance '
- 'variables defined\n'
- ' by *__slots__*; otherwise, the class attribute would '
- 'overwrite the\n'
- ' descriptor assignment.\n'
+ ' descriptors for each variable name. As a result, '
+ 'class attributes\n'
+ ' cannot be used to set default values for instance '
+ 'variables defined\n'
+ ' by *__slots__*; otherwise, the class attribute would '
+ 'overwrite the\n'
+ ' descriptor assignment.\n'
'\n'
'* The action of a *__slots__* declaration is not limited '
- 'to the class\n'
- ' where it is defined. *__slots__* declared in parents '
- 'are available\n'
- ' in child classes. However, child subclasses will get a '
- '"__dict__"\n'
- ' and *__weakref__* unless they also define *__slots__* '
- '(which should\n'
- ' only contain names of any *additional* slots).\n'
+ 'to the class\n'
+ ' where it is defined. *__slots__* declared in parents '
+ 'are available\n'
+ ' in child classes. However, child subclasses will get a '
+ '"__dict__"\n'
+ ' and *__weakref__* unless they also define *__slots__* '
+ '(which should\n'
+ ' only contain names of any *additional* slots).\n'
'\n'
'* If a class defines a slot also defined in a base '
- 'class, the instance\n'
- ' variable defined by the base class slot is '
- 'inaccessible (except by\n'
- ' retrieving its descriptor directly from the base '
- 'class). This\n'
- ' renders the meaning of the program undefined. In the '
- 'future, a\n'
+ 'class, the instance\n'
+ ' variable defined by the base class slot is '
+ 'inaccessible (except by\n'
+ ' retrieving its descriptor directly from the base '
+ 'class). This\n'
+ ' renders the meaning of the program undefined. In the '
+ 'future, a\n'
' check may be added to prevent this.\n'
'\n'
'* Nonempty *__slots__* does not work for classes derived '
@@ -1116,19 +1116,19 @@ topics = {'assert': 'The "assert" statement\n'
' “variable-length” built-in types such as "int", '
'"bytes" and "tuple".\n'
'\n'
- '* Any non-string *iterable* may be assigned to '
- '*__slots__*.\n'
- '\n'
- '* If a "dictionary" is used to assign *__slots__*, the '
- 'dictionary keys\n'
- ' will be used as the slot names. The values of the '
- 'dictionary can be\n'
- ' used to provide per-attribute docstrings that will be '
- 'recognised by\n'
- ' "inspect.getdoc()" and displayed in the output of '
- '"help()".\n'
+ '* Any non-string *iterable* may be assigned to '
+ '*__slots__*.\n'
'\n'
- '* "__class__" assignment works only if both classes have '
+ '* If a "dictionary" is used to assign *__slots__*, the '
+ 'dictionary keys\n'
+ ' will be used as the slot names. The values of the '
+ 'dictionary can be\n'
+ ' used to provide per-attribute docstrings that will be '
+ 'recognised by\n'
+ ' "inspect.getdoc()" and displayed in the output of '
+ '"help()".\n'
+ '\n'
+ '* "__class__" assignment works only if both classes have '
'the same\n'
' *__slots__*.\n'
'\n'
@@ -1138,13 +1138,13 @@ topics = {'assert': 'The "assert" statement\n'
'attributes created by\n'
' slots (the other bases must have empty slot layouts) - '
'violations\n'
- ' raise "TypeError".\n'
- '\n'
- '* If an *iterator* is used for *__slots__* then a '
- '*descriptor* is\n'
- ' created for each of the iterator’s values. However, '
- 'the *__slots__*\n'
- ' attribute will be an empty iterator.\n',
+ ' raise "TypeError".\n'
+ '\n'
+ '* If an *iterator* is used for *__slots__* then a '
+ '*descriptor* is\n'
+ ' created for each of the iterator’s values. However, '
+ 'the *__slots__*\n'
+ ' attribute will be an empty iterator.\n',
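
A minimal sketch of the dictionary form of *__slots__* described above, whose per-attribute docstrings are what the `_finddoc` change earlier in this diff looks up (toy class names assumed):

    import inspect

    class Point:
        __slots__ = {
            'x': 'horizontal coordinate',
            'y': 'vertical coordinate',
        }

    print(inspect.getdoc(Point.x))   # 'horizontal coordinate'
    p = Point()
    p.x = 1.0                        # fine: 'x' is a declared slot
    # p.z = 1.0 would raise AttributeError: no __dict__, 'z' is not a slot
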
'attribute-references': 'Attribute references\n'
'********************\n'
'\n'
@@ -1266,10 +1266,10 @@ topics = {'assert': 'The "assert" statement\n'
'In the latter case, sequence repetition is performed; a negative\n'
'repetition factor yields an empty sequence.\n'
'\n'
- 'This operation can be customized using the special "__mul__()" '
- 'and\n'
- '"__rmul__()" methods.\n'
- '\n'
+ 'This operation can be customized using the special "__mul__()" '
+ 'and\n'
+ '"__rmul__()" methods.\n'
+ '\n'
'The "@" (at) operator is intended to be used for matrix\n'
'multiplication. No builtin Python types implement this operator.\n'
'\n'
@@ -1285,10 +1285,10 @@ topics = {'assert': 'The "assert" statement\n'
'result. Division by zero raises the "ZeroDivisionError" '
'exception.\n'
'\n'
- 'This operation can be customized using the special "__truediv__()" '
- 'and\n'
- '"__floordiv__()" methods.\n'
- '\n'
+ 'This operation can be customized using the special "__truediv__()" '
+ 'and\n'
+ '"__floordiv__()" methods.\n'
+ '\n'
'The "%" (modulo) operator yields the remainder from the division '
'of\n'
'the first argument by the second. The numeric arguments are '
@@ -1320,10 +1320,10 @@ topics = {'assert': 'The "assert" statement\n'
'string formatting is described in the Python Library Reference,\n'
'section printf-style String Formatting.\n'
'\n'
- 'The *modulo* operation can be customized using the special '
- '"__mod__()"\n'
- 'method.\n'
- '\n'
+ 'The *modulo* operation can be customized using the special '
+ '"__mod__()"\n'
+ 'method.\n'
+ '\n'
'The floor division operator, the modulo operator, and the '
'"divmod()"\n'
'function are not defined for complex numbers. Instead, convert to '
@@ -1338,16 +1338,16 @@ topics = {'assert': 'The "assert" statement\n'
'and then added together. In the latter case, the sequences are\n'
'concatenated.\n'
'\n'
- 'This operation can be customized using the special "__add__()" '
- 'and\n'
- '"__radd__()" methods.\n'
- '\n'
+ 'This operation can be customized using the special "__add__()" '
+ 'and\n'
+ '"__radd__()" methods.\n'
+ '\n'
'The "-" (subtraction) operator yields the difference of its '
'arguments.\n'
- 'The numeric arguments are first converted to a common type.\n'
- '\n'
- 'This operation can be customized using the special "__sub__()" '
- 'method.\n',
+ 'The numeric arguments are first converted to a common type.\n'
+ '\n'
+ 'This operation can be customized using the special "__sub__()" '
+ 'method.\n',
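
As a short illustration of the hooks named above, a class can take part in "+" from either side by defining "__add__()" and "__radd__()" (a hypothetical toy type, not from this diff):

    class Meters:
        def __init__(self, n):
            self.n = n
        def __add__(self, other):      # Meters + number
            return Meters(self.n + other)
        def __radd__(self, other):     # number + Meters
            return Meters(other + self.n)

    total = 2 + Meters(3)              # (2).__add__ returns NotImplemented,
    print(total.n)                     # so Meters.__radd__ runs; prints 5
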
'bitwise': 'Binary bitwise operations\n'
'*************************\n'
'\n'
@@ -1360,18 +1360,18 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'The "&" operator yields the bitwise AND of its arguments, which '
'must\n'
- 'be integers or one of them must be a custom object overriding\n'
- '"__and__()" or "__rand__()" special methods.\n'
+ 'be integers or one of them must be a custom object overriding\n'
+ '"__and__()" or "__rand__()" special methods.\n'
'\n'
'The "^" operator yields the bitwise XOR (exclusive OR) of its\n'
- 'arguments, which must be integers or one of them must be a '
- 'custom\n'
- 'object overriding "__xor__()" or "__rxor__()" special methods.\n'
+ 'arguments, which must be integers or one of them must be a '
+ 'custom\n'
+ 'object overriding "__xor__()" or "__rxor__()" special methods.\n'
'\n'
'The "|" operator yields the bitwise (inclusive) OR of its '
'arguments,\n'
- 'which must be integers or one of them must be a custom object\n'
- 'overriding "__or__()" or "__ror__()" special methods.\n',
+ 'which must be integers or one of them must be a custom object\n'
+ 'overriding "__or__()" or "__ror__()" special methods.\n',
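
The reflected methods work the same way for the bitwise operators; for example, a hypothetical set-like wrapper can support "|" with plain sets:

    class Tags:
        def __init__(self, names):
            self.names = set(names)
        def __or__(self, other):
            other = other.names if isinstance(other, Tags) else set(other)
            return Tags(self.names | other)
        __ror__ = __or__               # "|" is symmetric for this type

    merged = {'a'} | Tags({'b'})       # set.__or__ returns NotImplemented,
    print(merged.names)                # so Tags.__ror__ supplies {'a', 'b'}
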
'bltin-code-objects': 'Code Objects\n'
'************\n'
'\n'
@@ -1388,10 +1388,10 @@ topics = {'assert': 'The "assert" statement\n'
'through their "__code__" attribute. See also the '
'"code" module.\n'
'\n'
- 'Accessing "__code__" raises an auditing event '
- '"object.__getattr__"\n'
- 'with arguments "obj" and ""__code__"".\n'
- '\n'
+ 'Accessing "__code__" raises an auditing event '
+ '"object.__getattr__"\n'
+ 'with arguments "obj" and ""__code__"".\n'
+ '\n'
'A code object can be executed or evaluated by passing '
'it (instead of a\n'
'source string) to the "exec()" or "eval()" built-in '
@@ -1512,8 +1512,8 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
' Called when the instance is “called” as a function; if '
'this method\n'
- ' is defined, "x(arg1, arg2, ...)" roughly translates to\n'
- ' "type(x).__call__(x, arg1, ...)".\n',
+ ' is defined, "x(arg1, arg2, ...)" roughly translates to\n'
+ ' "type(x).__call__(x, arg1, ...)".\n',
'calls': 'Calls\n'
'*****\n'
'\n'
@@ -1529,8 +1529,8 @@ topics = {'assert': 'The "assert" statement\n'
' | starred_and_keywords ["," '
'keywords_arguments]\n'
' | keywords_arguments\n'
- ' positional_arguments ::= positional_item ("," positional_item)*\n'
- ' positional_item ::= assignment_expression | "*" expression\n'
+ ' positional_arguments ::= positional_item ("," positional_item)*\n'
+ ' positional_item ::= assignment_expression | "*" expression\n'
' starred_and_keywords ::= ("*" expression | keyword_item)\n'
' ("," "*" expression | "," '
'keyword_item)*\n'
@@ -1770,10 +1770,10 @@ topics = {'assert': 'The "assert" statement\n'
'for\n'
'function decorators. The result is then bound to the class name.\n'
'\n'
- 'Changed in version 3.9: Classes may be decorated with any valid\n'
- '"assignment_expression". Previously, the grammar was much more\n'
- 'restrictive; see **PEP 614** for details.\n'
- '\n'
+ 'Changed in version 3.9: Classes may be decorated with any valid\n'
+ '"assignment_expression". Previously, the grammar was much more\n'
+ 'restrictive; see **PEP 614** for details.\n'
+ '\n'
'**Programmer’s note:** Variables defined in the class definition '
'are\n'
'class attributes; they are shared by instances. Instance '
@@ -1817,11 +1817,11 @@ topics = {'assert': 'The "assert" statement\n'
' comp_operator ::= "<" | ">" | "==" | ">=" | "<=" | "!="\n'
' | "is" ["not"] | ["not"] "in"\n'
'\n'
- 'Comparisons yield boolean values: "True" or "False". Custom '
- '*rich\n'
- 'comparison methods* may return non-boolean values. In this '
- 'case Python\n'
- 'will call "bool()" on such value in boolean contexts.\n'
+ 'Comparisons yield boolean values: "True" or "False". Custom '
+ '*rich\n'
+ 'comparison methods* may return non-boolean values. In this '
+ 'case Python\n'
+                'will call "bool()" on such a value in boolean contexts.\n'
'\n'
'Comparisons can be chained arbitrarily, e.g., "x < y <= z" '
'is\n'
@@ -1934,16 +1934,16 @@ topics = {'assert': 'The "assert" statement\n'
' value is false. A counter-intuitive implication is that '
'not-a-number\n'
' values are not equal to themselves. For example, if "x =\n'
- ' float(\'NaN\')", "3 < x", "x < 3" and "x == x" are all '
- 'false, while "x\n'
- ' != x" is true. This behavior is compliant with IEEE 754.\n'
- '\n'
- '* "None" and "NotImplemented" are singletons. **PEP 8** '
- 'advises that\n'
- ' comparisons for singletons should always be done with "is" '
- 'or "is\n'
- ' not", never the equality operators.\n'
+ ' float(\'NaN\')", "3 < x", "x < 3" and "x == x" are all '
+ 'false, while "x\n'
+ ' != x" is true. This behavior is compliant with IEEE 754.\n'
'\n'
+ '* "None" and "NotImplemented" are singletons. **PEP 8** '
+ 'advises that\n'
+ ' comparisons for singletons should always be done with "is" '
+ 'or "is\n'
+ ' not", never the equality operators.\n'
+ '\n'
'* Binary sequences (instances of "bytes" or "bytearray") can '
'be\n'
' compared within and across their types. They compare\n'
@@ -1958,24 +1958,24 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
' Strings and binary sequences cannot be directly compared.\n'
'\n'
- '* Sequences (instances of "tuple", "list", or "range") can be '
- 'compared\n'
- ' only within each of their types, with the restriction that '
- 'ranges do\n'
- ' not support order comparison. Equality comparison across '
- 'these\n'
- ' types results in inequality, and ordering comparison across '
- 'these\n'
- ' types raises "TypeError".\n'
+ '* Sequences (instances of "tuple", "list", or "range") can be '
+ 'compared\n'
+ ' only within each of their types, with the restriction that '
+ 'ranges do\n'
+ ' not support order comparison. Equality comparison across '
+ 'these\n'
+ ' types results in inequality, and ordering comparison across '
+ 'these\n'
+ ' types raises "TypeError".\n'
'\n'
' Sequences compare lexicographically using comparison of\n'
- ' corresponding elements. The built-in containers typically '
- 'assume\n'
- ' identical objects are equal to themselves. That lets them '
- 'bypass\n'
- ' equality tests for identical objects to improve performance '
- 'and to\n'
- ' maintain their internal invariants.\n'
+ ' corresponding elements. The built-in containers typically '
+ 'assume\n'
+ ' identical objects are equal to themselves. That lets them '
+ 'bypass\n'
+ ' equality tests for identical objects to improve performance '
+ 'and to\n'
+ ' maintain their internal invariants.\n'
'\n'
' Lexicographical comparison between built-in collections '
'works as\n'
@@ -1990,8 +1990,8 @@ topics = {'assert': 'The "assert" statement\n'
' false because the type is not the same).\n'
'\n'
' * Collections that support order comparison are ordered the '
- 'same as\n'
- ' their first unequal elements (for example, "[1,2,x] <= '
+ 'same as\n'
+ ' their first unequal elements (for example, "[1,2,x] <= '
'[1,2,y]"\n'
' has the same value as "x <= y"). If a corresponding '
'element does\n'
@@ -2009,8 +2009,8 @@ topics = {'assert': 'The "assert" statement\n'
'"TypeError".\n'
'\n'
'* Sets (instances of "set" or "frozenset") can be compared '
- 'within and\n'
- ' across their types.\n'
+ 'within and\n'
+ ' across their types.\n'
'\n'
' They define order comparison operators to mean subset and '
'superset\n'
@@ -2029,8 +2029,8 @@ topics = {'assert': 'The "assert" statement\n'
' Comparison of sets enforces reflexivity of its elements.\n'
'\n'
'* Most other built-in types have no comparison methods '
- 'implemented, so\n'
- ' they inherit the default comparison behavior.\n'
+ 'implemented, so\n'
+ ' they inherit the default comparison behavior.\n'
'\n'
'User-defined classes that customize their comparison behavior '
'should\n'
@@ -2079,10 +2079,10 @@ topics = {'assert': 'The "assert" statement\n'
' "total_ordering()" decorator.\n'
'\n'
'* The "hash()" result should be consistent with equality. '
- 'Objects that\n'
- ' are equal should either have the same hash value, or be '
- 'marked as\n'
- ' unhashable.\n'
+ 'Objects that\n'
+ ' are equal should either have the same hash value, or be '
+ 'marked as\n'
+ ' unhashable.\n'
'\n'
'Python does not enforce these consistency rules. In fact, '
'the\n'
@@ -2124,26 +2124,26 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'For user-defined classes which do not define "__contains__()" '
'but do\n'
- 'define "__iter__()", "x in y" is "True" if some value "z", '
- 'for which\n'
- 'the expression "x is z or x == z" is true, is produced while '
- 'iterating\n'
- 'over "y". If an exception is raised during the iteration, it '
- 'is as if\n'
- '"in" raised that exception.\n'
+ 'define "__iter__()", "x in y" is "True" if some value "z", '
+ 'for which\n'
+ 'the expression "x is z or x == z" is true, is produced while '
+ 'iterating\n'
+ 'over "y". If an exception is raised during the iteration, it '
+ 'is as if\n'
+ '"in" raised that exception.\n'
'\n'
'Lastly, the old-style iteration protocol is tried: if a class '
'defines\n'
'"__getitem__()", "x in y" is "True" if and only if there is a '
'non-\n'
- 'negative integer index *i* such that "x is y[i] or x == '
- 'y[i]", and no\n'
- 'lower integer index raises the "IndexError" exception. (If '
- 'any other\n'
+ 'negative integer index *i* such that "x is y[i] or x == '
+ 'y[i]", and no\n'
+ 'lower integer index raises the "IndexError" exception. (If '
+ 'any other\n'
'exception is raised, it is as if "in" raised that '
'exception).\n'
'\n'
- 'The operator "not in" is defined to have the inverse truth '
+ 'The operator "not in" is defined to have the inverse truth '
'value of\n'
'"in".\n'
'\n'
@@ -2151,13 +2151,13 @@ topics = {'assert': 'The "assert" statement\n'
'Identity comparisons\n'
'====================\n'
'\n'
- 'The operators "is" and "is not" test for an object’s '
- 'identity: "x is\n'
- 'y" is true if and only if *x* and *y* are the same object. '
- 'An\n'
- 'Object’s identity is determined using the "id()" function. '
- '"x is not\n'
- 'y" yields the inverse truth value. [4]\n',
+ 'The operators "is" and "is not" test for an object’s '
+ 'identity: "x is\n'
+ 'y" is true if and only if *x* and *y* are the same object. '
+ 'An\n'
+                'object’s identity is determined using the "id()" function. '
+ '"x is not\n'
+ 'y" yields the inverse truth value. [4]\n',
'compound': 'Compound statements\n'
'*******************\n'
'\n'
@@ -2248,8 +2248,8 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'The "if" statement is used for conditional execution:\n'
'\n'
- ' if_stmt ::= "if" assignment_expression ":" suite\n'
- ' ("elif" assignment_expression ":" suite)*\n'
+ ' if_stmt ::= "if" assignment_expression ":" suite\n'
+ ' ("elif" assignment_expression ":" suite)*\n'
' ["else" ":" suite]\n'
'\n'
'It selects exactly one of the suites by evaluating the '
@@ -2272,7 +2272,7 @@ topics = {'assert': 'The "assert" statement\n'
'an\n'
'expression is true:\n'
'\n'
- ' while_stmt ::= "while" assignment_expression ":" suite\n'
+ ' while_stmt ::= "while" assignment_expression ":" suite\n'
' ["else" ":" suite]\n'
'\n'
'This repeatedly tests the expression and, if it is true, '
@@ -2332,7 +2332,7 @@ topics = {'assert': 'The "assert" statement\n'
'next\n'
'item.\n'
'\n'
- 'The for-loop makes assignments to the variables in the target '
+ 'The for-loop makes assignments to the variables in the target '
'list.\n'
'This overwrites all previous assignments to those variables '
'including\n'
@@ -2356,11 +2356,11 @@ topics = {'assert': 'The "assert" statement\n'
':= a to b do"; e.g., "list(range(3))" returns the list "[0, 1, '
'2]".\n'
'\n'
- 'Note:\n'
- '\n'
- ' There is a subtlety when the sequence is being modified by the '
- 'loop\n'
- ' (this can only occur for mutable sequences, e.g. lists). An\n'
+ 'Note:\n'
+ '\n'
+ ' There is a subtlety when the sequence is being modified by the '
+ 'loop\n'
+ ' (this can only occur for mutable sequences, e.g. lists). An\n'
' internal counter is used to keep track of which item is used '
'next,\n'
' and this is incremented on each iteration. When this counter '
@@ -2418,9 +2418,9 @@ topics = {'assert': 'The "assert" statement\n'
'compatible\n'
'with an exception if it is the class or a base class of the '
'exception\n'
- 'object, or a tuple containing an item that is the class or a '
- 'base\n'
- 'class of the exception object.\n'
+ 'object, or a tuple containing an item that is the class or a '
+ 'base\n'
+ 'class of the exception object.\n'
'\n'
'If no except clause matches the exception, the search for an '
'exception\n'
@@ -2514,9 +2514,9 @@ topics = {'assert': 'The "assert" statement\n'
'saved\n'
'exception is set as the context of the new exception. If the '
'"finally"\n'
- 'clause executes a "return", "break" or "continue" statement, the '
- 'saved\n'
- 'exception is discarded:\n'
+ 'clause executes a "return", "break" or "continue" statement, the '
+ 'saved\n'
+ 'exception is discarded:\n'
'\n'
' >>> def f():\n'
' ... try:\n'
@@ -2535,7 +2535,7 @@ topics = {'assert': 'The "assert" statement\n'
'the\n'
'"try" suite of a "try"…"finally" statement, the "finally" clause '
'is\n'
- 'also executed ‘on the way out.’\n'
+ 'also executed ‘on the way out.’\n'
'\n'
'The return value of a function is determined by the last '
'"return"\n'
@@ -2559,12 +2559,12 @@ topics = {'assert': 'The "assert" statement\n'
'generate\n'
'exceptions may be found in section The raise statement.\n'
'\n'
- 'Changed in version 3.8: Prior to Python 3.8, a "continue" '
- 'statement\n'
- 'was illegal in the "finally" clause due to a problem with the\n'
- 'implementation.\n'
- '\n'
+ 'Changed in version 3.8: Prior to Python 3.8, a "continue" '
+ 'statement\n'
+ 'was illegal in the "finally" clause due to a problem with the\n'
+ 'implementation.\n'
'\n'
+ '\n'
'The "with" statement\n'
'====================\n'
'\n'
@@ -2584,33 +2584,33 @@ topics = {'assert': 'The "assert" statement\n'
'follows:\n'
'\n'
'1. The context expression (the expression given in the '
- '"with_item") is\n'
- ' evaluated to obtain a context manager.\n'
- '\n'
- '2. The context manager’s "__enter__()" is loaded for later use.\n'
+ '"with_item") is\n'
+ ' evaluated to obtain a context manager.\n'
'\n'
- '3. The context manager’s "__exit__()" is loaded for later use.\n'
+ '2. The context manager’s "__enter__()" is loaded for later use.\n'
'\n'
- '4. The context manager’s "__enter__()" method is invoked.\n'
- '\n'
- '5. If a target was included in the "with" statement, the return '
- 'value\n'
- ' from "__enter__()" is assigned to it.\n'
+ '3. The context manager’s "__exit__()" is loaded for later use.\n'
'\n'
- ' Note:\n'
+ '4. The context manager’s "__enter__()" method is invoked.\n'
+ '\n'
+ '5. If a target was included in the "with" statement, the return '
+ 'value\n'
+ ' from "__enter__()" is assigned to it.\n'
'\n'
- ' The "with" statement guarantees that if the "__enter__()" '
- 'method\n'
- ' returns without an error, then "__exit__()" will always be\n'
+ ' Note:\n'
+ '\n'
+ ' The "with" statement guarantees that if the "__enter__()" '
+ 'method\n'
+ ' returns without an error, then "__exit__()" will always be\n'
' called. Thus, if an error occurs during the assignment to '
'the\n'
' target list, it will be treated the same as an error '
'occurring\n'
' within the suite would be. See step 6 below.\n'
'\n'
- '6. The suite is executed.\n'
+ '6. The suite is executed.\n'
'\n'
- '7. The context manager’s "__exit__()" method is invoked. If an\n'
+ '7. The context manager’s "__exit__()" method is invoked. If an\n'
' exception caused the suite to be exited, its type, value, '
'and\n'
' traceback are passed as arguments to "__exit__()". Otherwise, '
@@ -2632,42 +2632,42 @@ topics = {'assert': 'The "assert" statement\n'
'proceeds\n'
' at the normal location for the kind of exit that was taken.\n'
'\n'
- 'The following code:\n'
- '\n'
- ' with EXPRESSION as TARGET:\n'
- ' SUITE\n'
- '\n'
- 'is semantically equivalent to:\n'
- '\n'
- ' manager = (EXPRESSION)\n'
- ' enter = type(manager).__enter__\n'
- ' exit = type(manager).__exit__\n'
- ' value = enter(manager)\n'
- ' hit_except = False\n'
- '\n'
- ' try:\n'
- ' TARGET = value\n'
- ' SUITE\n'
- ' except:\n'
- ' hit_except = True\n'
- ' if not exit(manager, *sys.exc_info()):\n'
- ' raise\n'
- ' finally:\n'
- ' if not hit_except:\n'
- ' exit(manager, None, None, None)\n'
- '\n'
+ 'The following code:\n'
+ '\n'
+ ' with EXPRESSION as TARGET:\n'
+ ' SUITE\n'
+ '\n'
+ 'is semantically equivalent to:\n'
+ '\n'
+ ' manager = (EXPRESSION)\n'
+ ' enter = type(manager).__enter__\n'
+ ' exit = type(manager).__exit__\n'
+ ' value = enter(manager)\n'
+ ' hit_except = False\n'
+ '\n'
+ ' try:\n'
+ ' TARGET = value\n'
+ ' SUITE\n'
+ ' except:\n'
+ ' hit_except = True\n'
+ ' if not exit(manager, *sys.exc_info()):\n'
+ ' raise\n'
+ ' finally:\n'
+ ' if not hit_except:\n'
+ ' exit(manager, None, None, None)\n'
+ '\n'
'With more than one item, the context managers are processed as '
'if\n'
'multiple "with" statements were nested:\n'
'\n'
' with A() as a, B() as b:\n'
- ' SUITE\n'
+ ' SUITE\n'
'\n'
- 'is semantically equivalent to:\n'
+ 'is semantically equivalent to:\n'
'\n'
' with A() as a:\n'
' with B() as b:\n'
- ' SUITE\n'
+ ' SUITE\n'
'\n'
'Changed in version 3.1: Support for multiple context '
'expressions.\n'
@@ -2687,24 +2687,24 @@ topics = {'assert': 'The "assert" statement\n'
'(see\n'
'section The standard type hierarchy):\n'
'\n'
- ' funcdef ::= [decorators] "def" funcname "(" '
+ ' funcdef ::= [decorators] "def" funcname "(" '
'[parameter_list] ")"\n'
' ["->" expression] ":" suite\n'
- ' decorators ::= decorator+\n'
- ' decorator ::= "@" assignment_expression '
- 'NEWLINE\n'
- ' parameter_list ::= defparameter ("," '
- 'defparameter)* "," "/" ["," [parameter_list_no_posonly]]\n'
- ' | parameter_list_no_posonly\n'
- ' parameter_list_no_posonly ::= defparameter ("," '
- 'defparameter)* ["," [parameter_list_starargs]]\n'
- ' | parameter_list_starargs\n'
- ' parameter_list_starargs ::= "*" [parameter] ("," '
+ ' decorators ::= decorator+\n'
+ ' decorator ::= "@" assignment_expression '
+ 'NEWLINE\n'
+ ' parameter_list ::= defparameter ("," '
+ 'defparameter)* "," "/" ["," [parameter_list_no_posonly]]\n'
+ ' | parameter_list_no_posonly\n'
+ ' parameter_list_no_posonly ::= defparameter ("," '
+ 'defparameter)* ["," [parameter_list_starargs]]\n'
+ ' | parameter_list_starargs\n'
+ ' parameter_list_starargs ::= "*" [parameter] ("," '
'defparameter)* ["," ["**" parameter [","]]]\n'
' | "**" parameter [","]\n'
- ' parameter ::= identifier [":" expression]\n'
- ' defparameter ::= parameter ["=" expression]\n'
- ' funcname ::= identifier\n'
+ ' parameter ::= identifier [":" expression]\n'
+ ' defparameter ::= parameter ["=" expression]\n'
+ ' funcname ::= identifier\n'
'\n'
'A function definition is an executable statement. Its execution '
'binds\n'
@@ -2745,11 +2745,11 @@ topics = {'assert': 'The "assert" statement\n'
'the name\n'
'"func".\n'
'\n'
- 'Changed in version 3.9: Functions may be decorated with any '
- 'valid\n'
- '"assignment_expression". Previously, the grammar was much more\n'
- 'restrictive; see **PEP 614** for details.\n'
- '\n'
+ 'Changed in version 3.9: Functions may be decorated with any '
+ 'valid\n'
+ '"assignment_expression". Previously, the grammar was much more\n'
+ 'restrictive; see **PEP 614** for details.\n'
+ '\n'
'When one or more *parameters* have the form *parameter* "="\n'
'*expression*, the function is said to have “default parameter '
'values.”\n'
@@ -2792,7 +2792,7 @@ topics = {'assert': 'The "assert" statement\n'
'Calls.\n'
'A function call always assigns values to all parameters '
'mentioned in\n'
- 'the parameter list, either from positional arguments, from '
+ 'the parameter list, either from positional arguments, from '
'keyword\n'
'arguments, or from default values. If the form “"*identifier"” '
'is\n'
@@ -2804,15 +2804,15 @@ topics = {'assert': 'The "assert" statement\n'
'new\n'
'empty mapping of the same type. Parameters after “"*"” or\n'
'“"*identifier"” are keyword-only parameters and may only be '
- 'passed by\n'
- 'keyword arguments. Parameters before “"/"” are positional-only\n'
- 'parameters and may only be passed by positional arguments.\n'
- '\n'
- 'Changed in version 3.8: The "/" function parameter syntax may be '
- 'used\n'
- 'to indicate positional-only parameters. See **PEP 570** for '
- 'details.\n'
- '\n'
+ 'passed by\n'
+ 'keyword arguments. Parameters before “"/"” are positional-only\n'
+ 'parameters and may only be passed by positional arguments.\n'
+ '\n'
+ 'Changed in version 3.8: The "/" function parameter syntax may be '
+ 'used\n'
+ 'to indicate positional-only parameters. See **PEP 570** for '
+ 'details.\n'
+ '\n'
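
A short sketch of the parameter kinds described above, assuming Python 3.8+ for the "/" marker; f is hypothetical:

    def f(pos_only, /, normal, *, kw_only):
        return pos_only, normal, kw_only

    f(1, 2, kw_only=3)         # OK
    f(1, normal=2, kw_only=3)  # OK
    # f(pos_only=1, normal=2, kw_only=3)  raises TypeError
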
'Parameters may have an *annotation* of the form “": '
'expression"”\n'
'following the parameter name. Any parameter may have an '
@@ -2957,10 +2957,10 @@ topics = {'assert': 'The "assert" statement\n'
'function decorators. The result is then bound to the class '
'name.\n'
'\n'
- 'Changed in version 3.9: Classes may be decorated with any valid\n'
- '"assignment_expression". Previously, the grammar was much more\n'
- 'restrictive; see **PEP 614** for details.\n'
- '\n'
+ 'Changed in version 3.9: Classes may be decorated with any valid\n'
+ '"assignment_expression". Previously, the grammar was much more\n'
+ 'restrictive; see **PEP 614** for details.\n'
+ '\n'
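
A sketch of class decoration, including the relaxed PEP 614 grammar; register and the decorators list are hypothetical:

    registry = []

    def register(cls):
        # Record the class, then return it unchanged.
        registry.append(cls)
        return cls

    @register
    class Widget:
        pass

    decorators = [register]

    @decorators[0]  # a subscription like this is only legal since 3.9 (PEP 614)
    class Gadget:
        pass
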
'**Programmer’s note:** Variables defined in the class definition '
'are\n'
'class attributes; they are shared by instances. Instance '
@@ -3034,35 +3034,35 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
' async_for_stmt ::= "async" for_stmt\n'
'\n'
- 'An *asynchronous iterable* provides an "__aiter__" method that\n'
- 'directly returns an *asynchronous iterator*, which can call\n'
- 'asynchronous code in its "__anext__" method.\n'
+ 'An *asynchronous iterable* provides an "__aiter__" method that\n'
+ 'directly returns an *asynchronous iterator*, which can call\n'
+ 'asynchronous code in its "__anext__" method.\n'
'\n'
'The "async for" statement allows convenient iteration over\n'
- 'asynchronous iterables.\n'
+ 'asynchronous iterables.\n'
'\n'
'The following code:\n'
'\n'
' async for TARGET in ITER:\n'
- ' SUITE\n'
+ ' SUITE\n'
' else:\n'
- ' SUITE2\n'
+ ' SUITE2\n'
'\n'
'is semantically equivalent to:\n'
'\n'
' iter = (ITER)\n'
' iter = type(iter).__aiter__(iter)\n'
' running = True\n'
- '\n'
+ '\n'
' while running:\n'
' try:\n'
' TARGET = await type(iter).__anext__(iter)\n'
' except StopAsyncIteration:\n'
' running = False\n'
' else:\n'
- ' SUITE\n'
+ ' SUITE\n'
' else:\n'
- ' SUITE2\n'
+ ' SUITE2\n'
'\n'
'See also "__aiter__()" and "__anext__()" for details.\n'
'\n'
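
A minimal sketch of the protocol above; Counter is a hypothetical asynchronous iterable whose __anext__ is a coroutine:

    import asyncio

    class Counter:
        def __init__(self, n):
            self.i, self.n = 0, n
        def __aiter__(self):
            return self
        async def __anext__(self):
            if self.i >= self.n:
                raise StopAsyncIteration
            self.i += 1
            return self.i

    async def main():
        async for x in Counter(3):
            print(x)  # 1, then 2, then 3

    asyncio.run(main())
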
@@ -3082,27 +3082,27 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'The following code:\n'
'\n'
- ' async with EXPRESSION as TARGET:\n'
- ' SUITE\n'
+ ' async with EXPRESSION as TARGET:\n'
+ ' SUITE\n'
'\n'
- 'is semantically equivalent to:\n'
+ 'is semantically equivalent to:\n'
'\n'
- ' manager = (EXPRESSION)\n'
- ' aenter = type(manager).__aenter__\n'
- ' aexit = type(manager).__aexit__\n'
- ' value = await aenter(manager)\n'
- ' hit_except = False\n'
+ ' manager = (EXPRESSION)\n'
+ ' aenter = type(manager).__aenter__\n'
+ ' aexit = type(manager).__aexit__\n'
+ ' value = await aenter(manager)\n'
+ ' hit_except = False\n'
'\n'
' try:\n'
- ' TARGET = value\n'
- ' SUITE\n'
+ ' TARGET = value\n'
+ ' SUITE\n'
' except:\n'
- ' hit_except = True\n'
- ' if not await aexit(manager, *sys.exc_info()):\n'
+ ' hit_except = True\n'
+ ' if not await aexit(manager, *sys.exc_info()):\n'
' raise\n'
- ' finally:\n'
- ' if not hit_except:\n'
- ' await aexit(manager, None, None, None)\n'
+ ' finally:\n'
+ ' if not hit_except:\n'
+ ' await aexit(manager, None, None, None)\n'
'\n'
'See also "__aenter__()" and "__aexit__()" for details.\n'
'\n'
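
A minimal sketch of the expansion above, with a hypothetical Session class whose __aenter__ and __aexit__ are awaited:

    import asyncio

    class Session:
        async def __aenter__(self):
            return self
        async def __aexit__(self, exc_type, exc_value, tb):
            return False  # do not suppress exceptions

    async def main():
        async with Session() as s:
            print(s)

    asyncio.run(main())
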
@@ -3119,17 +3119,17 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'-[ Footnotes ]-\n'
'\n'
- '[1] The exception is propagated to the invocation stack unless '
- 'there\n'
- ' is a "finally" clause which happens to raise another '
- 'exception.\n'
- ' That new exception causes the old one to be lost.\n'
+ '[1] The exception is propagated to the invocation stack unless '
+ 'there\n'
+ ' is a "finally" clause which happens to raise another '
+ 'exception.\n'
+ ' That new exception causes the old one to be lost.\n'
'\n'
- '[2] A string literal appearing as the first statement in the '
- 'function\n'
- ' body is transformed into the function’s "__doc__" attribute '
- 'and\n'
- ' therefore the function’s *docstring*.\n'
+ '[2] A string literal appearing as the first statement in the '
+ 'function\n'
+ ' body is transformed into the function’s "__doc__" attribute '
+ 'and\n'
+ ' therefore the function’s *docstring*.\n'
'\n'
'[3] A string literal appearing as the first statement in the '
'class\n'
@@ -3204,10 +3204,10 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'"continue" may only occur syntactically nested in a "for" or '
'"while"\n'
- 'loop, but not nested in a function or class definition within '
- 'that\n'
- 'loop. It continues with the next cycle of the nearest enclosing '
- 'loop.\n'
+ 'loop, but not nested in a function or class definition within '
+ 'that\n'
+ 'loop. It continues with the next cycle of the nearest enclosing '
+ 'loop.\n'
'\n'
'When "continue" passes control out of a "try" statement with a\n'
'"finally" clause, that "finally" clause is executed before '
@@ -3218,7 +3218,7 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'When a description of an arithmetic operator below uses the '
'phrase\n'
- '“the numeric arguments are converted to a common type”, this '
+ '“the numeric arguments are converted to a common type”, this '
'means\n'
'that the operator implementation for built-in types works as '
'follows:\n'
@@ -3228,8 +3228,8 @@ topics = {'assert': 'The "assert" statement\n'
' complex;\n'
'\n'
'* otherwise, if either argument is a floating point number, '
- 'the other\n'
- ' is converted to floating point;\n'
+ 'the other\n'
+ ' is converted to floating point;\n'
'\n'
'* otherwise, both must be integers and no conversion is '
'necessary.\n'
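
The conversion ladder just listed, illustrated:

    1 + 2    # both int: no conversion, result 3
    1 + 2.0  # the int is converted to float, result 3.0
    1 + 2j   # the int is converted to complex, result (1+2j)
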
@@ -3267,15 +3267,15 @@ topics = {'assert': 'The "assert" statement\n'
'returning\n'
' it.\n'
'\n'
- ' If "__new__()" is invoked during object construction and '
- 'it returns\n'
- ' an instance of *cls*, then the new instance’s '
- '"__init__()" method\n'
- ' will be invoked like "__init__(self[, ...])", where '
- '*self* is the\n'
- ' new instance and the remaining arguments are the same as '
- 'were\n'
- ' passed to the object constructor.\n'
+ ' If "__new__()" is invoked during object construction and '
+ 'it returns\n'
+ ' an instance of *cls*, then the new instance’s '
+ '"__init__()" method\n'
+ ' will be invoked like "__init__(self[, ...])", where '
+ '*self* is the\n'
+ ' new instance and the remaining arguments are the same as '
+ 'were\n'
+ ' passed to the object constructor.\n'
'\n'
' If "__new__()" does not return an instance of *cls*, '
'then the new\n'
@@ -3339,9 +3339,9 @@ topics = {'assert': 'The "assert" statement\n'
'for\n'
' objects that still exist when the interpreter exits.\n'
'\n'
- ' Note:\n'
- '\n'
- ' "del x" doesn’t directly call "x.__del__()" — the '
+ ' Note:\n'
+ '\n'
+ ' "del x" doesn’t directly call "x.__del__()" — the '
'former\n'
' decrements the reference count for "x" by one, and the '
'latter is\n'
@@ -3365,16 +3365,16 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
' See also: Documentation for the "gc" module.\n'
'\n'
- ' Warning:\n'
- '\n'
- ' Due to the precarious circumstances under which '
- '"__del__()"\n'
- ' methods are invoked, exceptions that occur during '
- 'their execution\n'
- ' are ignored, and a warning is printed to "sys.stderr" '
- 'instead.\n'
- ' In particular:\n'
+ ' Warning:\n'
'\n'
+ ' Due to the precarious circumstances under which '
+ '"__del__()"\n'
+ ' methods are invoked, exceptions that occur during '
+ 'their execution\n'
+ ' are ignored, and a warning is printed to "sys.stderr" '
+ 'instead.\n'
+ ' In particular:\n'
+ '\n'
' * "__del__()" can be invoked when arbitrary code is '
'being\n'
' executed, including from any arbitrary thread. If '
@@ -3386,20 +3386,20 @@ topics = {'assert': 'The "assert" statement\n'
' that gets interrupted to execute "__del__()".\n'
'\n'
' * "__del__()" can be executed during interpreter '
- 'shutdown. As a\n'
- ' consequence, the global variables it needs to access '
- '(including\n'
- ' other modules) may already have been deleted or set '
- 'to "None".\n'
- ' Python guarantees that globals whose name begins '
- 'with a single\n'
- ' underscore are deleted from their module before '
- 'other globals\n'
- ' are deleted; if no other references to such globals '
- 'exist, this\n'
- ' may help in assuring that imported modules are still '
- 'available\n'
- ' at the time when the "__del__()" method is called.\n'
+ 'shutdown. As a\n'
+ ' consequence, the global variables it needs to access '
+ '(including\n'
+ ' other modules) may already have been deleted or set '
+ 'to "None".\n'
+ ' Python guarantees that globals whose name begins '
+ 'with a single\n'
+ ' underscore are deleted from their module before '
+ 'other globals\n'
+ ' are deleted; if no other references to such globals '
+ 'exist, this\n'
+ ' may help in assuring that imported modules are still '
+ 'available\n'
+ ' at the time when the "__del__()" method is called.\n'
'\n'
'object.__repr__(self)\n'
'\n'
@@ -3460,11 +3460,11 @@ topics = {'assert': 'The "assert" statement\n'
'"str.format()"\n'
' method, to produce a “formatted” string representation '
'of an\n'
- ' object. The *format_spec* argument is a string that '
+ ' object. The *format_spec* argument is a string that '
'contains a\n'
' description of the formatting options desired. The '
'interpretation\n'
- ' of the *format_spec* argument is up to the type '
+ ' of the *format_spec* argument is up to the type '
'implementing\n'
' "__format__()", however most classes will either '
'delegate\n'
@@ -3484,7 +3484,7 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
' Changed in version 3.7: "object.__format__(x, \'\')" is '
'now\n'
- ' equivalent to "str(x)" rather than "format(str(x), '
+ ' equivalent to "str(x)" rather than "format(str(x), '
'\'\')".\n'
'\n'
'object.__lt__(self, other)\n'
@@ -3518,21 +3518,21 @@ topics = {'assert': 'The "assert" statement\n'
' on the value to determine if the result is true or '
'false.\n'
'\n'
- ' By default, "object" implements "__eq__()" by using '
- '"is", returning\n'
- ' "NotImplemented" in the case of a false comparison: '
- '"True if x is y\n'
- ' else NotImplemented". For "__ne__()", by default it '
- 'delegates to\n'
- ' "__eq__()" and inverts the result unless it is '
- '"NotImplemented".\n'
- ' There are no other implied relationships among the '
- 'comparison\n'
- ' operators or default implementations; for example, the '
- 'truth of\n'
- ' "(x<y or x==y)" does not imply "x<=y". To automatically '
- 'generate\n'
- ' ordering operations from a single root operation, see\n'
+ ' By default, "object" implements "__eq__()" by using '
+ '"is", returning\n'
+ ' "NotImplemented" in the case of a false comparison: '
+ '"True if x is y\n'
+ ' else NotImplemented". For "__ne__()", by default it '
+ 'delegates to\n'
+ ' "__eq__()" and inverts the result unless it is '
+ '"NotImplemented".\n'
+ ' There are no other implied relationships among the '
+ 'comparison\n'
+ ' operators or default implementations; for example, the '
+ 'truth of\n'
+ ' "(x<y or x==y)" does not imply "x<=y". To automatically '
+ 'generate\n'
+ ' ordering operations from a single root operation, see\n'
' "functools.total_ordering()".\n'
'\n'
' See the paragraph on "__hash__()" for some important '
@@ -3580,22 +3580,22 @@ topics = {'assert': 'The "assert" statement\n'
' def __hash__(self):\n'
' return hash((self.name, self.nick, self.color))\n'
'\n'
- ' Note:\n'
- '\n'
- ' "hash()" truncates the value returned from an object’s '
- 'custom\n'
- ' "__hash__()" method to the size of a "Py_ssize_t". '
- 'This is\n'
- ' typically 8 bytes on 64-bit builds and 4 bytes on '
- '32-bit builds.\n'
- ' If an object’s "__hash__()" must interoperate on '
- 'builds of\n'
- ' different bit sizes, be sure to check the width on all '
- 'supported\n'
- ' builds. An easy way to do this is with "python -c '
- '"import sys;\n'
- ' print(sys.hash_info.width)"".\n'
- '\n'
+ ' Note:\n'
+ '\n'
+ ' "hash()" truncates the value returned from an object’s '
+ 'custom\n'
+ ' "__hash__()" method to the size of a "Py_ssize_t". '
+ 'This is\n'
+ ' typically 8 bytes on 64-bit builds and 4 bytes on '
+ '32-bit builds.\n'
+ ' If an object’s "__hash__()" must interoperate on '
+ 'builds of\n'
+ ' different bit sizes, be sure to check the width on all '
+ 'supported\n'
+ ' builds. An easy way to do this is with "python -c '
+ '"import sys;\n'
+ ' print(sys.hash_info.width)"".\n'
+ '\n'
' If a class does not define an "__eq__()" method it '
'should not\n'
' define a "__hash__()" operation either; if it defines '
@@ -3652,24 +3652,24 @@ topics = {'assert': 'The "assert" statement\n'
' hashable by an "isinstance(obj, '
'collections.abc.Hashable)" call.\n'
'\n'
- ' Note:\n'
- '\n'
- ' By default, the "__hash__()" values of str and bytes '
- 'objects are\n'
- ' “salted” with an unpredictable random value. Although '
- 'they\n'
- ' remain constant within an individual Python process, '
- 'they are not\n'
- ' predictable between repeated invocations of '
- 'Python. This is\n'
- ' intended to provide protection against a '
- 'denial-of-service caused\n'
- ' by carefully-chosen inputs that exploit the worst '
- 'case\n'
- ' performance of a dict insertion, O(n^2) complexity. '
- 'See\n'
- ' http://www.ocert.org/advisories/ocert-2011-003.html '
- 'for\n'
+ ' Note:\n'
+ '\n'
+ ' By default, the "__hash__()" values of str and bytes '
+ 'objects are\n'
+ ' “salted” with an unpredictable random value. Although '
+ 'they\n'
+ ' remain constant within an individual Python process, '
+ 'they are not\n'
+ ' predictable between repeated invocations of '
+ 'Python. This is\n'
+ ' intended to provide protection against a '
+ 'denial-of-service caused\n'
+ ' by carefully-chosen inputs that exploit the worst '
+ 'case\n'
+ ' performance of a dict insertion, O(n^2) complexity. '
+ 'See\n'
+ ' http://www.ocert.org/advisories/ocert-2011-003.html '
+ 'for\n'
' details. Changing hash values affects the iteration '
'order of sets.\n'
' Python has never made guarantees about this ordering '
@@ -3770,16 +3770,16 @@ topics = {'assert': 'The "assert" statement\n'
'debugger will pause execution just before the first line of the\n'
'module.\n'
'\n'
- 'The typical usage to break into the debugger is to insert:\n'
+ 'The typical usage to break into the debugger is to insert:\n'
'\n'
' import pdb; pdb.set_trace()\n'
'\n'
- 'at the location you want to break into the debugger, and then '
- 'run the\n'
- 'program. You can then step through the code following this '
- 'statement,\n'
- 'and continue running without the debugger using the "continue"\n'
- 'command.\n'
+ 'at the location you want to break into the debugger, and then '
+ 'run the\n'
+ 'program. You can then step through the code following this '
+ 'statement,\n'
+ 'and continue running without the debugger using the "continue"\n'
+ 'command.\n'
'\n'
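
Since 3.7 the same effect is usually spelled with the built-in noted just below; a sketch, with buggy() hypothetical:

    def buggy(x):
        breakpoint()  # 3.7+; calls pdb.set_trace() by default
        return x / 0
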
'New in version 3.7: The built-in "breakpoint()", when called '
'with\n'
@@ -3911,8 +3911,8 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
" import pdb; pdb.Pdb(skip=['django.*']).set_trace()\n"
'\n'
- ' Raises an auditing event "pdb.Pdb" with no arguments.\n'
- '\n'
+ ' Raises an auditing event "pdb.Pdb" with no arguments.\n'
+ '\n'
' New in version 3.1: The *skip* argument.\n'
'\n'
' New in version 3.2: The *nosigint* argument. Previously, a '
@@ -4058,7 +4058,7 @@ topics = {'assert': 'The "assert" statement\n'
'is\n'
' first hit. The arguments are the same as for "break".\n'
'\n'
- 'cl(ear) [filename:lineno | bpnumber ...]\n'
+ 'cl(ear) [filename:lineno | bpnumber ...]\n'
'\n'
' With a *filename:lineno* argument, clear all the breakpoints '
'at\n'
@@ -4068,7 +4068,7 @@ topics = {'assert': 'The "assert" statement\n'
'first\n'
' ask confirmation).\n'
'\n'
- 'disable [bpnumber ...]\n'
+ 'disable [bpnumber ...]\n'
'\n'
' Disable the breakpoints given as a space separated list of\n'
' breakpoint numbers. Disabling a breakpoint means it cannot '
@@ -4077,7 +4077,7 @@ topics = {'assert': 'The "assert" statement\n'
'breakpoint, it\n'
' remains in the list of breakpoints and can be (re-)enabled.\n'
'\n'
- 'enable [bpnumber ...]\n'
+ 'enable [bpnumber ...]\n'
'\n'
' Enable the breakpoints specified.\n'
'\n'
@@ -4248,12 +4248,12 @@ topics = {'assert': 'The "assert" statement\n'
'its\n'
' value.\n'
'\n'
- ' Note:\n'
- '\n'
- ' "print()" can also be used, but is not a debugger command — '
- 'this\n'
- ' executes the Python "print()" function.\n'
+ ' Note:\n'
'\n'
+ ' "print()" can also be used, but is not a debugger command — '
+ 'this\n'
+ ' executes the Python "print()" function.\n'
+ '\n'
'pp expression\n'
'\n'
' Like the "p" command, except the value of the expression is '
@@ -4369,23 +4369,23 @@ topics = {'assert': 'The "assert" statement\n'
' Quit from the debugger. The program being executed is '
'aborted.\n'
'\n'
- 'debug code\n'
- '\n'
- ' Enter a recursive debugger that steps through the code '
- 'argument\n'
- ' (which is an arbitrary expression or statement to be executed '
- 'in\n'
- ' the current environment).\n'
- '\n'
- 'retval\n'
- '\n'
- ' Print the return value for the last return of a function.\n'
- '\n'
+ 'debug code\n'
+ '\n'
+ ' Enter a recursive debugger that steps through the code '
+ 'argument\n'
+ ' (which is an arbitrary expression or statement to be executed '
+ 'in\n'
+ ' the current environment).\n'
+ '\n'
+ 'retval\n'
+ '\n'
+ ' Print the return value for the last return of a function.\n'
+ '\n'
'-[ Footnotes ]-\n'
'\n'
'[1] Whether a frame is considered to originate in a certain '
- 'module is\n'
- ' determined by the "__name__" in the frame globals.\n',
+ 'module is\n'
+ ' determined by the "__name__" in the frame globals.\n',
'del': 'The "del" statement\n'
'*******************\n'
'\n'
@@ -4458,14 +4458,14 @@ topics = {'assert': 'The "assert" statement\n'
'section The standard type hierarchy. (To summarize, the key type\n'
'should be *hashable*, which excludes all mutable objects.) Clashes\n'
'between duplicate keys are not detected; the last datum (textually\n'
- 'rightmost in the display) stored for a given key value prevails.\n'
- '\n'
- 'Changed in version 3.8: Prior to Python 3.8, in dict '
- 'comprehensions,\n'
- 'the evaluation order of key and value was not well-defined. In\n'
- 'CPython, the value was evaluated before the key. Starting with '
- '3.8,\n'
- 'the key is evaluated before the value, as proposed by **PEP 572**.\n',
+ 'rightmost in the display) stored for a given key value prevails.\n'
+ '\n'
+ 'Changed in version 3.8: Prior to Python 3.8, in dict '
+ 'comprehensions,\n'
+ 'the evaluation order of key and value was not well-defined. In\n'
+ 'CPython, the value was evaluated before the key. Starting with '
+ '3.8,\n'
+ 'the key is evaluated before the value, as proposed by **PEP 572**.\n',
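
A sketch that makes the comprehension evaluation order observable; key() and val() are hypothetical probes:

    order = []

    def key(k):
        order.append('key')
        return k

    def val(v):
        order.append('value')
        return v

    {key(k): val(v) for k, v in [(1, 2)]}
    # order == ['key', 'value'] on 3.8+; CPython 3.7 gave ['value', 'key']
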
'dynamic-features': 'Interaction with dynamic features\n'
'*********************************\n'
'\n'
@@ -4497,8 +4497,8 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'The "if" statement is used for conditional execution:\n'
'\n'
- ' if_stmt ::= "if" assignment_expression ":" suite\n'
- ' ("elif" assignment_expression ":" suite)*\n'
+ ' if_stmt ::= "if" assignment_expression ":" suite\n'
+ ' ("elif" assignment_expression ":" suite)*\n'
' ["else" ":" suite]\n'
'\n'
'It selects exactly one of the suites by evaluating the expressions '
@@ -4551,7 +4551,7 @@ topics = {'assert': 'The "assert" statement\n'
'terminates\n'
'execution of the program, or returns to its interactive main '
'loop. In\n'
- 'either case, it prints a stack traceback, except when the '
+ 'either case, it prints a stack traceback, except when the '
'exception is\n'
'"SystemExit".\n'
'\n'
@@ -4565,16 +4565,16 @@ topics = {'assert': 'The "assert" statement\n'
'about the\n'
'exceptional condition.\n'
'\n'
- 'Note:\n'
- '\n'
- ' Exception messages are not part of the Python API. Their '
- 'contents\n'
- ' may change from one version of Python to the next without '
- 'warning\n'
- ' and should not be relied on by code which will run under '
- 'multiple\n'
- ' versions of the interpreter.\n'
+ 'Note:\n'
'\n'
+ ' Exception messages are not part of the Python API. Their '
+ 'contents\n'
+ ' may change from one version of Python to the next without '
+ 'warning\n'
+ ' and should not be relied on by code which will run under '
+ 'multiple\n'
+ ' versions of the interpreter.\n'
+ '\n'
'See also the description of the "try" statement in section The '
'try\n'
'statement and "raise" statement in section The raise '
@@ -4583,9 +4583,9 @@ topics = {'assert': 'The "assert" statement\n'
'-[ Footnotes ]-\n'
'\n'
'[1] This limitation occurs because the code that is executed '
- 'by these\n'
- ' operations is not available at the time the module is '
- 'compiled.\n',
+ 'by these\n'
+ ' operations is not available at the time the module is '
+ 'compiled.\n',
'execmodel': 'Execution model\n'
'***************\n'
'\n'
@@ -4607,13 +4607,13 @@ topics = {'assert': 'The "assert" statement\n'
'(a\n'
'command specified on the interpreter command line with the '
'"-c"\n'
- 'option) is a code block. A module run as a top level script (as '
- 'module\n'
- '"__main__") from the command line using a "-m" argument is also '
- 'a code\n'
- 'block. The string argument passed to the built-in functions '
- '"eval()"\n'
- 'and "exec()" is a code block.\n'
+ 'option) is a code block. A module run as a top level script (as '
+ 'module\n'
+ '"__main__") from the command line using a "-m" argument is also '
+ 'a code\n'
+ 'block. The string argument passed to the built-in functions '
+ '"eval()"\n'
+ 'and "exec()" is a code block.\n'
'\n'
'A code block is executed in an *execution frame*. A frame '
'contains\n'
@@ -4721,9 +4721,9 @@ topics = {'assert': 'The "assert" statement\n'
'operations.\n'
'\n'
'If the "global" statement occurs within a block, all uses of '
- 'the names\n'
- 'specified in the statement refer to the bindings of those names '
- 'in the\n'
+ 'the names\n'
+ 'specified in the statement refer to the bindings of those names '
+ 'in the\n'
'top-level namespace. Names are resolved in the top-level '
'namespace by\n'
'searching the global namespace, i.e. the namespace of the '
@@ -4732,10 +4732,10 @@ topics = {'assert': 'The "assert" statement\n'
'namespace\n'
'of the module "builtins". The global namespace is searched '
'first. If\n'
- 'the names are not found there, the builtins namespace is '
- 'searched.\n'
- 'The "global" statement must precede all uses of the listed '
- 'names.\n'
+ 'the names are not found there, the builtins namespace is '
+ 'searched.\n'
+ 'The "global" statement must precede all uses of the listed '
+ 'names.\n'
'\n'
'The "global" statement has the same scope as a name binding '
'operation\n'
@@ -4878,7 +4878,7 @@ topics = {'assert': 'The "assert" statement\n'
'terminates\n'
'execution of the program, or returns to its interactive main '
'loop. In\n'
- 'either case, it prints a stack traceback, except when the '
+ 'either case, it prints a stack traceback, except when the '
'exception is\n'
'"SystemExit".\n'
'\n'
@@ -4892,16 +4892,16 @@ topics = {'assert': 'The "assert" statement\n'
'about the\n'
'exceptional condition.\n'
'\n'
- 'Note:\n'
- '\n'
- ' Exception messages are not part of the Python API. Their '
- 'contents\n'
- ' may change from one version of Python to the next without '
- 'warning\n'
- ' and should not be relied on by code which will run under '
- 'multiple\n'
- ' versions of the interpreter.\n'
+ 'Note:\n'
'\n'
+ ' Exception messages are not part of the Python API. Their '
+ 'contents\n'
+ ' may change from one version of Python to the next without '
+ 'warning\n'
+ ' and should not be relied on by code which will run under '
+ 'multiple\n'
+ ' versions of the interpreter.\n'
+ '\n'
'See also the description of the "try" statement in section The '
'try\n'
'statement and "raise" statement in section The raise '
@@ -4909,10 +4909,10 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'-[ Footnotes ]-\n'
'\n'
- '[1] This limitation occurs because the code that is executed by '
- 'these\n'
- ' operations is not available at the time the module is '
- 'compiled.\n',
+ '[1] This limitation occurs because the code that is executed by '
+ 'these\n'
+ ' operations is not available at the time the module is '
+ 'compiled.\n',
'exprlists': 'Expression lists\n'
'****************\n'
'\n'
@@ -4921,7 +4921,7 @@ topics = {'assert': 'The "assert" statement\n'
'[","]\n'
' starred_expression ::= expression | (starred_item ",")* '
'[starred_item]\n'
- ' starred_item ::= assignment_expression | "*" or_expr\n'
+ ' starred_item ::= assignment_expression | "*" or_expr\n'
'\n'
'Except when part of a list or set display, an expression list\n'
'containing at least one comma yields a tuple. The length of '
@@ -5012,7 +5012,7 @@ topics = {'assert': 'The "assert" statement\n'
'with the next item, or with the "else" clause if there is no next\n'
'item.\n'
'\n'
- 'The for-loop makes assignments to the variables in the target list.\n'
+ 'The for-loop makes assignments to the variables in the target list.\n'
'This overwrites all previous assignments to those variables '
'including\n'
'those made in the suite of the for-loop:\n'
@@ -5031,11 +5031,11 @@ topics = {'assert': 'The "assert" statement\n'
'i\n'
':= a to b do"; e.g., "list(range(3))" returns the list "[0, 1, 2]".\n'
'\n'
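
The overwriting of target-list variables described above, in a sketch:

    for i in range(3):
        print(i)
        i = 10  # has no effect on the loop: i is rebound to the
                # next item at the top of each iteration
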
- 'Note:\n'
- '\n'
- ' There is a subtlety when the sequence is being modified by the '
- 'loop\n'
- ' (this can only occur for mutable sequences, e.g. lists). An\n'
+ 'Note:\n'
+ '\n'
+ ' There is a subtlety when the sequence is being modified by the '
+ 'loop\n'
+ ' (this can only occur for mutable sequences, e.g. lists). An\n'
' internal counter is used to keep track of which item is used next,\n'
' and this is incremented on each iteration. When this counter has\n'
' reached the length of the sequence the loop terminates. This '
@@ -5062,11 +5062,11 @@ topics = {'assert': 'The "assert" statement\n'
'"Formatter",\n'
'subclasses can define their own format string syntax). The '
'syntax is\n'
- 'related to that of formatted string literals, but it is '
- 'less\n'
- 'sophisticated and, in particular, does not support '
- 'arbitrary\n'
- 'expressions.\n'
+ 'related to that of formatted string literals, but it is '
+ 'less\n'
+ 'sophisticated and, in particular, does not support '
+ 'arbitrary\n'
+ 'expressions.\n'
'\n'
'Format strings contain “replacement fields” surrounded by '
'curly braces\n'
@@ -5236,11 +5236,11 @@ topics = {'assert': 'The "assert" statement\n'
'only\n'
'supported by the numeric types.\n'
'\n'
- 'A general convention is that an empty format specification '
+ 'A general convention is that an empty format specification '
'produces\n'
'the same result as if you had called "str()" on the value. '
'A non-empty\n'
- 'format specification typically modifies the result.\n'
+ 'format specification typically modifies the result.\n'
'\n'
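
The convention just stated, illustrated:

    format(3.25, '')     # '3.25', same as str(3.25)
    format(3.25, '.1f')  # '3.2', a non-empty spec modifies the result
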
'The general form of a *standard format specifier* is:\n'
'\n'
@@ -5260,7 +5260,7 @@ topics = {'assert': 'The "assert" statement\n'
'character that can be any character and defaults to a space '
'if\n'
'omitted. It is not possible to use a literal curly brace '
- '(”"{"” or\n'
+ '(”"{"” or\n'
'“"}"”) as the *fill* character in a formatted string '
'literal or when\n'
'using the "str.format()" method. However, it is possible '
@@ -5278,7 +5278,7 @@ topics = {'assert': 'The "assert" statement\n'
'Meaning '
'|\n'
' '
- '|===========|============================================================|\n'
+ '|===========|============================================================|\n'
' | "\'<\'" | Forces the field to be left-aligned '
'within the available |\n'
' | | space (this is the default for most '
@@ -5327,7 +5327,7 @@ topics = {'assert': 'The "assert" statement\n'
'Meaning '
'|\n'
' '
- '|===========|============================================================|\n'
+ '|===========|============================================================|\n'
' | "\'+\'" | indicates that a sign should be used for '
'both positive as |\n'
' | | well as negative '
@@ -5351,23 +5351,23 @@ topics = {'assert': 'The "assert" statement\n'
'for the\n'
'conversion. The alternate form is defined differently for '
'different\n'
- 'types. This option is only valid for integer, float and '
- 'complex\n'
- 'types. For integers, when binary, octal, or hexadecimal '
- 'output is\n'
- 'used, this option adds the respective prefix "\'0b\'", '
- '"\'0o\'", "\'0x\'",\n'
- 'or "\'0X\'" to the output value. For float and complex the '
- 'alternate\n'
- 'form causes the result of the conversion to always contain '
- 'a decimal-\n'
- 'point character, even if no digits follow it. Normally, a '
- 'decimal-\n'
- 'point character appears in the result of these conversions '
- 'only if a\n'
- 'digit follows it. In addition, for "\'g\'" and "\'G\'" '
- 'conversions,\n'
- 'trailing zeros are not removed from the result.\n'
+ 'types. This option is only valid for integer, float and '
+ 'complex\n'
+ 'types. For integers, when binary, octal, or hexadecimal '
+ 'output is\n'
+ 'used, this option adds the respective prefix "\'0b\'", '
+ '"\'0o\'", "\'0x\'",\n'
+ 'or "\'0X\'" to the output value. For float and complex the '
+ 'alternate\n'
+ 'form causes the result of the conversion to always contain '
+ 'a decimal-\n'
+ 'point character, even if no digits follow it. Normally, a '
+ 'decimal-\n'
+ 'point character appears in the result of these conversions '
+ 'only if a\n'
+ 'digit follows it. In addition, for "\'g\'" and "\'G\'" '
+ 'conversions,\n'
+ 'trailing zeros are not removed from the result.\n'
'\n'
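
The alternate form described above, illustrated:

    format(255, 'x')     # 'ff'
    format(255, '#x')    # '0xff', prefix added
    format(1.0, '.0f')   # '1'
    format(1.0, '#.0f')  # '1.', decimal point kept
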
'The "\',\'" option signals the use of a comma for a '
'thousands separator.\n'
@@ -5392,12 +5392,12 @@ topics = {'assert': 'The "assert" statement\n'
'Changed in version 3.6: Added the "\'_\'" option (see also '
'**PEP 515**).\n'
'\n'
- '*width* is a decimal integer defining the minimum total '
- 'field width,\n'
- 'including any prefixes, separators, and other formatting '
- 'characters.\n'
- 'If not specified, then the field width will be determined '
- 'by the\n'
+ '*width* is a decimal integer defining the minimum total '
+ 'field width,\n'
+ 'including any prefixes, separators, and other formatting '
+ 'characters.\n'
+ 'If not specified, then the field width will be determined '
+ 'by the\n'
'content.\n'
'\n'
'When no explicit alignment is given, preceding the *width* '
@@ -5433,7 +5433,7 @@ topics = {'assert': 'The "assert" statement\n'
'Meaning '
'|\n'
' '
- '|===========|============================================================|\n'
+ '|===========|============================================================|\n'
' | "\'s\'" | String format. This is the default type '
'for strings and |\n'
' | | may be '
@@ -5453,7 +5453,7 @@ topics = {'assert': 'The "assert" statement\n'
'Meaning '
'|\n'
' '
- '|===========|============================================================|\n'
+ '|===========|============================================================|\n'
' | "\'b\'" | Binary format. Outputs the number in '
'base 2. |\n'
' '
@@ -5480,12 +5480,12 @@ topics = {'assert': 'The "assert" statement\n'
'+-----------+------------------------------------------------------------+\n'
' | "\'X\'" | Hex format. Outputs the number in base '
'16, using upper- |\n'
- ' | | case letters for the digits above 9. In '
- 'case "\'#\'" is |\n'
- ' | | specified, the prefix "\'0x\'" will be '
- 'upper-cased to "\'0X\'" |\n'
- ' | | as '
- 'well. |\n'
+ ' | | case letters for the digits above 9. In '
+ 'case "\'#\'" is |\n'
+ ' | | specified, the prefix "\'0x\'" will be '
+ 'upper-cased to "\'0X\'" |\n'
+ ' | | as '
+ 'well. |\n'
' '
'+-----------+------------------------------------------------------------+\n'
' | "\'n\'" | Number. This is the same as "\'d\'", '
@@ -5509,8 +5509,8 @@ topics = {'assert': 'The "assert" statement\n'
'the integer\n'
'to a floating point number before formatting.\n'
'\n'
- 'The available presentation types for "float" and "Decimal" '
- 'values are:\n'
+ 'The available presentation types for "float" and "Decimal" '
+ 'values are:\n'
'\n'
' '
'+-----------+------------------------------------------------------------+\n'
@@ -5518,51 +5518,51 @@ topics = {'assert': 'The "assert" statement\n'
'Meaning '
'|\n'
' '
- '|===========|============================================================|\n'
- ' | "\'e\'" | Scientific notation. For a given '
- 'precision "p", formats |\n'
- ' | | the number in scientific notation with the '
- 'letter ‘e’ |\n'
- ' | | separating the coefficient from the '
- 'exponent. The |\n'
- ' | | coefficient has one digit before and "p" '
- 'digits after the |\n'
- ' | | decimal point, for a total of "p + 1" '
- 'significant digits. |\n'
- ' | | With no precision given, uses a precision '
- 'of "6" digits |\n'
- ' | | after the decimal point for "float", and '
- 'shows all |\n'
- ' | | coefficient digits for "Decimal". If no '
- 'digits follow the |\n'
- ' | | decimal point, the decimal point is also '
- 'removed unless |\n'
- ' | | the "#" option is '
- 'used. |\n'
+ '|===========|============================================================|\n'
+ ' | "\'e\'" | Scientific notation. For a given '
+ 'precision "p", formats |\n'
+ ' | | the number in scientific notation with the '
+ 'letter ‘e’ |\n'
+ ' | | separating the coefficient from the '
+ 'exponent. The |\n'
+ ' | | coefficient has one digit before and "p" '
+ 'digits after the |\n'
+ ' | | decimal point, for a total of "p + 1" '
+ 'significant digits. |\n'
+ ' | | With no precision given, uses a precision '
+ 'of "6" digits |\n'
+ ' | | after the decimal point for "float", and '
+ 'shows all |\n'
+ ' | | coefficient digits for "Decimal". If no '
+ 'digits follow the |\n'
+ ' | | decimal point, the decimal point is also '
+ 'removed unless |\n'
+ ' | | the "#" option is '
+ 'used. |\n'
' '
'+-----------+------------------------------------------------------------+\n'
- ' | "\'E\'" | Scientific notation. Same as "\'e\'" '
- 'except it uses an upper |\n'
+ ' | "\'E\'" | Scientific notation. Same as "\'e\'" '
+ 'except it uses an upper |\n'
' | | case ‘E’ as the separator '
'character. |\n'
' '
'+-----------+------------------------------------------------------------+\n'
- ' | "\'f\'" | Fixed-point notation. For a given '
- 'precision "p", formats |\n'
- ' | | the number as a decimal number with '
- 'exactly "p" digits |\n'
- ' | | following the decimal point. With no '
- 'precision given, uses |\n'
- ' | | a precision of "6" digits after the '
- 'decimal point for |\n'
- ' | | "float", and uses a precision large enough '
- 'to show all |\n'
- ' | | coefficient digits for "Decimal". If no '
- 'digits follow the |\n'
- ' | | decimal point, the decimal point is also '
- 'removed unless |\n'
- ' | | the "#" option is '
- 'used. |\n'
+ ' | "\'f\'" | Fixed-point notation. For a given '
+ 'precision "p", formats |\n'
+ ' | | the number as a decimal number with '
+ 'exactly "p" digits |\n'
+ ' | | following the decimal point. With no '
+ 'precision given, uses |\n'
+ ' | | a precision of "6" digits after the '
+ 'decimal point for |\n'
+ ' | | "float", and uses a precision large enough '
+ 'to show all |\n'
+ ' | | coefficient digits for "Decimal". If no '
+ 'digits follow the |\n'
+ ' | | decimal point, the decimal point is also '
+ 'removed unless |\n'
+ ' | | the "#" option is '
+ 'used. |\n'
' '
'+-----------+------------------------------------------------------------+\n'
' | "\'F\'" | Fixed-point notation. Same as "\'f\'", '
@@ -5578,51 +5578,51 @@ topics = {'assert': 'The "assert" statement\n'
' | | formats the result in either fixed-point '
'format or in |\n'
' | | scientific notation, depending on its '
- 'magnitude. A |\n'
- ' | | precision of "0" is treated as equivalent '
- 'to a precision |\n'
- ' | | of "1". The precise rules are as follows: '
- 'suppose that |\n'
- ' | | the result formatted with presentation '
- 'type "\'e\'" and |\n'
- ' | | precision "p-1" would have exponent '
- '"exp". Then, if "m <= |\n'
- ' | | exp < p", where "m" is -4 for floats and '
- '-6 for |\n'
- ' | | "Decimals", the number is formatted with '
- 'presentation type |\n'
- ' | | "\'f\'" and precision "p-1-exp". '
- 'Otherwise, the number is |\n'
+ 'magnitude. A |\n'
+ ' | | precision of "0" is treated as equivalent '
+ 'to a precision |\n'
+ ' | | of "1". The precise rules are as follows: '
+ 'suppose that |\n'
+ ' | | the result formatted with presentation '
+ 'type "\'e\'" and |\n'
+ ' | | precision "p-1" would have exponent '
+ '"exp". Then, if "m <= |\n'
+ ' | | exp < p", where "m" is -4 for floats and '
+ '-6 for |\n'
+ ' | | "Decimals", the number is formatted with '
+ 'presentation type |\n'
+ ' | | "\'f\'" and precision "p-1-exp". '
+ 'Otherwise, the number is |\n'
' | | formatted with presentation type "\'e\'" '
- 'and precision |\n'
- ' | | "p-1". In both cases insignificant '
- 'trailing zeros are |\n'
- ' | | removed from the significand, and the '
- 'decimal point is |\n'
- ' | | also removed if there are no remaining '
- 'digits following |\n'
- ' | | it, unless the "\'#\'" option is used. '
- 'With no precision |\n'
- ' | | given, uses a precision of "6" significant '
- 'digits for |\n'
- ' | | "float". For "Decimal", the coefficient of '
- 'the result is |\n'
- ' | | formed from the coefficient digits of the '
- 'value; |\n'
- ' | | scientific notation is used for values '
- 'smaller than "1e-6" |\n'
- ' | | in absolute value and values where the '
- 'place value of the |\n'
- ' | | least significant digit is larger than 1, '
- 'and fixed-point |\n'
- ' | | notation is used otherwise. Positive and '
- 'negative |\n'
- ' | | infinity, positive and negative zero, and '
- 'nans, are |\n'
- ' | | formatted as "inf", "-inf", "0", "-0" and '
- '"nan" |\n'
- ' | | respectively, regardless of the '
- 'precision. |\n'
+ 'and precision |\n'
+ ' | | "p-1". In both cases insignificant '
+ 'trailing zeros are |\n'
+ ' | | removed from the significand, and the '
+ 'decimal point is |\n'
+ ' | | also removed if there are no remaining '
+ 'digits following |\n'
+ ' | | it, unless the "\'#\'" option is used. '
+ 'With no precision |\n'
+ ' | | given, uses a precision of "6" significant '
+ 'digits for |\n'
+ ' | | "float". For "Decimal", the coefficient of '
+ 'the result is |\n'
+ ' | | formed from the coefficient digits of the '
+ 'value; |\n'
+ ' | | scientific notation is used for values '
+ 'smaller than "1e-6" |\n'
+ ' | | in absolute value and values where the '
+ 'place value of the |\n'
+ ' | | least significant digit is larger than 1, '
+ 'and fixed-point |\n'
+ ' | | notation is used otherwise. Positive and '
+ 'negative |\n'
+ ' | | infinity, positive and negative zero, and '
+ 'nans, are |\n'
+ ' | | formatted as "inf", "-inf", "0", "-0" and '
+ '"nan" |\n'
+ ' | | respectively, regardless of the '
+ 'precision. |\n'
' '
'+-----------+------------------------------------------------------------+\n'
' | "\'G\'" | General format. Same as "\'g\'" except '
@@ -5647,24 +5647,24 @@ topics = {'assert': 'The "assert" statement\n'
'percent sign. |\n'
' '
'+-----------+------------------------------------------------------------+\n'
- ' | None | For "float" this is the same as "\'g\'", '
- 'except that when |\n'
- ' | | fixed-point notation is used to format the '
- 'result, it |\n'
- ' | | always includes at least one digit past '
- 'the decimal point. |\n'
- ' | | The precision used is as large as needed '
- 'to represent the |\n'
- ' | | given value faithfully. For "Decimal", '
- 'this is the same |\n'
- ' | | as either "\'g\'" or "\'G\'" depending on '
- 'the value of |\n'
- ' | | "context.capitals" for the current decimal '
- 'context. The |\n'
- ' | | overall effect is to match the output of '
- '"str()" as |\n'
- ' | | altered by the other format '
- 'modifiers. |\n'
+ ' | None | For "float" this is the same as "\'g\'", '
+ 'except that when |\n'
+ ' | | fixed-point notation is used to format the '
+ 'result, it |\n'
+ ' | | always includes at least one digit past '
+ 'the decimal point. |\n'
+ ' | | The precision used is as large as needed '
+ 'to represent the |\n'
+ ' | | given value faithfully. For "Decimal", '
+ 'this is the same |\n'
+ ' | | as either "\'g\'" or "\'G\'" depending on '
+ 'the value of |\n'
+ ' | | "context.capitals" for the current decimal '
+ 'context. The |\n'
+ ' | | overall effect is to match the output of '
+ '"str()" as |\n'
+ ' | | altered by the other format '
+ 'modifiers. |\n'
' '
'+-----------+------------------------------------------------------------+\n'
'\n'
@@ -5836,24 +5836,24 @@ topics = {'assert': 'The "assert" statement\n'
'(see\n'
'section The standard type hierarchy):\n'
'\n'
- ' funcdef ::= [decorators] "def" funcname "(" '
+ ' funcdef ::= [decorators] "def" funcname "(" '
'[parameter_list] ")"\n'
' ["->" expression] ":" suite\n'
- ' decorators ::= decorator+\n'
- ' decorator ::= "@" assignment_expression '
- 'NEWLINE\n'
- ' parameter_list ::= defparameter ("," '
- 'defparameter)* "," "/" ["," [parameter_list_no_posonly]]\n'
- ' | parameter_list_no_posonly\n'
- ' parameter_list_no_posonly ::= defparameter ("," '
- 'defparameter)* ["," [parameter_list_starargs]]\n'
- ' | parameter_list_starargs\n'
- ' parameter_list_starargs ::= "*" [parameter] ("," '
+ ' decorators ::= decorator+\n'
+ ' decorator ::= "@" assignment_expression '
+ 'NEWLINE\n'
+ ' parameter_list ::= defparameter ("," '
+ 'defparameter)* "," "/" ["," [parameter_list_no_posonly]]\n'
+ ' | parameter_list_no_posonly\n'
+ ' parameter_list_no_posonly ::= defparameter ("," '
+ 'defparameter)* ["," [parameter_list_starargs]]\n'
+ ' | parameter_list_starargs\n'
+ ' parameter_list_starargs ::= "*" [parameter] ("," '
'defparameter)* ["," ["**" parameter [","]]]\n'
' | "**" parameter [","]\n'
- ' parameter ::= identifier [":" expression]\n'
- ' defparameter ::= parameter ["=" expression]\n'
- ' funcname ::= identifier\n'
+ ' parameter ::= identifier [":" expression]\n'
+ ' defparameter ::= parameter ["=" expression]\n'
+ ' funcname ::= identifier\n'
'\n'
'A function definition is an executable statement. Its execution '
'binds\n'
@@ -5894,11 +5894,11 @@ topics = {'assert': 'The "assert" statement\n'
'the name\n'
'"func".\n'
'\n'
- 'Changed in version 3.9: Functions may be decorated with any '
- 'valid\n'
- '"assignment_expression". Previously, the grammar was much more\n'
- 'restrictive; see **PEP 614** for details.\n'
- '\n'
+ 'Changed in version 3.9: Functions may be decorated with any '
+ 'valid\n'
+ '"assignment_expression". Previously, the grammar was much more\n'
+ 'restrictive; see **PEP 614** for details.\n'
+ '\n'
'When one or more *parameters* have the form *parameter* "="\n'
'*expression*, the function is said to have “default parameter '
'values.”\n'
@@ -5941,7 +5941,7 @@ topics = {'assert': 'The "assert" statement\n'
'Calls.\n'
'A function call always assigns values to all parameters '
'mentioned in\n'
- 'the parameter list, either from positional arguments, from '
+ 'the parameter list, either from positional arguments, from '
'keyword\n'
'arguments, or from default values. If the form “"*identifier"” '
'is\n'
@@ -5953,15 +5953,15 @@ topics = {'assert': 'The "assert" statement\n'
'new\n'
'empty mapping of the same type. Parameters after “"*"” or\n'
'“"*identifier"” are keyword-only parameters and may only be '
- 'passed by\n'
- 'keyword arguments. Parameters before “"/"” are positional-only\n'
- 'parameters and may only be passed by positional arguments.\n'
- '\n'
- 'Changed in version 3.8: The "/" function parameter syntax may be '
- 'used\n'
- 'to indicate positional-only parameters. See **PEP 570** for '
- 'details.\n'
- '\n'
+ 'passed by\n'
+ 'keyword arguments. Parameters before “"/"” are positional-only\n'
+ 'parameters and may only be passed by positional arguments.\n'
+ '\n'
+ 'Changed in version 3.8: The "/" function parameter syntax may be '
+ 'used\n'
+ 'to indicate positional-only parameters. See **PEP 570** for '
+ 'details.\n'
+ '\n'
'Parameters may have an *annotation* of the form “": '
'expression"”\n'
'following the parameter name. Any parameter may have an '
@@ -6095,26 +6095,26 @@ topics = {'assert': 'The "assert" statement\n'
'defined.\n'
' See section The import statement.\n'
'\n'
- ' Note:\n'
- '\n'
- ' The name "_" is often used in conjunction with\n'
+ ' Note:\n'
+ '\n'
+ ' The name "_" is often used in conjunction with\n'
' internationalization; refer to the documentation for the\n'
' "gettext" module for more information on this '
'convention.\n'
'\n'
'"__*__"\n'
- ' System-defined names, informally known as “dunder” names. '
- 'These\n'
- ' names are defined by the interpreter and its '
- 'implementation\n'
- ' (including the standard library). Current system names are\n'
- ' discussed in the Special method names section and '
- 'elsewhere. More\n'
- ' will likely be defined in future versions of Python. *Any* '
- 'use of\n'
- ' "__*__" names, in any context, that does not follow '
- 'explicitly\n'
- ' documented use, is subject to breakage without warning.\n'
+ ' System-defined names, informally known as “dunder” names. '
+ 'These\n'
+ ' names are defined by the interpreter and its '
+ 'implementation\n'
+ ' (including the standard library). Current system names are\n'
+ ' discussed in the Special method names section and '
+ 'elsewhere. More\n'
+ ' will likely be defined in future versions of Python. *Any* '
+ 'use of\n'
+ ' "__*__" names, in any context, that does not follow '
+ 'explicitly\n'
+ ' documented use, is subject to breakage without warning.\n'
'\n'
'"__*"\n'
' Class-private names. Names in this category, when used '
@@ -6201,8 +6201,8 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'A non-normative HTML file listing all valid identifier '
'characters for\n'
- 'Unicode 13.0.0 can be found at\n'
- 'https://www.unicode.org/Public/13.0.0/ucd/DerivedCoreProperties.txt\n'
+ 'Unicode 13.0.0 can be found at\n'
+ 'https://www.unicode.org/Public/13.0.0/ucd/DerivedCoreProperties.txt\n'
'\n'
'\n'
'Keywords\n'
@@ -6243,28 +6243,28 @@ topics = {'assert': 'The "assert" statement\n'
'defined.\n'
' See section The import statement.\n'
'\n'
- ' Note:\n'
- '\n'
- ' The name "_" is often used in conjunction with\n'
+ ' Note:\n'
+ '\n'
+ ' The name "_" is often used in conjunction with\n'
' internationalization; refer to the documentation for '
'the\n'
' "gettext" module for more information on this '
'convention.\n'
'\n'
'"__*__"\n'
- ' System-defined names, informally known as “dunder” names. '
- 'These\n'
- ' names are defined by the interpreter and its '
- 'implementation\n'
- ' (including the standard library). Current system names '
- 'are\n'
- ' discussed in the Special method names section and '
- 'elsewhere. More\n'
- ' will likely be defined in future versions of Python. '
- '*Any* use of\n'
- ' "__*__" names, in any context, that does not follow '
- 'explicitly\n'
- ' documented use, is subject to breakage without warning.\n'
+ ' System-defined names, informally known as “dunder” names. '
+ 'These\n'
+ ' names are defined by the interpreter and its '
+ 'implementation\n'
+ ' (including the standard library). Current system names '
+ 'are\n'
+ ' discussed in the Special method names section and '
+ 'elsewhere. More\n'
+ ' will likely be defined in future versions of Python. '
+ '*Any* use of\n'
+ ' "__*__" names, in any context, that does not follow '
+ 'explicitly\n'
+ ' documented use, is subject to breakage without warning.\n'
'\n'
'"__*"\n'
' Class-private names. Names in this category, when used '
@@ -6279,8 +6279,8 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'The "if" statement is used for conditional execution:\n'
'\n'
- ' if_stmt ::= "if" assignment_expression ":" suite\n'
- ' ("elif" assignment_expression ":" suite)*\n'
+ ' if_stmt ::= "if" assignment_expression ":" suite\n'
+ ' ("elif" assignment_expression ":" suite)*\n'
' ["else" ":" suite]\n'
'\n'
'It selects exactly one of the suites by evaluating the expressions '
@@ -6321,7 +6321,7 @@ topics = {'assert': 'The "assert" statement\n'
' | "from" relative_module "import" "(" '
'identifier ["as" identifier]\n'
' ("," identifier ["as" identifier])* [","] ")"\n'
- ' | "from" relative_module "import" "*"\n'
+ ' | "from" relative_module "import" "*"\n'
' module ::= (identifier ".")* identifier\n'
' relative_module ::= "."* module | "."+\n'
'\n'
@@ -6330,9 +6330,9 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'1. find a module, loading and initializing it if necessary\n'
'\n'
- '2. define a name or names in the local namespace for the scope '
- 'where\n'
- ' the "import" statement occurs.\n'
+ '2. define a name or names in the local namespace for the scope '
+ 'where\n'
+ ' the "import" statement occurs.\n'
'\n'
'When the statement contains multiple clauses (separated by commas) '
'the\n'
@@ -6358,9 +6358,9 @@ topics = {'assert': 'The "assert" statement\n'
'made\n'
'available in the local namespace in one of three ways:\n'
'\n'
- '* If the module name is followed by "as", then the name following '
- '"as"\n'
- ' is bound directly to the imported module.\n'
+ '* If the module name is followed by "as", then the name following '
+ '"as"\n'
+ ' is bound directly to the imported module.\n'
'\n'
'* If no other name is specified, and the module being imported is '
'a\n'
@@ -6460,18 +6460,18 @@ topics = {'assert': 'The "assert" statement\n'
'end up importing "pkg.mod". If you execute "from ..subpkg2 import '
'mod"\n'
'from within "pkg.subpkg1" you will import "pkg.subpkg2.mod". The\n'
- 'specification for relative imports is contained in the Package\n'
- 'Relative Imports section.\n'
+ 'specification for relative imports is contained in the Package\n'
+ 'Relative Imports section.\n'
'\n'
'"importlib.import_module()" is provided to support applications '
'that\n'
'determine dynamically the modules to be loaded.\n'
'\n'
- 'Raises an auditing event "import" with arguments "module", '
- '"filename",\n'
- '"sys.path", "sys.meta_path", "sys.path_hooks".\n'
- '\n'
+ 'Raises an auditing event "import" with arguments "module", '
+ '"filename",\n'
+ '"sys.path", "sys.meta_path", "sys.path_hooks".\n'
'\n'
+ '\n'
'Future statements\n'
'=================\n'
'\n'
@@ -6509,8 +6509,8 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'* other future statements.\n'
'\n'
- 'The only feature that requires using the future statement is\n'
- '"annotations" (see **PEP 563**).\n'
+ 'The only feature that requires using the future statement is\n'
+ '"annotations" (see **PEP 563**).\n'
'\n'
'All historical features enabled by the future statement are still\n'
'recognized by Python 3. The list includes "absolute_import",\n'
@@ -6602,19 +6602,19 @@ topics = {'assert': 'The "assert" statement\n'
'"False" otherwise.\n'
'\n'
'For user-defined classes which do not define "__contains__()" but do\n'
- 'define "__iter__()", "x in y" is "True" if some value "z", for which\n'
- 'the expression "x is z or x == z" is true, is produced while '
- 'iterating\n'
- 'over "y". If an exception is raised during the iteration, it is as if\n'
- '"in" raised that exception.\n'
+ 'define "__iter__()", "x in y" is "True" if some value "z", for which\n'
+ 'the expression "x is z or x == z" is true, is produced while '
+ 'iterating\n'
+ 'over "y". If an exception is raised during the iteration, it is as if\n'
+ '"in" raised that exception.\n'
'\n'
'Lastly, the old-style iteration protocol is tried: if a class defines\n'
'"__getitem__()", "x in y" is "True" if and only if there is a non-\n'
- 'negative integer index *i* such that "x is y[i] or x == y[i]", and no\n'
- 'lower integer index raises the "IndexError" exception. (If any other\n'
+ 'negative integer index *i* such that "x is y[i] or x == y[i]", and no\n'
+ 'lower integer index raises the "IndexError" exception. (If any other\n'
'exception is raised, it is as if "in" raised that exception).\n'
'\n'
- 'The operator "not in" is defined to have the inverse truth value of\n'
+ 'The operator "not in" is defined to have the inverse truth value of\n'
'"in".\n',
'integers': 'Integer literals\n'
'****************\n'
@@ -6665,7 +6665,7 @@ topics = {'assert': 'The "assert" statement\n'
'lambda': 'Lambdas\n'
'*******\n'
'\n'
- ' lambda_expr ::= "lambda" [parameter_list] ":" expression\n'
+ ' lambda_expr ::= "lambda" [parameter_list] ":" expression\n'
'\n'
'Lambda expressions (sometimes called lambda forms) are used to '
'create\n'
@@ -6793,8 +6793,8 @@ topics = {'assert': 'The "assert" statement\n'
'operations.\n'
'\n'
'If the "global" statement occurs within a block, all uses of the '
- 'names\n'
- 'specified in the statement refer to the bindings of those names in '
+ 'names\n'
+ 'specified in the statement refer to the bindings of those names in '
'the\n'
'top-level namespace. Names are resolved in the top-level '
'namespace by\n'
@@ -6803,9 +6803,9 @@ topics = {'assert': 'The "assert" statement\n'
'namespace\n'
'of the module "builtins". The global namespace is searched '
'first. If\n'
- 'the names are not found there, the builtins namespace is '
- 'searched.\n'
- 'The "global" statement must precede all uses of the listed names.\n'
+ 'the names are not found there, the builtins namespace is '
+ 'searched.\n'
+ 'The "global" statement must precede all uses of the listed names.\n'
'\n'
'The "global" statement has the same scope as a name binding '
'operation\n'
@@ -6945,7 +6945,7 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'Note that numeric literals do not include a sign; a phrase like '
'"-1"\n'
- 'is actually an expression composed of the unary operator ‘"-"’ '
+ 'is actually an expression composed of the unary operator ‘"-"’ '
'and the\n'
'literal "1".\n',
'numeric-types': 'Emulating numeric types\n'
@@ -7006,7 +7006,7 @@ topics = {'assert': 'The "assert" statement\n'
'object.__rfloordiv__(self, other)\n'
'object.__rmod__(self, other)\n'
'object.__rdivmod__(self, other)\n'
- 'object.__rpow__(self, other[, modulo])\n'
+ 'object.__rpow__(self, other[, modulo])\n'
'object.__rlshift__(self, other)\n'
'object.__rrshift__(self, other)\n'
'object.__rand__(self, other)\n'
@@ -7035,19 +7035,19 @@ topics = {'assert': 'The "assert" statement\n'
'"__rpow__()" (the\n'
' coercion rules would become too complicated).\n'
'\n'
- ' Note:\n'
- '\n'
- ' If the right operand’s type is a subclass of the left '
- 'operand’s\n'
- ' type and that subclass provides a different '
- 'implementation of the\n'
- ' reflected method for the operation, this method will '
- 'be called\n'
- ' before the left operand’s non-reflected method. This '
- 'behavior\n'
- ' allows subclasses to override their ancestors’ '
- 'operations.\n'
- '\n'
+ ' Note:\n'
+ '\n'
+ ' If the right operand’s type is a subclass of the left '
+ 'operand’s\n'
+ ' type and that subclass provides a different '
+ 'implementation of the\n'
+ ' reflected method for the operation, this method will '
+ 'be called\n'
+ ' before the left operand’s non-reflected method. This '
+ 'behavior\n'
+ ' allows subclasses to override their ancestors’ '
+ 'operations.\n'
+ '\n'
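A small sketch of the subclass rule in the note above (Base and Sub are invented names):

    >>> class Base:
    ...     def __add__(self, other):
    ...         return 'Base.__add__'
    ...     def __radd__(self, other):
    ...         return 'Base.__radd__'
    ...
    >>> class Sub(Base):
    ...     def __radd__(self, other):
    ...         return 'Sub.__radd__'
    ...
    >>> Base() + Sub()    # the subclass's reflected method runs first
    'Sub.__radd__'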
'object.__iadd__(self, other)\n'
'object.__isub__(self, other)\n'
'object.__imul__(self, other)\n'
@@ -7090,16 +7090,16 @@ topics = {'assert': 'The "assert" statement\n'
'the data\n'
' model.\n'
'\n'
- ' Note:\n'
- '\n'
- ' Due to a bug in the dispatching mechanism for "**=", a '
- 'class that\n'
- ' defines "__ipow__()" but returns "NotImplemented" '
- 'would fail to\n'
- ' fall back to "x.__pow__(y)" and "y.__rpow__(x)". This '
- 'bug is\n'
- ' fixed in Python 3.10.\n'
- '\n'
+ ' Note:\n'
+ '\n'
+ ' Due to a bug in the dispatching mechanism for "**=", a '
+ 'class that\n'
+ ' defines "__ipow__()" but returns "NotImplemented" '
+ 'would fail to\n'
+ ' fall back to "x.__pow__(y)" and "y.__rpow__(x)". This '
+ 'bug is\n'
+ ' fixed in Python 3.10.\n'
+ '\n'
'object.__neg__(self)\n'
'object.__pos__(self)\n'
'object.__abs__(self)\n'
@@ -7130,11 +7130,11 @@ topics = {'assert': 'The "assert" statement\n'
'numeric\n'
' object is an integer type. Must return an integer.\n'
'\n'
- ' If "__int__()", "__float__()" and "__complex__()" are '
- 'not defined\n'
- ' then corresponding built-in functions "int()", "float()" '
- 'and\n'
- ' "complex()" fall back to "__index__()".\n'
+ ' If "__int__()", "__float__()" and "__complex__()" are '
+ 'not defined\n'
+ ' then corresponding built-in functions "int()", "float()" '
+ 'and\n'
+ ' "complex()" fall back to "__index__()".\n'
'\n'
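For instance, a class that defines only "__index__()" (the name Handle is invented):

    >>> class Handle:
    ...     def __index__(self):
    ...         return 7
    ...
    >>> int(Handle())     # int() falls back to __index__()
    7
    >>> float(Handle())   # so do float() and complex()
    7.0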
'object.__round__(self[, ndigits])\n'
'object.__trunc__(self)\n'
@@ -7150,9 +7150,9 @@ topics = {'assert': 'The "assert" statement\n'
' of the object truncated to an "Integral" (typically an '
'"int").\n'
'\n'
- ' The built-in function "int()" falls back to '
- '"__trunc__()" if\n'
- ' neither "__int__()" nor "__index__()" is defined.\n',
+ ' The built-in function "int()" falls back to '
+ '"__trunc__()" if\n'
+ ' neither "__int__()" nor "__index__()" is defined.\n',
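A quick sketch of that last fallback (the class name is invented):

    >>> class Truncish:
    ...     def __trunc__(self):
    ...         return 3
    ...
    >>> int(Truncish())   # neither __int__ nor __index__ is defined
    3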
'objects': 'Objects, values and types\n'
'*************************\n'
'\n'
@@ -7161,7 +7161,7 @@ topics = {'assert': 'The "assert" statement\n'
'program is represented by objects or by relations between '
'objects. (In\n'
'a sense, and in conformance to Von Neumann’s model of a “stored\n'
- 'program computer”, code is also represented by objects.)\n'
+ 'program computer”, code is also represented by objects.)\n'
'\n'
'Every object has an identity, a type and a value. An object’s\n'
'*identity* never changes once it has been created; you may think '
@@ -7286,8 +7286,8 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'The following table summarizes the operator precedence '
'in Python, from\n'
- 'highest precedence (most binding) to lowest precedence '
- '(least\n'
+ 'highest precedence (most binding) to lowest precedence '
+ '(least\n'
'binding). Operators in the same box have the same '
'precedence. Unless\n'
'the syntax is explicitly given, operators are binary. '
@@ -7305,79 +7305,79 @@ topics = {'assert': 'The "assert" statement\n'
'+-------------------------------------------------+---------------------------------------+\n'
'| Operator | '
'Description |\n'
- '|=================================================|=======================================|\n'
- '| "(expressions...)", "[expressions...]", "{key: | '
- 'Binding or parenthesized expression, |\n'
- '| value...}", "{expressions...}" | list '
- 'display, dictionary display, set |\n'
- '| | '
- 'display |\n'
- '+-------------------------------------------------+---------------------------------------+\n'
- '| "x[index]", "x[index:index]", | '
- 'Subscription, slicing, call, |\n'
- '| "x(arguments...)", "x.attribute" | '
- 'attribute reference |\n'
+ '|=================================================|=======================================|\n'
+ '| "(expressions...)", "[expressions...]", "{key: | '
+ 'Binding or parenthesized expression, |\n'
+ '| value...}", "{expressions...}" | list '
+ 'display, dictionary display, set |\n'
+ '| | '
+ 'display |\n'
+ '+-------------------------------------------------+---------------------------------------+\n'
+ '| "x[index]", "x[index:index]", | '
+ 'Subscription, slicing, call, |\n'
+ '| "x(arguments...)", "x.attribute" | '
+ 'attribute reference |\n'
'+-------------------------------------------------+---------------------------------------+\n'
- '| "await" "x" | '
- 'Await expression |\n'
+ '| "await" "x" | '
+ 'Await expression |\n'
'+-------------------------------------------------+---------------------------------------+\n'
- '| "**" | '
- 'Exponentiation [5] |\n'
+ '| "**" | '
+ 'Exponentiation [5] |\n'
'+-------------------------------------------------+---------------------------------------+\n'
- '| "+x", "-x", "~x" | '
- 'Positive, negative, bitwise NOT |\n'
+ '| "+x", "-x", "~x" | '
+ 'Positive, negative, bitwise NOT |\n'
'+-------------------------------------------------+---------------------------------------+\n'
- '| "*", "@", "/", "//", "%" | '
- 'Multiplication, matrix |\n'
- '| | '
- 'multiplication, division, floor |\n'
- '| | '
- 'division, remainder [6] |\n'
+ '| "*", "@", "/", "//", "%" | '
+ 'Multiplication, matrix |\n'
+ '| | '
+ 'multiplication, division, floor |\n'
+ '| | '
+ 'division, remainder [6] |\n'
'+-------------------------------------------------+---------------------------------------+\n'
- '| "+", "-" | '
- 'Addition and subtraction |\n'
+ '| "+", "-" | '
+ 'Addition and subtraction |\n'
'+-------------------------------------------------+---------------------------------------+\n'
- '| "<<", ">>" | '
- 'Shifts |\n'
- '+-------------------------------------------------+---------------------------------------+\n'
- '| "&" | '
- 'Bitwise AND |\n'
+ '| "<<", ">>" | '
+ 'Shifts |\n'
'+-------------------------------------------------+---------------------------------------+\n'
+ '| "&" | '
+ 'Bitwise AND |\n'
+ '+-------------------------------------------------+---------------------------------------+\n'
'| "^" | '
'Bitwise XOR |\n'
'+-------------------------------------------------+---------------------------------------+\n'
- '| "|" | '
- 'Bitwise OR |\n'
+ '| "|" | '
+ 'Bitwise OR |\n'
'+-------------------------------------------------+---------------------------------------+\n'
- '| "in", "not in", "is", "is not", "<", "<=", ">", | '
- 'Comparisons, including membership |\n'
- '| ">=", "!=", "==" | '
- 'tests and identity tests |\n'
+ '| "in", "not in", "is", "is not", "<", "<=", ">", | '
+ 'Comparisons, including membership |\n'
+ '| ">=", "!=", "==" | '
+ 'tests and identity tests |\n'
'+-------------------------------------------------+---------------------------------------+\n'
- '| "not" "x" | '
- 'Boolean NOT |\n'
+ '| "not" "x" | '
+ 'Boolean NOT |\n'
'+-------------------------------------------------+---------------------------------------+\n'
- '| "and" | '
- 'Boolean AND |\n'
+ '| "and" | '
+ 'Boolean AND |\n'
'+-------------------------------------------------+---------------------------------------+\n'
- '| "or" | '
- 'Boolean OR |\n'
+ '| "or" | '
+ 'Boolean OR |\n'
'+-------------------------------------------------+---------------------------------------+\n'
- '| "if" – "else" | '
- 'Conditional expression |\n'
+ '| "if" – "else" | '
+ 'Conditional expression |\n'
'+-------------------------------------------------+---------------------------------------+\n'
- '| "lambda" | '
- 'Lambda expression |\n'
+ '| "lambda" | '
+ 'Lambda expression |\n'
'+-------------------------------------------------+---------------------------------------+\n'
- '| ":=" | '
- 'Assignment expression |\n'
+ '| ":=" | '
+ 'Assignment expression |\n'
'+-------------------------------------------------+---------------------------------------+\n'
'\n'
'-[ Footnotes ]-\n'
'\n'
'[1] While "abs(x%y) < abs(y)" is true mathematically, '
- 'for floats it\n'
- ' may not be true numerically due to roundoff. For '
+ 'for floats it\n'
+ ' may not be true numerically due to roundoff. For '
'example, and\n'
' assuming a platform on which a Python float is an '
'IEEE 754 double-\n'
@@ -7442,23 +7442,23 @@ topics = {'assert': 'The "assert" statement\n'
'"unicodedata.normalize()".\n'
'\n'
'[4] Due to automatic garbage-collection, free lists, and '
- 'the dynamic\n'
- ' nature of descriptors, you may notice seemingly '
- 'unusual behaviour\n'
- ' in certain uses of the "is" operator, like those '
- 'involving\n'
- ' comparisons between instance methods, or constants. '
- 'Check their\n'
- ' documentation for more info.\n'
- '\n'
- '[5] The power operator "**" binds less tightly than an '
- 'arithmetic or\n'
- ' bitwise unary operator on its right, that is, '
- '"2**-1" is "0.5".\n'
+ 'the dynamic\n'
+ ' nature of descriptors, you may notice seemingly '
+ 'unusual behaviour\n'
+ ' in certain uses of the "is" operator, like those '
+ 'involving\n'
+ ' comparisons between instance methods, or constants. '
+ 'Check their\n'
+ ' documentation for more info.\n'
'\n'
- '[6] The "%" operator is also used for string formatting; '
- 'the same\n'
- ' precedence applies.\n',
+ '[5] The power operator "**" binds less tightly than an '
+ 'arithmetic or\n'
+ ' bitwise unary operator on its right, that is, '
+ '"2**-1" is "0.5".\n'
+ '\n'
+ '[6] The "%" operator is also used for string formatting; '
+ 'the same\n'
+ ' precedence applies.\n',
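Footnotes [5] and [6] can be checked directly in the REPL:

    >>> 2**-1            # ** binds less tightly than the unary minus on its right
    0.5
    >>> -2**2            # but tighter than the one on its left: -(2**2)
    -4
    >>> '%d! ' % 3 * 2   # % has the same precedence as *: ('%d! ' % 3) * 2
    '3! 3! '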
'pass': 'The "pass" statement\n'
'********************\n'
'\n'
@@ -7506,10 +7506,10 @@ topics = {'assert': 'The "assert" statement\n'
'"ZeroDivisionError".\n'
'Raising a negative number to a fractional power results in a '
'"complex"\n'
- 'number. (In earlier versions it raised a "ValueError".)\n'
- '\n'
- 'This operation can be customized using the special "__pow__()" '
- 'method.\n',
+ 'number. (In earlier versions it raised a "ValueError".)\n'
+ '\n'
+ 'This operation can be customized using the special "__pow__()" '
+ 'method.\n',
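For example:

    >>> 2 ** -2               # negative exponent yields a float
    0.25
    >>> type((-1) ** 0.5)     # fractional power of a negative number
    <class 'complex'>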
'raise': 'The "raise" statement\n'
'*********************\n'
'\n'
@@ -7546,18 +7546,18 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'The "from" clause is used for exception chaining: if given, the '
'second\n'
- '*expression* must be another exception class or instance. If the\n'
- 'second expression is an exception instance, it will be attached to '
- 'the\n'
- 'raised exception as the "__cause__" attribute (which is writable). '
- 'If\n'
- 'the expression is an exception class, the class will be '
- 'instantiated\n'
- 'and the resulting exception instance will be attached to the '
- 'raised\n'
- 'exception as the "__cause__" attribute. If the raised exception is '
- 'not\n'
- 'handled, both exceptions will be printed:\n'
+ '*expression* must be another exception class or instance. If the\n'
+ 'second expression is an exception instance, it will be attached to '
+ 'the\n'
+ 'raised exception as the "__cause__" attribute (which is writable). '
+ 'If\n'
+ 'the expression is an exception class, the class will be '
+ 'instantiated\n'
+ 'and the resulting exception instance will be attached to the '
+ 'raised\n'
+ 'exception as the "__cause__" attribute. If the raised exception is '
+ 'not\n'
+ 'handled, both exceptions will be printed:\n'
'\n'
' >>> try:\n'
' ... print(1 / 0)\n'
@@ -7661,62 +7661,62 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'The following methods can be defined to implement '
'container objects.\n'
- 'Containers usually are *sequences* (such as "lists" or '
- '"tuples") or\n'
- '*mappings* (like "dictionaries"), but can represent other '
- 'containers\n'
- 'as well. The first set of methods is used either to '
- 'emulate a\n'
- 'sequence or to emulate a mapping; the difference is that '
- 'for a\n'
- 'sequence, the allowable keys should be the integers *k* '
- 'for which "0\n'
- '<= k < N" where *N* is the length of the sequence, or '
- '"slice" objects,\n'
- 'which define a range of items. It is also recommended '
- 'that mappings\n'
- 'provide the methods "keys()", "values()", "items()", '
- '"get()",\n'
- '"clear()", "setdefault()", "pop()", "popitem()", "copy()", '
- 'and\n'
- '"update()" behaving similar to those for Python’s '
- 'standard\n'
- '"dictionary" objects. The "collections.abc" module '
- 'provides a\n'
- '"MutableMapping" *abstract base class* to help create '
- 'those methods\n'
- 'from a base set of "__getitem__()", "__setitem__()", '
- '"__delitem__()",\n'
- 'and "keys()". Mutable sequences should provide methods '
- '"append()",\n'
- '"count()", "index()", "extend()", "insert()", "pop()", '
- '"remove()",\n'
- '"reverse()" and "sort()", like Python standard "list" '
+ 'Containers usually are *sequences* (such as "lists" or '
+ '"tuples") or\n'
+ '*mappings* (like "dictionaries"), but can represent other '
+ 'containers\n'
+ 'as well. The first set of methods is used either to '
+ 'emulate a\n'
+ 'sequence or to emulate a mapping; the difference is that '
+ 'for a\n'
+ 'sequence, the allowable keys should be the integers *k* '
+ 'for which "0\n'
+ '<= k < N" where *N* is the length of the sequence, or '
+ '"slice" objects,\n'
+ 'which define a range of items. It is also recommended '
+ 'that mappings\n'
+ 'provide the methods "keys()", "values()", "items()", '
+ '"get()",\n'
+ '"clear()", "setdefault()", "pop()", "popitem()", "copy()", '
+ 'and\n'
+ '"update()" behaving similar to those for Python’s '
+ 'standard\n'
+ '"dictionary" objects. The "collections.abc" module '
+ 'provides a\n'
+ '"MutableMapping" *abstract base class* to help create '
+ 'those methods\n'
+ 'from a base set of "__getitem__()", "__setitem__()", '
+ '"__delitem__()",\n'
+ 'and "keys()". Mutable sequences should provide methods '
+ '"append()",\n'
+ '"count()", "index()", "extend()", "insert()", "pop()", '
+ '"remove()",\n'
+ '"reverse()" and "sort()", like Python standard "list" '
'objects.\n'
- 'Finally, sequence types should implement addition '
+ 'Finally, sequence types should implement addition '
'(meaning\n'
- 'concatenation) and multiplication (meaning repetition) by '
- 'defining the\n'
- 'methods "__add__()", "__radd__()", "__iadd__()", '
- '"__mul__()",\n'
- '"__rmul__()" and "__imul__()" described below; they should '
- 'not define\n'
- 'other numerical operators. It is recommended that both '
- 'mappings and\n'
- 'sequences implement the "__contains__()" method to allow '
- 'efficient use\n'
- 'of the "in" operator; for mappings, "in" should search the '
- 'mapping’s\n'
- 'keys; for sequences, it should search through the values. '
+ 'concatenation) and multiplication (meaning repetition) by '
+ 'defining the\n'
+ 'methods "__add__()", "__radd__()", "__iadd__()", '
+ '"__mul__()",\n'
+ '"__rmul__()" and "__imul__()" described below; they should '
+ 'not define\n'
+ 'other numerical operators. It is recommended that both '
+ 'mappings and\n'
+ 'sequences implement the "__contains__()" method to allow '
+ 'efficient use\n'
+ 'of the "in" operator; for mappings, "in" should search the '
+ 'mapping’s\n'
+ 'keys; for sequences, it should search through the values. '
'It is\n'
- 'further recommended that both mappings and sequences '
- 'implement the\n'
- '"__iter__()" method to allow efficient iteration through '
+ 'further recommended that both mappings and sequences '
+ 'implement the\n'
+ '"__iter__()" method to allow efficient iteration through '
'the\n'
- 'container; for mappings, "__iter__()" should iterate '
- 'through the\n'
- 'object’s keys; for sequences, it should iterate through '
- 'the values.\n'
+ 'container; for mappings, "__iter__()" should iterate '
+ 'through the\n'
+ 'object’s keys; for sequences, it should iterate through '
+ 'the values.\n'
'\n'
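A minimal sketch of a mapping built the recommended way, deriving the full method set from "MutableMapping" (the class name LowerDict is invented):

    >>> from collections.abc import MutableMapping
    >>> class LowerDict(MutableMapping):
    ...     '''Mapping that lowercases its string keys.'''
    ...     def __init__(self):
    ...         self._data = {}
    ...     def __getitem__(self, key):
    ...         return self._data[key.lower()]
    ...     def __setitem__(self, key, value):
    ...         self._data[key.lower()] = value
    ...     def __delitem__(self, key):
    ...         del self._data[key.lower()]
    ...     def __iter__(self):
    ...         return iter(self._data)
    ...     def __len__(self):
    ...         return len(self._data)
    ...
    >>> d = LowerDict()
    >>> d['Spam'] = 1
    >>> d['SPAM']             # get(), items(), update() etc. come for free
    1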
'object.__len__(self)\n'
'\n'
@@ -7746,23 +7746,23 @@ topics = {'assert': 'The "assert" statement\n'
' estimated length for the object (which may be greater '
'or less than\n'
' the actual length). The length must be an integer ">=" '
- '0. The\n'
- ' return value may also be "NotImplemented", which is '
- 'treated the\n'
- ' same as if the "__length_hint__" method didn’t exist at '
- 'all. This\n'
+ '0. The\n'
+ ' return value may also be "NotImplemented", which is '
+ 'treated the\n'
+ ' same as if the "__length_hint__" method didn’t exist at '
+ 'all. This\n'
' method is purely an optimization and is never required '
'for\n'
' correctness.\n'
'\n'
' New in version 3.4.\n'
'\n'
- 'Note:\n'
- '\n'
- ' Slicing is done exclusively with the following three '
- 'methods. A\n'
- ' call like\n'
+ 'Note:\n'
'\n'
+ ' Slicing is done exclusively with the following three '
+ 'methods. A\n'
+ ' call like\n'
+ '\n'
' a[1:2] = b\n'
'\n'
' is translated to\n'
@@ -7775,42 +7775,42 @@ topics = {'assert': 'The "assert" statement\n'
'object.__getitem__(self, key)\n'
'\n'
' Called to implement evaluation of "self[key]". For '
- '*sequence*\n'
- ' types, the accepted keys should be integers and slice '
- 'objects.\n'
- ' Note that the special interpretation of negative '
- 'indexes (if the\n'
- ' class wishes to emulate a *sequence* type) is up to '
+ '*sequence*\n'
+ ' types, the accepted keys should be integers and slice '
+ 'objects.\n'
+ ' Note that the special interpretation of negative '
+ 'indexes (if the\n'
+ ' class wishes to emulate a *sequence* type) is up to '
'the\n'
- ' "__getitem__()" method. If *key* is of an inappropriate '
- 'type,\n'
- ' "TypeError" may be raised; if of a value outside the '
- 'set of indexes\n'
- ' for the sequence (after any special interpretation of '
- 'negative\n'
- ' values), "IndexError" should be raised. For *mapping* '
- 'types, if\n'
- ' *key* is missing (not in the container), "KeyError" '
- 'should be\n'
- ' raised.\n'
- '\n'
- ' Note:\n'
- '\n'
- ' "for" loops expect that an "IndexError" will be '
+ ' "__getitem__()" method. If *key* is of an inappropriate '
+ 'type,\n'
+ ' "TypeError" may be raised; if of a value outside the '
+ 'set of indexes\n'
+ ' for the sequence (after any special interpretation of '
+ 'negative\n'
+ ' values), "IndexError" should be raised. For *mapping* '
+ 'types, if\n'
+ ' *key* is missing (not in the container), "KeyError" '
+ 'should be\n'
+ ' raised.\n'
+ '\n'
+ ' Note:\n'
+ '\n'
+ ' "for" loops expect that an "IndexError" will be '
'raised for\n'
' illegal indexes to allow proper detection of the end '
'of the\n'
' sequence.\n'
'\n'
- ' Note:\n'
- '\n'
- ' When subscripting a *class*, the special class '
- 'method\n'
- ' "__class_getitem__()" may be called instead of '
- '"__getitem__()".\n'
- ' See __class_getitem__ versus __getitem__ for more '
- 'details.\n'
- '\n'
+ ' Note:\n'
+ '\n'
+ ' When subscripting a *class*, the special class '
+ 'method\n'
+ ' "__class_getitem__()" may be called instead of '
+ '"__getitem__()".\n'
+ ' See __class_getitem__ versus __getitem__ for more '
+ 'details.\n'
+ '\n'
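For example, the end-of-sequence convention from the first note above (Squares is an invented name):

    >>> class Squares:
    ...     def __getitem__(self, i):
    ...         if i >= 5:
    ...             raise IndexError     # tells "for" the sequence is over
    ...         return i * i
    ...
    >>> list(Squares())
    [0, 1, 4, 9, 16]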
'object.__setitem__(self, key, value)\n'
'\n'
' Called to implement assignment to "self[key]". Same '
@@ -7883,12 +7883,12 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'The membership test operators ("in" and "not in") are '
'normally\n'
- 'implemented as an iteration through a container. However, '
+ 'implemented as an iteration through a container. However, '
'container\n'
'objects can supply the following special method with a '
'more efficient\n'
'implementation, which also does not require the object be '
- 'iterable.\n'
+ 'iterable.\n'
'\n'
'object.__contains__(self, item)\n'
'\n'
@@ -7921,10 +7921,10 @@ topics = {'assert': 'The "assert" statement\n'
'the\n'
'second argument.\n'
'\n'
- 'This operation can be customized using the special '
- '"__lshift__()" and\n'
- '"__rshift__()" methods.\n'
- '\n'
+ 'This operation can be customized using the special '
+ '"__lshift__()" and\n'
+ '"__rshift__()" methods.\n'
+ '\n'
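The built-in behaviour these methods customize, checked in the REPL:

    >>> 5 << 2            # multiplication by pow(2, 2)
    20
    >>> 5 >> 1            # floor division by pow(2, 1)
    2
    >>> 5 >> 1 == 5 // 2
    True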
'A right shift by *n* bits is defined as floor division by '
'"pow(2,n)".\n'
'A left shift by *n* bits is defined as multiplication with '
@@ -8037,7 +8037,7 @@ topics = {'assert': 'The "assert" statement\n'
'immediate\n'
' subclasses. This method returns a list of all those '
'references\n'
- ' still alive. The list is in definition order. Example:\n'
+ ' still alive. The list is in definition order. Example:\n'
'\n'
' >>> int.__subclasses__()\n'
" [<class 'bool'>]\n"
@@ -8045,26 +8045,26 @@ topics = {'assert': 'The "assert" statement\n'
'-[ Footnotes ]-\n'
'\n'
'[1] Additional information on these special methods may be '
- 'found in\n'
- ' the Python Reference Manual (Basic customization).\n'
+ 'found in\n'
+ ' the Python Reference Manual (Basic customization).\n'
'\n'
'[2] As a consequence, the list "[1, 2]" is considered equal '
- 'to "[1.0,\n'
- ' 2.0]", and similarly for tuples.\n'
+ 'to "[1.0,\n'
+ ' 2.0]", and similarly for tuples.\n'
'\n'
'[3] They must have since the parser can’t tell the type of '
'the\n'
' operands.\n'
'\n'
'[4] Cased characters are those with general category '
- 'property being\n'
- ' one of “Lu” (Letter, uppercase), “Ll” (Letter, '
- 'lowercase), or “Lt”\n'
- ' (Letter, titlecase).\n'
- '\n'
- '[5] To format only a tuple you should therefore provide a '
- 'singleton\n'
- ' tuple whose only element is the tuple to be formatted.\n',
+ 'property being\n'
+ ' one of “Lu” (Letter, uppercase), “Ll” (Letter, '
+ 'lowercase), or “Lt”\n'
+ ' (Letter, titlecase).\n'
+ '\n'
+ '[5] To format only a tuple you should therefore provide a '
+ 'singleton\n'
+ ' tuple whose only element is the tuple to be formatted.\n',
'specialnames': 'Special method names\n'
'********************\n'
'\n'
@@ -8137,15 +8137,15 @@ topics = {'assert': 'The "assert" statement\n'
'returning\n'
' it.\n'
'\n'
- ' If "__new__()" is invoked during object construction and '
- 'it returns\n'
- ' an instance of *cls*, then the new instance’s '
- '"__init__()" method\n'
- ' will be invoked like "__init__(self[, ...])", where '
- '*self* is the\n'
- ' new instance and the remaining arguments are the same as '
- 'were\n'
- ' passed to the object constructor.\n'
+ ' If "__new__()" is invoked during object construction and '
+ 'it returns\n'
+ ' an instance of *cls*, then the new instance’s '
+ '"__init__()" method\n'
+ ' will be invoked like "__init__(self[, ...])", where '
+ '*self* is the\n'
+ ' new instance and the remaining arguments are the same as '
+ 'were\n'
+ ' passed to the object constructor.\n'
'\n'
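A sketch of that call sequence (Widget is an invented name):

    >>> class Widget:
    ...     def __new__(cls, *args):
    ...         print('__new__ ran')
    ...         return super().__new__(cls)       # an instance of cls...
    ...     def __init__(self, label):
    ...         print('__init__ ran with', label)  # ...so __init__ is invoked
    ...
    >>> w = Widget('demo')
    __new__ ran
    __init__ ran with demo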
' If "__new__()" does not return an instance of *cls*, then '
'the new\n'
@@ -8209,9 +8209,9 @@ topics = {'assert': 'The "assert" statement\n'
'for\n'
' objects that still exist when the interpreter exits.\n'
'\n'
- ' Note:\n'
- '\n'
- ' "del x" doesn’t directly call "x.__del__()" — the '
+ ' Note:\n'
+ '\n'
+ ' "del x" doesn’t directly call "x.__del__()" — the '
'former\n'
' decrements the reference count for "x" by one, and the '
'latter is\n'
@@ -8235,16 +8235,16 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
' See also: Documentation for the "gc" module.\n'
'\n'
- ' Warning:\n'
- '\n'
- ' Due to the precarious circumstances under which '
- '"__del__()"\n'
- ' methods are invoked, exceptions that occur during their '
- 'execution\n'
- ' are ignored, and a warning is printed to "sys.stderr" '
- 'instead.\n'
- ' In particular:\n'
+ ' Warning:\n'
'\n'
+ ' Due to the precarious circumstances under which '
+ '"__del__()"\n'
+ ' methods are invoked, exceptions that occur during their '
+ 'execution\n'
+ ' are ignored, and a warning is printed to "sys.stderr" '
+ 'instead.\n'
+ ' In particular:\n'
+ '\n'
' * "__del__()" can be invoked when arbitrary code is '
'being\n'
' executed, including from any arbitrary thread. If '
@@ -8256,20 +8256,20 @@ topics = {'assert': 'The "assert" statement\n'
' that gets interrupted to execute "__del__()".\n'
'\n'
' * "__del__()" can be executed during interpreter '
- 'shutdown. As a\n'
- ' consequence, the global variables it needs to access '
- '(including\n'
- ' other modules) may already have been deleted or set '
- 'to "None".\n'
- ' Python guarantees that globals whose name begins with '
- 'a single\n'
- ' underscore are deleted from their module before other '
- 'globals\n'
- ' are deleted; if no other references to such globals '
- 'exist, this\n'
- ' may help in assuring that imported modules are still '
- 'available\n'
- ' at the time when the "__del__()" method is called.\n'
+ 'shutdown. As a\n'
+ ' consequence, the global variables it needs to access '
+ '(including\n'
+ ' other modules) may already have been deleted or set '
+ 'to "None".\n'
+ ' Python guarantees that globals whose name begins with '
+ 'a single\n'
+ ' underscore are deleted from their module before other '
+ 'globals\n'
+ ' are deleted; if no other references to such globals '
+ 'exist, this\n'
+ ' may help in assuring that imported modules are still '
+ 'available\n'
+ ' at the time when the "__del__()" method is called.\n'
'\n'
'object.__repr__(self)\n'
'\n'
@@ -8330,11 +8330,11 @@ topics = {'assert': 'The "assert" statement\n'
'"str.format()"\n'
' method, to produce a “formatted” string representation of '
'an\n'
- ' object. The *format_spec* argument is a string that '
+ ' object. The *format_spec* argument is a string that '
'contains a\n'
' description of the formatting options desired. The '
'interpretation\n'
- ' of the *format_spec* argument is up to the type '
+ ' of the *format_spec* argument is up to the type '
'implementing\n'
' "__format__()", however most classes will either '
'delegate\n'
@@ -8354,7 +8354,7 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
' Changed in version 3.7: "object.__format__(x, \'\')" is '
'now\n'
- ' equivalent to "str(x)" rather than "format(str(x), '
+ ' equivalent to "str(x)" rather than "format(str(x), '
'\'\')".\n'
'\n'
'object.__lt__(self, other)\n'
@@ -8388,21 +8388,21 @@ topics = {'assert': 'The "assert" statement\n'
' on the value to determine if the result is true or '
'false.\n'
'\n'
- ' By default, "object" implements "__eq__()" by using "is", '
- 'returning\n'
- ' "NotImplemented" in the case of a false comparison: "True '
- 'if x is y\n'
- ' else NotImplemented". For "__ne__()", by default it '
- 'delegates to\n'
- ' "__eq__()" and inverts the result unless it is '
- '"NotImplemented".\n'
- ' There are no other implied relationships among the '
- 'comparison\n'
- ' operators or default implementations; for example, the '
- 'truth of\n'
- ' "(x<y or x==y)" does not imply "x<=y". To automatically '
- 'generate\n'
- ' ordering operations from a single root operation, see\n'
+ ' By default, "object" implements "__eq__()" by using "is", '
+ 'returning\n'
+ ' "NotImplemented" in the case of a false comparison: "True '
+ 'if x is y\n'
+ ' else NotImplemented". For "__ne__()", by default it '
+ 'delegates to\n'
+ ' "__eq__()" and inverts the result unless it is '
+ '"NotImplemented".\n'
+ ' There are no other implied relationships among the '
+ 'comparison\n'
+ ' operators or default implementations; for example, the '
+ 'truth of\n'
+ ' "(x<y or x==y)" does not imply "x<=y". To automatically '
+ 'generate\n'
+ ' ordering operations from a single root operation, see\n'
' "functools.total_ordering()".\n'
'\n'
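For example, with "functools.total_ordering()" the remaining comparisons are derived from "__eq__()" and one ordering method (Version is an invented name):

    >>> from functools import total_ordering
    >>> @total_ordering
    ... class Version:
    ...     def __init__(self, n):
    ...         self.n = n
    ...     def __eq__(self, other):
    ...         return self.n == other.n
    ...     def __lt__(self, other):
    ...         return self.n < other.n
    ...
    >>> Version(1) <= Version(2)     # __le__ was generated automatically
    True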
' See the paragraph on "__hash__()" for some important '
@@ -8450,22 +8450,22 @@ topics = {'assert': 'The "assert" statement\n'
' def __hash__(self):\n'
' return hash((self.name, self.nick, self.color))\n'
'\n'
- ' Note:\n'
- '\n'
- ' "hash()" truncates the value returned from an object’s '
- 'custom\n'
- ' "__hash__()" method to the size of a "Py_ssize_t". '
- 'This is\n'
- ' typically 8 bytes on 64-bit builds and 4 bytes on '
- '32-bit builds.\n'
- ' If an object’s "__hash__()" must interoperate on '
- 'builds of\n'
- ' different bit sizes, be sure to check the width on all '
- 'supported\n'
- ' builds. An easy way to do this is with "python -c '
- '"import sys;\n'
- ' print(sys.hash_info.width)"".\n'
- '\n'
+ ' Note:\n'
+ '\n'
+ ' "hash()" truncates the value returned from an object’s '
+ 'custom\n'
+ ' "__hash__()" method to the size of a "Py_ssize_t". '
+ 'This is\n'
+ ' typically 8 bytes on 64-bit builds and 4 bytes on '
+ '32-bit builds.\n'
+ ' If an object’s "__hash__()" must interoperate on '
+ 'builds of\n'
+ ' different bit sizes, be sure to check the width on all '
+ 'supported\n'
+ ' builds. An easy way to do this is with "python -c '
+ '"import sys;\n'
+ ' print(sys.hash_info.width)"".\n'
+ '\n'
' If a class does not define an "__eq__()" method it should '
'not\n'
' define a "__hash__()" operation either; if it defines '
@@ -8520,22 +8520,22 @@ topics = {'assert': 'The "assert" statement\n'
' hashable by an "isinstance(obj, '
'collections.abc.Hashable)" call.\n'
'\n'
- ' Note:\n'
- '\n'
- ' By default, the "__hash__()" values of str and bytes '
- 'objects are\n'
- ' “salted” with an unpredictable random value. Although '
- 'they\n'
- ' remain constant within an individual Python process, '
- 'they are not\n'
- ' predictable between repeated invocations of Python. This '
- 'is\n'
- ' intended to provide protection against a '
- 'denial-of-service caused\n'
- ' by carefully-chosen inputs that exploit the worst case\n'
- ' performance of a dict insertion, O(n^2) complexity. '
- 'See\n'
- ' http://www.ocert.org/advisories/ocert-2011-003.html '
+ ' Note:\n'
+ '\n'
+ ' By default, the "__hash__()" values of str and bytes '
+ 'objects are\n'
+ ' “salted” with an unpredictable random value. Although '
+ 'they\n'
+ ' remain constant within an individual Python process, '
+ 'they are not\n'
+ ' predictable between repeated invocations of Python. This '
+ 'is\n'
+ ' intended to provide protection against a '
+ 'denial-of-service caused\n'
+ ' by carefully-chosen inputs that exploit the worst case\n'
+ ' performance of a dict insertion, O(n^2) complexity. '
+ 'See\n'
+ ' http://www.ocert.org/advisories/ocert-2011-003.html '
'for\n'
' details. Changing hash values affects the iteration '
'order of sets.\n'
@@ -8624,18 +8624,18 @@ topics = {'assert': 'The "assert" statement\n'
'needs, for\n'
' example, "object.__getattribute__(self, name)".\n'
'\n'
- ' Note:\n'
- '\n'
- ' This method may still be bypassed when looking up '
- 'special methods\n'
- ' as the result of implicit invocation via language '
- 'syntax or\n'
- ' built-in functions. See Special method lookup.\n'
- '\n'
- ' For certain sensitive attribute accesses, raises an '
- 'auditing event\n'
- ' "object.__getattr__" with arguments "obj" and "name".\n'
- '\n'
+ ' Note:\n'
+ '\n'
+ ' This method may still be bypassed when looking up '
+ 'special methods\n'
+ ' as the result of implicit invocation via language '
+ 'syntax or\n'
+ ' built-in functions. See Special method lookup.\n'
+ '\n'
+ ' For certain sensitive attribute accesses, raises an '
+ 'auditing event\n'
+ ' "object.__getattr__" with arguments "obj" and "name".\n'
+ '\n'
'object.__setattr__(self, name, value)\n'
'\n'
' Called when an attribute assignment is attempted. This '
@@ -8652,11 +8652,11 @@ topics = {'assert': 'The "assert" statement\n'
'example,\n'
' "object.__setattr__(self, name, value)".\n'
'\n'
- ' For certain sensitive attribute assignments, raises an '
- 'auditing\n'
- ' event "object.__setattr__" with arguments "obj", "name", '
- '"value".\n'
- '\n'
+ ' For certain sensitive attribute assignments, raises an '
+ 'auditing\n'
+ ' event "object.__setattr__" with arguments "obj", "name", '
+ '"value".\n'
+ '\n'
'object.__delattr__(self, name)\n'
'\n'
' Like "__setattr__()" but for attribute deletion instead '
@@ -8665,10 +8665,10 @@ topics = {'assert': 'The "assert" statement\n'
'obj.name" is\n'
' meaningful for the object.\n'
'\n'
- ' For certain sensitive attribute deletions, raises an '
- 'auditing event\n'
- ' "object.__delattr__" with arguments "obj" and "name".\n'
- '\n'
+ ' For certain sensitive attribute deletions, raises an '
+ 'auditing event\n'
+ ' "object.__delattr__" with arguments "obj" and "name".\n'
+ '\n'
'object.__dir__(self)\n'
'\n'
' Called when "dir()" is called on the object. A sequence '
@@ -8700,11 +8700,11 @@ topics = {'assert': 'The "assert" statement\n'
'returned.\n'
'\n'
'The "__dir__" function should accept no arguments, and '
- 'return a\n'
- 'sequence of strings that represents the names accessible on the '
- 'module. If\n'
- 'present, this function overrides the standard "dir()" search '
- 'on a\n'
+ 'return a\n'
+ 'sequence of strings that represents the names accessible on the '
+ 'module. If\n'
+ 'present, this function overrides the standard "dir()" search '
+ 'on a\n'
'module.\n'
'\n'
'For a more fine grained customization of the module behavior '
@@ -8727,17 +8727,17 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
' sys.modules[__name__].__class__ = VerboseModule\n'
'\n'
- 'Note:\n'
- '\n'
- ' Defining module "__getattr__" and setting module '
- '"__class__" only\n'
- ' affect lookups made using the attribute access syntax – '
- 'directly\n'
- ' accessing the module globals (whether by code within the '
- 'module, or\n'
- ' via a reference to the module’s globals dictionary) is '
- 'unaffected.\n'
- '\n'
+ 'Note:\n'
+ '\n'
+ ' Defining module "__getattr__" and setting module '
+ '"__class__" only\n'
+ ' affect lookups made using the attribute access syntax – '
+ 'directly\n'
+ ' accessing the module globals (whether by code within the '
+ 'module, or\n'
+ ' via a reference to the module’s globals dictionary) is '
+ 'unaffected.\n'
+ '\n'
'Changed in version 3.5: "__class__" module attribute is now '
'writable.\n'
'\n'
@@ -8766,47 +8766,47 @@ topics = {'assert': 'The "assert" statement\n'
'whose name is\n'
'the key of the property in the owner class’ "__dict__".\n'
'\n'
- 'object.__get__(self, instance, owner=None)\n'
+ 'object.__get__(self, instance, owner=None)\n'
'\n'
' Called to get the attribute of the owner class (class '
'attribute\n'
' access) or of an instance of that class (instance '
'attribute\n'
- ' access). The optional *owner* argument is the owner '
- 'class, while\n'
- ' *instance* is the instance that the attribute was '
- 'accessed through,\n'
- ' or "None" when the attribute is accessed through the '
- '*owner*.\n'
- '\n'
- ' This method should return the computed attribute value or '
- 'raise an\n'
- ' "AttributeError" exception.\n'
- '\n'
- ' **PEP 252** specifies that "__get__()" is callable with '
- 'one or two\n'
- ' arguments. Python’s own built-in descriptors support '
- 'this\n'
- ' specification; however, it is likely that some '
- 'third-party tools\n'
- ' have descriptors that require both arguments. Python’s '
- 'own\n'
- ' "__getattribute__()" implementation always passes in both '
- 'arguments\n'
- ' whether they are required or not.\n'
- '\n'
+ ' access). The optional *owner* argument is the owner '
+ 'class, while\n'
+ ' *instance* is the instance that the attribute was '
+ 'accessed through,\n'
+ ' or "None" when the attribute is accessed through the '
+ '*owner*.\n'
+ '\n'
+ ' This method should return the computed attribute value or '
+ 'raise an\n'
+ ' "AttributeError" exception.\n'
+ '\n'
+ ' **PEP 252** specifies that "__get__()" is callable with '
+ 'one or two\n'
+ ' arguments. Python’s own built-in descriptors support '
+ 'this\n'
+ ' specification; however, it is likely that some '
+ 'third-party tools\n'
+ ' have descriptors that require both arguments. Python’s '
+ 'own\n'
+ ' "__getattribute__()" implementation always passes in both '
+ 'arguments\n'
+ ' whether they are required or not.\n'
+ '\n'
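A minimal descriptor sketch (Ten and A are invented names):

    >>> class Ten:
    ...     def __get__(self, instance, owner=None):
    ...         # instance is None when accessed through the owner class
    ...         return 10
    ...
    >>> class A:
    ...     x = Ten()
    ...
    >>> A().x       # instance access is routed through Ten.__get__
    10
    >>> A.x         # class access also calls __get__, with instance=None
    10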
'object.__set__(self, instance, value)\n'
'\n'
' Called to set the attribute on an instance *instance* of '
'the owner\n'
' class to a new value, *value*.\n'
'\n'
- ' Note, adding "__set__()" or "__delete__()" changes the '
- 'kind of\n'
- ' descriptor to a “data descriptor”. See Invoking '
- 'Descriptors for\n'
- ' more details.\n'
- '\n'
+ ' Note, adding "__set__()" or "__delete__()" changes the '
+ 'kind of\n'
+ ' descriptor to a “data descriptor”. See Invoking '
+ 'Descriptors for\n'
+ ' more details.\n'
+ '\n'
'object.__delete__(self, instance)\n'
'\n'
' Called to delete the attribute on an instance *instance* '
@@ -8819,24 +8819,24 @@ topics = {'assert': 'The "assert" statement\n'
'The\n'
' descriptor has been assigned to *name*.\n'
'\n'
- ' Note:\n'
- '\n'
- ' "__set_name__()" is only called implicitly as part of '
- 'the "type"\n'
- ' constructor, so it will need to be called explicitly '
- 'with the\n'
- ' appropriate parameters when a descriptor is added to a '
- 'class\n'
- ' after initial creation:\n'
- '\n'
- ' class A:\n'
- ' pass\n'
- ' descr = custom_descriptor()\n'
- ' A.attr = descr\n'
- " descr.__set_name__(A, 'attr')\n"
- '\n'
- ' See Creating the class object for more details.\n'
- '\n'
+ ' Note:\n'
+ '\n'
+ ' "__set_name__()" is only called implicitly as part of '
+ 'the "type"\n'
+ ' constructor, so it will need to be called explicitly '
+ 'with the\n'
+ ' appropriate parameters when a descriptor is added to a '
+ 'class\n'
+ ' after initial creation:\n'
+ '\n'
+ ' class A:\n'
+ ' pass\n'
+ ' descr = custom_descriptor()\n'
+ ' A.attr = descr\n'
+ " descr.__set_name__(A, 'attr')\n"
+ '\n'
+ ' See Creating the class object for more details.\n'
+ '\n'
' New in version 3.6.\n'
'\n'
'The attribute "__objclass__" is interpreted by the "inspect" '
@@ -8909,16 +8909,16 @@ topics = {'assert': 'The "assert" statement\n'
'"super(B,\n'
' obj).m()" searches "obj.__class__.__mro__" for the base '
'class "A"\n'
- ' immediately following "B" and then invokes the descriptor '
+ ' immediately following "B" and then invokes the descriptor '
'with the\n'
' call: "A.__dict__[\'m\'].__get__(obj, obj.__class__)".\n'
'\n'
'For instance bindings, the precedence of descriptor '
'invocation depends\n'
- 'on which descriptor methods are defined. A descriptor can '
- 'define any\n'
- 'combination of "__get__()", "__set__()" and "__delete__()". '
- 'If it\n'
+ 'on which descriptor methods are defined. A descriptor can '
+ 'define any\n'
+ 'combination of "__get__()", "__set__()" and "__delete__()". '
+ 'If it\n'
'does not define "__get__()", then accessing the attribute '
'will return\n'
'the descriptor object itself unless there is a value in the '
@@ -8931,22 +8931,22 @@ topics = {'assert': 'The "assert" statement\n'
'both\n'
'"__get__()" and "__set__()", while non-data descriptors have '
'just the\n'
- '"__get__()" method. Data descriptors with "__get__()" and '
- '"__set__()"\n'
- '(and/or "__delete__()") defined always override a '
- 'redefinition in an\n'
- 'instance dictionary. In contrast, non-data descriptors can '
- 'be\n'
- 'overridden by instances.\n'
- '\n'
- 'Python methods (including those decorated with '
- '"@staticmethod" and\n'
- '"@classmethod") are implemented as non-data descriptors. '
- 'Accordingly,\n'
- 'instances can redefine and override methods. This allows '
- 'individual\n'
- 'instances to acquire behaviors that differ from other '
- 'instances of the\n'
+ '"__get__()" method. Data descriptors with "__get__()" and '
+ '"__set__()"\n'
+ '(and/or "__delete__()") defined always override a '
+ 'redefinition in an\n'
+ 'instance dictionary. In contrast, non-data descriptors can '
+ 'be\n'
+ 'overridden by instances.\n'
+ '\n'
+ 'Python methods (including those decorated with '
+ '"@staticmethod" and\n'
+ '"@classmethod") are implemented as non-data descriptors. '
+ 'Accordingly,\n'
+ 'instances can redefine and override methods. This allows '
+ 'individual\n'
+ 'instances to acquire behaviors that differ from other '
+ 'instances of the\n'
'same class.\n'
'\n'
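A short sketch of that difference (C and D are invented names): a plain method is a non-data descriptor and can be shadowed per instance, while a property defines "__set__()" and cannot:

    >>> class C:
    ...     def greet(self):                 # non-data descriptor
    ...         return 'from the class'
    ...
    >>> c = C()
    >>> c.greet = lambda: 'from the instance'
    >>> c.greet()                            # the instance dict wins
    'from the instance'
    >>> class D:
    ...     @property                        # data descriptor
    ...     def p(self):
    ...         return 'computed'
    ...
    >>> D().p
    'computed'
    >>> # D().p = 1 would raise AttributeError: the descriptor overrides it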
'The "property()" function is implemented as a data '
@@ -8960,12 +8960,12 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'*__slots__* allow us to explicitly declare data members '
'(like\n'
- 'properties) and deny the creation of "__dict__" and '
+ 'properties) and deny the creation of "__dict__" and '
'*__weakref__*\n'
'(unless explicitly declared in *__slots__* or available in a '
'parent.)\n'
'\n'
- 'The space saved over using "__dict__" can be significant. '
+ 'The space saved over using "__dict__" can be significant. '
'Attribute\n'
'lookup speed can be significantly improved as well.\n'
'\n'
@@ -8977,7 +8977,7 @@ topics = {'assert': 'The "assert" statement\n'
'*__slots__*\n'
' reserves space for the declared variables and prevents '
'the\n'
- ' automatic creation of "__dict__" and *__weakref__* for '
+ ' automatic creation of "__dict__" and *__weakref__* for '
'each\n'
' instance.\n'
'\n'
@@ -8986,11 +8986,11 @@ topics = {'assert': 'The "assert" statement\n'
'~~~~~~~~~~~~~~~~~~~~~~~~~~\n'
'\n'
'* When inheriting from a class without *__slots__*, the '
- '"__dict__" and\n'
- ' *__weakref__* attribute of the instances will always be '
- 'accessible.\n'
+ '"__dict__" and\n'
+ ' *__weakref__* attribute of the instances will always be '
+ 'accessible.\n'
'\n'
- '* Without a "__dict__" variable, instances cannot be '
+ '* Without a "__dict__" variable, instances cannot be '
'assigned new\n'
' variables not listed in the *__slots__* definition. '
'Attempts to\n'
@@ -9002,40 +9002,40 @@ topics = {'assert': 'The "assert" statement\n'
' declaration.\n'
'\n'
'* Without a *__weakref__* variable for each instance, '
- 'classes defining\n'
- ' *__slots__* do not support "weak references" to its '
- 'instances. If\n'
- ' weak reference support is needed, then add '
- '"\'__weakref__\'" to the\n'
- ' sequence of strings in the *__slots__* declaration.\n'
+ 'classes defining\n'
+ ' *__slots__* do not support "weak references" to its '
+ 'instances. If\n'
+ ' weak reference support is needed, then add '
+ '"\'__weakref__\'" to the\n'
+ ' sequence of strings in the *__slots__* declaration.\n'
'\n'
'* *__slots__* are implemented at the class level by '
'creating\n'
- ' descriptors for each variable name. As a result, class '
- 'attributes\n'
- ' cannot be used to set default values for instance '
- 'variables defined\n'
- ' by *__slots__*; otherwise, the class attribute would '
- 'overwrite the\n'
- ' descriptor assignment.\n'
+ ' descriptors for each variable name. As a result, class '
+ 'attributes\n'
+ ' cannot be used to set default values for instance '
+ 'variables defined\n'
+ ' by *__slots__*; otherwise, the class attribute would '
+ 'overwrite the\n'
+ ' descriptor assignment.\n'
'\n'
'* The action of a *__slots__* declaration is not limited to '
- 'the class\n'
- ' where it is defined. *__slots__* declared in parents are '
- 'available\n'
- ' in child classes. However, child subclasses will get a '
- '"__dict__"\n'
- ' and *__weakref__* unless they also define *__slots__* '
- '(which should\n'
- ' only contain names of any *additional* slots).\n'
+ 'the class\n'
+ ' where it is defined. *__slots__* declared in parents are '
+ 'available\n'
+ ' in child classes. However, child subclasses will get a '
+ '"__dict__"\n'
+ ' and *__weakref__* unless they also define *__slots__* '
+ '(which should\n'
+ ' only contain names of any *additional* slots).\n'
'\n'
'* If a class defines a slot also defined in a base class, '
- 'the instance\n'
- ' variable defined by the base class slot is inaccessible '
- '(except by\n'
- ' retrieving its descriptor directly from the base class). '
- 'This\n'
- ' renders the meaning of the program undefined. In the '
+ 'the instance\n'
+ ' variable defined by the base class slot is inaccessible '
+ '(except by\n'
+ ' retrieving its descriptor directly from the base class). '
+ 'This\n'
+ ' renders the meaning of the program undefined. In the '
'future, a\n'
' check may be added to prevent this.\n'
'\n'
@@ -9044,18 +9044,18 @@ topics = {'assert': 'The "assert" statement\n'
' “variable-length” built-in types such as "int", "bytes" '
'and "tuple".\n'
'\n'
- '* Any non-string *iterable* may be assigned to *__slots__*.\n'
- '\n'
- '* If a "dictionary" is used to assign *__slots__*, the '
- 'dictionary keys\n'
- ' will be used as the slot names. The values of the '
- 'dictionary can be\n'
- ' used to provide per-attribute docstrings that will be '
- 'recognised by\n'
- ' "inspect.getdoc()" and displayed in the output of '
- '"help()".\n'
- '\n'
- '* "__class__" assignment works only if both classes have the '
+ '* Any non-string *iterable* may be assigned to *__slots__*.\n'
+ '\n'
+ '* If a "dictionary" is used to assign *__slots__*, the '
+ 'dictionary keys\n'
+ ' will be used as the slot names. The values of the '
+ 'dictionary can be\n'
+ ' used to provide per-attribute docstrings that will be '
+ 'recognised by\n'
+ ' "inspect.getdoc()" and displayed in the output of '
+ '"help()".\n'
+ '\n'
+ '* "__class__" assignment works only if both classes have the '
'same\n'
' *__slots__*.\n'
'\n'
@@ -9067,18 +9067,18 @@ topics = {'assert': 'The "assert" statement\n'
'violations\n'
' raise "TypeError".\n'
'\n'
- '* If an *iterator* is used for *__slots__* then a '
- '*descriptor* is\n'
- ' created for each of the iterator’s values. However, the '
- '*__slots__*\n'
- ' attribute will be an empty iterator.\n'
- '\n'
+ '* If an *iterator* is used for *__slots__* then a '
+ '*descriptor* is\n'
+ ' created for each of the iterator’s values. However, the '
+ '*__slots__*\n'
+ ' attribute will be an empty iterator.\n'
'\n'
+ '\n'
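Pulling the main points above together in one sketch (Point is an invented name):

    >>> class Point:
    ...     __slots__ = ('x', 'y')
    ...     def __init__(self, x, y):
    ...         self.x, self.y = x, y
    ...
    >>> p = Point(1, 2)
    >>> p.x
    1
    >>> # p.z = 3 would raise AttributeError: no __dict__ is created,
    >>> # and 'z' is not among the declared slots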
'Customizing class creation\n'
'==========================\n'
'\n'
'Whenever a class inherits from another class, '
- '"__init_subclass__()" is\n'
+ '"__init_subclass__()" is\n'
'called on that class. This way, it is possible to write '
'classes which\n'
'change the behavior of subclasses. This is closely related '
@@ -9109,7 +9109,7 @@ topics = {'assert': 'The "assert" statement\n'
' in:\n'
'\n'
' class Philosopher:\n'
- ' def __init_subclass__(cls, /, default_name, '
+ ' def __init_subclass__(cls, /, default_name, '
'**kwargs):\n'
' super().__init_subclass__(**kwargs)\n'
' cls.default_name = default_name\n'
@@ -9122,11 +9122,11 @@ topics = {'assert': 'The "assert" statement\n'
'does nothing,\n'
' but raises an error if it is called with any arguments.\n'
'\n'
- ' Note:\n'
- '\n'
- ' The metaclass hint "metaclass" is consumed by the rest '
- 'of the\n'
- ' type machinery, and is never passed to '
+ ' Note:\n'
+ '\n'
+ ' The metaclass hint "metaclass" is consumed by the rest '
+ 'of the\n'
+ ' type machinery, and is never passed to '
'"__init_subclass__"\n'
' implementations. The actual metaclass (rather than the '
'explicit\n'
@@ -9170,15 +9170,15 @@ topics = {'assert': 'The "assert" statement\n'
'When a class definition is executed, the following steps '
'occur:\n'
'\n'
- '* MRO entries are resolved;\n'
+ '* MRO entries are resolved;\n'
'\n'
- '* the appropriate metaclass is determined;\n'
+ '* the appropriate metaclass is determined;\n'
'\n'
- '* the class namespace is prepared;\n'
+ '* the class namespace is prepared;\n'
'\n'
- '* the class body is executed;\n'
+ '* the class body is executed;\n'
'\n'
- '* the class object is created.\n'
+ '* the class object is created.\n'
'\n'
'\n'
'Resolving MRO entries\n'
@@ -9194,12 +9194,12 @@ topics = {'assert': 'The "assert" statement\n'
'tuple may\n'
'be empty, in such case the original base is ignored.\n'
'\n'
- 'See also:\n'
- '\n'
- ' **PEP 560** - Core support for typing module and generic '
- 'types\n'
+ 'See also:\n'
'\n'
+ ' **PEP 560** - Core support for typing module and generic '
+ 'types\n'
'\n'
+ '\n'
'Determining the appropriate metaclass\n'
'-------------------------------------\n'
'\n'
@@ -9209,16 +9209,16 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'* if no bases and no explicit metaclass are given, then '
'"type()" is\n'
- ' used;\n'
+ ' used;\n'
'\n'
'* if an explicit metaclass is given and it is *not* an '
'instance of\n'
- ' "type()", then it is used directly as the metaclass;\n'
+ ' "type()", then it is used directly as the metaclass;\n'
'\n'
'* if an instance of "type()" is given as the explicit '
'metaclass, or\n'
' bases are defined, then the most derived metaclass is '
- 'used.\n'
+ 'used.\n'
'\n'
'The most derived metaclass is selected from the explicitly '
'specified\n'
@@ -9244,13 +9244,13 @@ topics = {'assert': 'The "assert" statement\n'
'bases,\n'
'**kwds)" (where the additional keyword arguments, if any, '
'come from\n'
- 'the class definition). The "__prepare__" method should be '
- 'implemented\n'
- 'as a "classmethod". The namespace returned by "__prepare__" '
- 'is passed\n'
- 'in to "__new__", but when the final class object is created '
- 'the\n'
- 'namespace is copied into a new "dict".\n'
+ 'the class definition). The "__prepare__" method should be '
+ 'implemented\n'
+ 'as a "classmethod". The namespace returned by "__prepare__" '
+ 'is passed\n'
+ 'in to "__new__", but when the final class object is created '
+ 'the\n'
+ 'namespace is copied into a new "dict".\n'
'\n'
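A sketch of a metaclass supplying "__prepare__" (TracingMeta and Demo are invented names):

    >>> class TracingMeta(type):
    ...     @classmethod
    ...     def __prepare__(mcls, name, bases, **kwds):
    ...         print('preparing namespace for', name)
    ...         return {}     # any mapping works; copied into a fresh dict later
    ...
    >>> class Demo(metaclass=TracingMeta):
    ...     pass
    ...
    preparing namespace for Demo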
'If the metaclass has no "__prepare__" attribute, then the '
'class\n'
@@ -9322,9 +9322,9 @@ topics = {'assert': 'The "assert" statement\n'
'up to the\n'
'"type.__new__" call in order for the class to be '
'initialised\n'
- 'correctly. Failing to do so will result in a "RuntimeError" '
- 'in Python\n'
- '3.8.\n'
+ 'correctly. Failing to do so will result in a "RuntimeError" '
+ 'in Python\n'
+ '3.8.\n'
'\n'
'When using the default metaclass "type", or any metaclass '
'that\n'
@@ -9340,7 +9340,7 @@ topics = {'assert': 'The "assert" statement\n'
'with the\n'
' class being defined and the assigned name of that '
'particular\n'
- ' descriptor;\n'
+ ' descriptor;\n'
'\n'
'* finally, the "__init_subclass__()" hook is called on the '
'immediate\n'
@@ -9437,180 +9437,180 @@ topics = {'assert': 'The "assert" statement\n'
'Emulating generic types\n'
'=======================\n'
'\n'
- 'When using *type annotations*, it is often useful to '
- '*parameterize* a\n'
- '*generic type* using Python’s square-brackets notation. For '
- 'example,\n'
- 'the annotation "list[int]" might be used to signify a "list" '
- 'in which\n'
- 'all the elements are of type "int".\n'
- '\n'
- 'See also:\n'
- '\n'
- ' **PEP 484** - Type Hints\n'
- ' Introducing Python’s framework for type annotations\n'
- '\n'
- ' Generic Alias Types\n'
- ' Documentation for objects representing parameterized '
- 'generic\n'
- ' classes\n'
- '\n'
- ' Generics, user-defined generics and "typing.Generic"\n'
- ' Documentation on how to implement generic classes that '
- 'can be\n'
- ' parameterized at runtime and understood by static '
- 'type-checkers.\n'
- '\n'
- 'A class can *generally* only be parameterized if it defines '
- 'the\n'
- 'special class method "__class_getitem__()".\n'
- '\n'
+ 'When using *type annotations*, it is often useful to '
+ '*parameterize* a\n'
+ '*generic type* using Python’s square-brackets notation. For '
+ 'example,\n'
+ 'the annotation "list[int]" might be used to signify a "list" '
+ 'in which\n'
+ 'all the elements are of type "int".\n'
+ '\n'
+ 'See also:\n'
+ '\n'
+ ' **PEP 484** - Type Hints\n'
+ ' Introducing Python’s framework for type annotations\n'
+ '\n'
+ ' Generic Alias Types\n'
+ ' Documentation for objects representing parameterized '
+ 'generic\n'
+ ' classes\n'
+ '\n'
+ ' Generics, user-defined generics and "typing.Generic"\n'
+ ' Documentation on how to implement generic classes that '
+ 'can be\n'
+ ' parameterized at runtime and understood by static '
+ 'type-checkers.\n'
+ '\n'
+ 'A class can *generally* only be parameterized if it defines '
+ 'the\n'
+ 'special class method "__class_getitem__()".\n'
+ '\n'
'classmethod object.__class_getitem__(cls, key)\n'
'\n'
' Return an object representing the specialization of a '
'generic class\n'
' by type arguments found in *key*.\n'
'\n'
- ' When defined on a class, "__class_getitem__()" is '
- 'automatically a\n'
- ' class method. As such, there is no need for it to be '
- 'decorated with\n'
- ' "@classmethod" when it is defined.\n'
- '\n'
- '\n'
- 'The purpose of *__class_getitem__*\n'
- '----------------------------------\n'
- '\n'
- 'The purpose of "__class_getitem__()" is to allow runtime\n'
- 'parameterization of standard-library generic classes in '
- 'order to more\n'
- 'easily apply *type hints* to these classes.\n'
- '\n'
- 'To implement custom generic classes that can be '
- 'parameterized at\n'
- 'runtime and understood by static type-checkers, users should '
- 'either\n'
- 'inherit from a standard library class that already '
- 'implements\n'
- '"__class_getitem__()", or inherit from "typing.Generic", '
- 'which has its\n'
- 'own implementation of "__class_getitem__()".\n'
- '\n'
- 'Custom implementations of "__class_getitem__()" on classes '
- 'defined\n'
- 'outside of the standard library may not be understood by '
- 'third-party\n'
- 'type-checkers such as mypy. Using "__class_getitem__()" on '
- 'any class\n'
- 'for purposes other than type hinting is discouraged.\n'
- '\n'
- '\n'
- '*__class_getitem__* versus *__getitem__*\n'
- '----------------------------------------\n'
- '\n'
- 'Usually, the subscription of an object using square brackets '
- 'will call\n'
- 'the "__getitem__()" instance method defined on the object’s '
- 'class.\n'
- 'However, if the object being subscribed is itself a class, '
- 'the class\n'
- 'method "__class_getitem__()" may be called instead.\n'
- '"__class_getitem__()" should return a GenericAlias object if '
- 'it is\n'
- 'properly defined.\n'
- '\n'
- 'Presented with the *expression* "obj[x]", the Python '
- 'interpreter\n'
- 'follows something like the following process to decide '
- 'whether\n'
- '"__getitem__()" or "__class_getitem__()" should be called:\n'
- '\n'
- ' from inspect import isclass\n'
- '\n'
- ' def subscribe(obj, x):\n'
- ' """Return the result of the expression `obj[x]`"""\n'
- '\n'
- ' class_of_obj = type(obj)\n'
- '\n'
- ' # If the class of obj defines __getitem__,\n'
- ' # call class_of_obj.__getitem__(obj, x)\n'
- " if hasattr(class_of_obj, '__getitem__'):\n"
- ' return class_of_obj.__getitem__(obj, x)\n'
- '\n'
- ' # Else, if obj is a class and defines '
- '__class_getitem__,\n'
- ' # call obj.__class_getitem__(x)\n'
- ' elif isclass(obj) and hasattr(obj, '
- "'__class_getitem__'):\n"
- ' return obj.__class_getitem__(x)\n'
- '\n'
- ' # Else, raise an exception\n'
- ' else:\n'
- ' raise TypeError(\n'
- ' f"\'{class_of_obj.__name__}\' object is not '
- 'subscriptable"\n'
- ' )\n'
- '\n'
- 'In Python, all classes are themselves instances of other '
- 'classes. The\n'
- 'class of a class is known as that class’s *metaclass*, and '
- 'most\n'
- 'classes have the "type" class as their metaclass. "type" '
- 'does not\n'
- 'define "__getitem__()", meaning that expressions such as '
- '"list[int]",\n'
- '"dict[str, float]" and "tuple[str, bytes]" all result in\n'
- '"__class_getitem__()" being called:\n'
- '\n'
- ' >>> # list has class "type" as its metaclass, like most '
- 'classes:\n'
- ' >>> type(list)\n'
- " <class 'type'>\n"
- ' >>> type(dict) == type(list) == type(tuple) == type(str) '
- '== type(bytes)\n'
- ' True\n'
- ' >>> # "list[int]" calls "list.__class_getitem__(int)"\n'
- ' >>> list[int]\n'
- ' list[int]\n'
- ' >>> # list.__class_getitem__ returns a GenericAlias '
- 'object:\n'
- ' >>> type(list[int])\n'
- " <class 'types.GenericAlias'>\n"
- '\n'
- 'However, if a class has a custom metaclass that defines\n'
- '"__getitem__()", subscribing the class may result in '
- 'different\n'
- 'behaviour. An example of this can be found in the "enum" '
- 'module:\n'
- '\n'
- ' >>> from enum import Enum\n'
- ' >>> class Menu(Enum):\n'
- ' ... """A breakfast menu"""\n'
- " ... SPAM = 'spam'\n"
- " ... BACON = 'bacon'\n"
- ' ...\n'
- ' >>> # Enum classes have a custom metaclass:\n'
- ' >>> type(Menu)\n'
- " <class 'enum.EnumMeta'>\n"
- ' >>> # EnumMeta defines __getitem__,\n'
- ' >>> # so __class_getitem__ is not called,\n'
- ' >>> # and the result is not a GenericAlias object:\n'
- " >>> Menu['SPAM']\n"
- " <Menu.SPAM: 'spam'>\n"
- " >>> type(Menu['SPAM'])\n"
- " <enum 'Menu'>\n"
- '\n'
- 'See also:\n'
- '\n'
- ' **PEP 560** - Core Support for typing module and generic '
- 'types\n'
- ' Introducing "__class_getitem__()", and outlining when '
- 'a\n'
- ' subscription results in "__class_getitem__()" being '
- 'called\n'
- ' instead of "__getitem__()"\n'
- '\n'
- '\n'
+ ' When defined on a class, "__class_getitem__()" is '
+ 'automatically a\n'
+ ' class method. As such, there is no need for it to be '
+ 'decorated with\n'
+ ' "@classmethod" when it is defined.\n'
+ '\n'
+ '\n'
+ 'The purpose of *__class_getitem__*\n'
+ '----------------------------------\n'
+ '\n'
+ 'The purpose of "__class_getitem__()" is to allow runtime\n'
+ 'parameterization of standard-library generic classes in '
+ 'order to more\n'
+ 'easily apply *type hints* to these classes.\n'
+ '\n'
+ 'To implement custom generic classes that can be '
+ 'parameterized at\n'
+ 'runtime and understood by static type-checkers, users should '
+ 'either\n'
+ 'inherit from a standard library class that already '
+ 'implements\n'
+ '"__class_getitem__()", or inherit from "typing.Generic", '
+ 'which has its\n'
+ 'own implementation of "__class_getitem__()".\n'
+ '\n'
+ 'Custom implementations of "__class_getitem__()" on classes '
+ 'defined\n'
+ 'outside of the standard library may not be understood by '
+ 'third-party\n'
+ 'type-checkers such as mypy. Using "__class_getitem__()" on '
+ 'any class\n'
+ 'for purposes other than type hinting is discouraged.\n'
+ '\n'
+ '\n'
+ '*__class_getitem__* versus *__getitem__*\n'
+ '----------------------------------------\n'
+ '\n'
+ 'Usually, the subscription of an object using square brackets '
+ 'will call\n'
+ 'the "__getitem__()" instance method defined on the object’s '
+ 'class.\n'
+ 'However, if the object being subscribed is itself a class, '
+ 'the class\n'
+ 'method "__class_getitem__()" may be called instead.\n'
+ '"__class_getitem__()" should return a GenericAlias object if '
+ 'it is\n'
+ 'properly defined.\n'
+ '\n'
+ 'Presented with the *expression* "obj[x]", the Python '
+ 'interpreter\n'
+ 'follows something like the following process to decide '
+ 'whether\n'
+ '"__getitem__()" or "__class_getitem__()" should be called:\n'
+ '\n'
+ ' from inspect import isclass\n'
+ '\n'
+ ' def subscribe(obj, x):\n'
+ ' """Return the result of the expression `obj[x]`"""\n'
+ '\n'
+ ' class_of_obj = type(obj)\n'
+ '\n'
+ ' # If the class of obj defines __getitem__,\n'
+ ' # call class_of_obj.__getitem__(obj, x)\n'
+ " if hasattr(class_of_obj, '__getitem__'):\n"
+ ' return class_of_obj.__getitem__(obj, x)\n'
+ '\n'
+ ' # Else, if obj is a class and defines '
+ '__class_getitem__,\n'
+ ' # call obj.__class_getitem__(x)\n'
+ ' elif isclass(obj) and hasattr(obj, '
+ "'__class_getitem__'):\n"
+ ' return obj.__class_getitem__(x)\n'
+ '\n'
+ ' # Else, raise an exception\n'
+ ' else:\n'
+ ' raise TypeError(\n'
+ ' f"\'{class_of_obj.__name__}\' object is not '
+ 'subscriptable"\n'
+ ' )\n'
+ '\n'
+ 'In Python, all classes are themselves instances of other '
+ 'classes. The\n'
+ 'class of a class is known as that class’s *metaclass*, and '
+ 'most\n'
+ 'classes have the "type" class as their metaclass. "type" '
+ 'does not\n'
+ 'define "__getitem__()", meaning that expressions such as '
+ '"list[int]",\n'
+ '"dict[str, float]" and "tuple[str, bytes]" all result in\n'
+ '"__class_getitem__()" being called:\n'
+ '\n'
+ ' >>> # list has class "type" as its metaclass, like most '
+ 'classes:\n'
+ ' >>> type(list)\n'
+ " <class 'type'>\n"
+ ' >>> type(dict) == type(list) == type(tuple) == type(str) '
+ '== type(bytes)\n'
+ ' True\n'
+ ' >>> # "list[int]" calls "list.__class_getitem__(int)"\n'
+ ' >>> list[int]\n'
+ ' list[int]\n'
+ ' >>> # list.__class_getitem__ returns a GenericAlias '
+ 'object:\n'
+ ' >>> type(list[int])\n'
+ " <class 'types.GenericAlias'>\n"
+ '\n'
+ 'However, if a class has a custom metaclass that defines\n'
+ '"__getitem__()", subscribing the class may result in '
+ 'different\n'
+ 'behaviour. An example of this can be found in the "enum" '
+ 'module:\n'
+ '\n'
+ ' >>> from enum import Enum\n'
+ ' >>> class Menu(Enum):\n'
+ ' ... """A breakfast menu"""\n'
+ " ... SPAM = 'spam'\n"
+ " ... BACON = 'bacon'\n"
+ ' ...\n'
+ ' >>> # Enum classes have a custom metaclass:\n'
+ ' >>> type(Menu)\n'
+ " <class 'enum.EnumMeta'>\n"
+ ' >>> # EnumMeta defines __getitem__,\n'
+ ' >>> # so __class_getitem__ is not called,\n'
+ ' >>> # and the result is not a GenericAlias object:\n'
+ " >>> Menu['SPAM']\n"
+ " <Menu.SPAM: 'spam'>\n"
+ " >>> type(Menu['SPAM'])\n"
+ " <enum 'Menu'>\n"
+ '\n'
+ 'See also:\n'
+ '\n'
+ ' **PEP 560** - Core Support for typing module and generic '
+ 'types\n'
+ ' Introducing "__class_getitem__()", and outlining when '
+ 'a\n'
+ ' subscription results in "__class_getitem__()" being '
+ 'called\n'
+ ' instead of "__getitem__()"\n'
+ '\n'
+ '\n'
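The note above says "__class_getitem__()" is implicitly a class method and should return a GenericAlias. A minimal runnable sketch of a user-defined generic, added here purely for illustration (the name "Stack" is an assumption, not taken from this file):

    # Minimal sketch: a user-defined class supporting Stack[int] at runtime.
    import types

    class Stack:
        def __init__(self):
            self._items = []

        def push(self, item):
            self._items.append(item)

        # Implicitly a classmethod, as the note above explains;
        # no @classmethod decorator is needed.
        def __class_getitem__(cls, item):
            return types.GenericAlias(cls, item)

    alias = Stack[int]
    print(type(alias))  # <class 'types.GenericAlias'>
    print(alias)        # __main__.Stack[int]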
'Emulating callable objects\n'
'==========================\n'
'\n'
@@ -9618,8 +9618,8 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
' Called when the instance is “called” as a function; if '
'this method\n'
- ' is defined, "x(arg1, arg2, ...)" roughly translates to\n'
- ' "type(x).__call__(x, arg1, ...)".\n'
+ ' is defined, "x(arg1, arg2, ...)" roughly translates to\n'
+ ' "type(x).__call__(x, arg1, ...)".\n'
'\n'
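A short illustration of the "__call__()" entry above (the name "Adder" is illustrative, not from this file):

    # Instances of Adder behave like functions once __call__ is defined.
    class Adder:
        def __init__(self, base):
            self.base = base

        def __call__(self, *args):
            # x(arg1, arg2, ...) becomes type(x).__call__(x, arg1, ...)
            return self.base + sum(args)

    add5 = Adder(5)
    print(add5(1, 2))                       # 8
    print(type(add5).__call__(add5, 1, 2))  # 8, the equivalent spelled out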
'\n'
'Emulating container types\n'
@@ -9627,60 +9627,60 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'The following methods can be defined to implement container '
'objects.\n'
- 'Containers usually are *sequences* (such as "lists" or '
- '"tuples") or\n'
- '*mappings* (like "dictionaries"), but can represent other '
- 'containers\n'
- 'as well. The first set of methods is used either to emulate '
- 'a\n'
- 'sequence or to emulate a mapping; the difference is that for '
- 'a\n'
- 'sequence, the allowable keys should be the integers *k* for '
- 'which "0\n'
- '<= k < N" where *N* is the length of the sequence, or '
- '"slice" objects,\n'
- 'which define a range of items. It is also recommended that '
- 'mappings\n'
- 'provide the methods "keys()", "values()", "items()", '
- '"get()",\n'
- '"clear()", "setdefault()", "pop()", "popitem()", "copy()", '
- 'and\n'
- '"update()" behaving similar to those for Python’s standard\n'
- '"dictionary" objects. The "collections.abc" module provides '
- 'a\n'
- '"MutableMapping" *abstract base class* to help create those '
- 'methods\n'
- 'from a base set of "__getitem__()", "__setitem__()", '
- '"__delitem__()",\n'
- 'and "keys()". Mutable sequences should provide methods '
- '"append()",\n'
- '"count()", "index()", "extend()", "insert()", "pop()", '
- '"remove()",\n'
- '"reverse()" and "sort()", like Python standard "list" '
- 'objects.\n'
- 'Finally, sequence types should implement addition (meaning\n'
- 'concatenation) and multiplication (meaning repetition) by '
- 'defining the\n'
- 'methods "__add__()", "__radd__()", "__iadd__()", '
- '"__mul__()",\n'
- '"__rmul__()" and "__imul__()" described below; they should '
- 'not define\n'
- 'other numerical operators. It is recommended that both '
- 'mappings and\n'
- 'sequences implement the "__contains__()" method to allow '
- 'efficient use\n'
- 'of the "in" operator; for mappings, "in" should search the '
- 'mapping’s\n'
- 'keys; for sequences, it should search through the values. '
- 'It is\n'
- 'further recommended that both mappings and sequences '
- 'implement the\n'
- '"__iter__()" method to allow efficient iteration through '
- 'the\n'
- 'container; for mappings, "__iter__()" should iterate through '
+ 'Containers usually are *sequences* (such as "lists" or '
+ '"tuples") or\n'
+ '*mappings* (like "dictionaries"), but can represent other '
+ 'containers\n'
+ 'as well. The first set of methods is used either to emulate '
+ 'a\n'
+ 'sequence or to emulate a mapping; the difference is that for '
+ 'a\n'
+ 'sequence, the allowable keys should be the integers *k* for '
+ 'which "0\n'
+ '<= k < N" where *N* is the length of the sequence, or '
+ '"slice" objects,\n'
+ 'which define a range of items. It is also recommended that '
+ 'mappings\n'
+ 'provide the methods "keys()", "values()", "items()", '
+ '"get()",\n'
+ '"clear()", "setdefault()", "pop()", "popitem()", "copy()", '
+ 'and\n'
+ '"update()" behaving similar to those for Python’s standard\n'
+ '"dictionary" objects. The "collections.abc" module provides '
+ 'a\n'
+ '"MutableMapping" *abstract base class* to help create those '
+ 'methods\n'
+ 'from a base set of "__getitem__()", "__setitem__()", '
+ '"__delitem__()",\n'
+ 'and "keys()". Mutable sequences should provide methods '
+ '"append()",\n'
+ '"count()", "index()", "extend()", "insert()", "pop()", '
+ '"remove()",\n'
+ '"reverse()" and "sort()", like Python standard "list" '
+ 'objects.\n'
+ 'Finally, sequence types should implement addition (meaning\n'
+ 'concatenation) and multiplication (meaning repetition) by '
+ 'defining the\n'
+ 'methods "__add__()", "__radd__()", "__iadd__()", '
+ '"__mul__()",\n'
+ '"__rmul__()" and "__imul__()" described below; they should '
+ 'not define\n'
+ 'other numerical operators. It is recommended that both '
+ 'mappings and\n'
+ 'sequences implement the "__contains__()" method to allow '
+ 'efficient use\n'
+ 'of the "in" operator; for mappings, "in" should search the '
+ 'mapping’s\n'
+ 'keys; for sequences, it should search through the values. '
+ 'It is\n'
+ 'further recommended that both mappings and sequences '
+ 'implement the\n'
+ '"__iter__()" method to allow efficient iteration through '
'the\n'
- 'object’s keys; for sequences, it should iterate through the '
- 'values.\n'
+ 'container; for mappings, "__iter__()" should iterate through '
+ 'the\n'
+ 'object’s keys; for sequences, it should iterate through the '
+ 'values.\n'
'\n'
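A minimal sketch of the recommendation above: derive the full mapping interface from the base set of methods via "collections.abc.MutableMapping" (the name "LowerDict" is illustrative):

    # MutableMapping fills in get(), items(), update(), pop(), etc.
    # from this base set of methods.
    from collections.abc import MutableMapping

    class LowerDict(MutableMapping):
        """A mapping that lowercases its string keys."""

        def __init__(self):
            self._data = {}

        def __getitem__(self, key):
            return self._data[key.lower()]

        def __setitem__(self, key, value):
            self._data[key.lower()] = value

        def __delitem__(self, key):
            del self._data[key.lower()]

        def __iter__(self):
            return iter(self._data)

        def __len__(self):
            return len(self._data)

    d = LowerDict()
    d['Spam'] = 1
    print(d['SPAM'])        # 1, via the inherited machinery
    print(list(d.items()))  # [('spam', 1)]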
'object.__len__(self)\n'
'\n'
@@ -9709,23 +9709,23 @@ topics = {'assert': 'The "assert" statement\n'
' estimated length for the object (which may be greater or '
'less than\n'
' the actual length). The length must be an integer ">=" 0. '
- 'The\n'
- ' return value may also be "NotImplemented", which is '
- 'treated the\n'
- ' same as if the "__length_hint__" method didn’t exist at '
- 'all. This\n'
+ 'The\n'
+ ' return value may also be "NotImplemented", which is '
+ 'treated the\n'
+ ' same as if the "__length_hint__" method didn’t exist at '
+ 'all. This\n'
' method is purely an optimization and is never required '
'for\n'
' correctness.\n'
'\n'
' New in version 3.4.\n'
'\n'
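A brief illustrative sketch of "__length_hint__()" as described above, exercised through "operator.length_hint()" (the name "Countdown" is an assumption):

    # length_hint() falls back to __length_hint__ when __len__ is absent.
    import operator

    class Countdown:
        def __init__(self, n):
            self.n = n

        def __iter__(self):
            return iter(range(self.n, 0, -1))

        def __length_hint__(self):
            return self.n  # an estimate is enough; never required

    print(operator.length_hint(Countdown(3)))  # 3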
- 'Note:\n'
- '\n'
- ' Slicing is done exclusively with the following three '
- 'methods. A\n'
- ' call like\n'
+ 'Note:\n'
'\n'
+ ' Slicing is done exclusively with the following three '
+ 'methods. A\n'
+ ' call like\n'
+ '\n'
' a[1:2] = b\n'
'\n'
' is translated to\n'
@@ -9738,40 +9738,40 @@ topics = {'assert': 'The "assert" statement\n'
'object.__getitem__(self, key)\n'
'\n'
' Called to implement evaluation of "self[key]". For '
- '*sequence*\n'
- ' types, the accepted keys should be integers and slice '
- 'objects.\n'
- ' Note that the special interpretation of negative indexes '
- '(if the\n'
- ' class wishes to emulate a *sequence* type) is up to the\n'
- ' "__getitem__()" method. If *key* is of an inappropriate '
- 'type,\n'
- ' "TypeError" may be raised; if of a value outside the set '
- 'of indexes\n'
- ' for the sequence (after any special interpretation of '
- 'negative\n'
- ' values), "IndexError" should be raised. For *mapping* '
- 'types, if\n'
- ' *key* is missing (not in the container), "KeyError" '
+ '*sequence*\n'
+ ' types, the accepted keys should be integers and slice '
+ 'objects.\n'
+ ' Note that the special interpretation of negative indexes '
+ '(if the\n'
+ ' class wishes to emulate a *sequence* type) is up to the\n'
+ ' "__getitem__()" method. If *key* is of an inappropriate '
+ 'type,\n'
+ ' "TypeError" may be raised; if of a value outside the set '
+ 'of indexes\n'
+ ' for the sequence (after any special interpretation of '
+ 'negative\n'
+ ' values), "IndexError" should be raised. For *mapping* '
+ 'types, if\n'
+ ' *key* is missing (not in the container), "KeyError" '
'should be\n'
- ' raised.\n'
- '\n'
- ' Note:\n'
+ ' raised.\n'
'\n'
- ' "for" loops expect that an "IndexError" will be raised '
- 'for\n'
+ ' Note:\n'
+ '\n'
+ ' "for" loops expect that an "IndexError" will be raised '
+ 'for\n'
' illegal indexes to allow proper detection of the end of '
'the\n'
' sequence.\n'
'\n'
- ' Note:\n'
- '\n'
- ' When subscripting a *class*, the special class method\n'
- ' "__class_getitem__()" may be called instead of '
- '"__getitem__()".\n'
- ' See __class_getitem__ versus __getitem__ for more '
- 'details.\n'
- '\n'
+ ' Note:\n'
+ '\n'
+ ' When subscripting a *class*, the special class method\n'
+ ' "__class_getitem__()" may be called instead of '
+ '"__getitem__()".\n'
+ ' See __class_getitem__ versus __getitem__ for more '
+ 'details.\n'
+ '\n'
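A minimal sketch of a sequence-style "__getitem__()" honouring the "TypeError"/"IndexError" contract above; raising "IndexError" is what lets a plain "for" loop terminate (the name "Squares" is illustrative):

    class Squares:
        def __init__(self, n):
            self.n = n

        def __getitem__(self, key):
            if not isinstance(key, int):
                raise TypeError(f"indices must be integers, not {type(key).__name__}")
            if not 0 <= key < self.n:
                raise IndexError("Squares index out of range")
            return key * key

    # Iteration works through __getitem__ alone: it stops at IndexError.
    print(list(Squares(4)))  # [0, 1, 4, 9]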
'object.__setitem__(self, key, value)\n'
'\n'
' Called to implement assignment to "self[key]". Same note '
@@ -9844,12 +9844,12 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'The membership test operators ("in" and "not in") are '
'normally\n'
- 'implemented as an iteration through a container. However, '
+ 'implemented as an iteration through a container. However, '
'container\n'
'objects can supply the following special method with a more '
'efficient\n'
- 'implementation, which also does not require the object be '
- 'iterable.\n'
+ 'implementation, which also does not require the object be '
+ 'iterable.\n'
'\n'
'object.__contains__(self, item)\n'
'\n'
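A minimal sketch of the efficient membership test described above; note that the object need not be iterable (the name "EvenNumbers" is illustrative):

    class EvenNumbers:
        def __contains__(self, item):
            return isinstance(item, int) and item % 2 == 0

    evens = EvenNumbers()
    print(4 in evens)      # True
    print(3 not in evens)  # True; "not in" negates __contains__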
@@ -9928,7 +9928,7 @@ topics = {'assert': 'The "assert" statement\n'
'object.__rfloordiv__(self, other)\n'
'object.__rmod__(self, other)\n'
'object.__rdivmod__(self, other)\n'
- 'object.__rpow__(self, other[, modulo])\n'
+ 'object.__rpow__(self, other[, modulo])\n'
'object.__rlshift__(self, other)\n'
'object.__rrshift__(self, other)\n'
'object.__rand__(self, other)\n'
@@ -9957,19 +9957,19 @@ topics = {'assert': 'The "assert" statement\n'
'"__rpow__()" (the\n'
' coercion rules would become too complicated).\n'
'\n'
- ' Note:\n'
- '\n'
- ' If the right operand’s type is a subclass of the left '
- 'operand’s\n'
- ' type and that subclass provides a different '
- 'implementation of the\n'
- ' reflected method for the operation, this method will be '
- 'called\n'
- ' before the left operand’s non-reflected method. This '
- 'behavior\n'
- ' allows subclasses to override their ancestors’ '
- 'operations.\n'
- '\n'
+ ' Note:\n'
+ '\n'
+ ' If the right operand’s type is a subclass of the left '
+ 'operand’s\n'
+ ' type and that subclass provides a different '
+ 'implementation of the\n'
+ ' reflected method for the operation, this method will be '
+ 'called\n'
+ ' before the left operand’s non-reflected method. This '
+ 'behavior\n'
+ ' allows subclasses to override their ancestors’ '
+ 'operations.\n'
+ '\n'
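A short sketch of the subclass-priority rule in the note above (class names are illustrative):

    # The right operand's reflected method runs first when its type
    # subclasses the left operand's type and overrides the reflected method.
    class Base:
        def __add__(self, other):
            return "Base.__add__"

    class Derived(Base):
        def __radd__(self, other):
            return "Derived.__radd__"

    print(Base() + Derived())  # Derived.__radd__ -- tried before Base.__add__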
'object.__iadd__(self, other)\n'
'object.__isub__(self, other)\n'
'object.__imul__(self, other)\n'
@@ -10012,16 +10012,16 @@ topics = {'assert': 'The "assert" statement\n'
'the data\n'
' model.\n'
'\n'
- ' Note:\n'
- '\n'
- ' Due to a bug in the dispatching mechanism for "**=", a '
- 'class that\n'
- ' defines "__ipow__()" but returns "NotImplemented" would '
- 'fail to\n'
- ' fall back to "x.__pow__(y)" and "y.__rpow__(x)". This '
- 'bug is\n'
- ' fixed in Python 3.10.\n'
- '\n'
+ ' Note:\n'
+ '\n'
+ ' Due to a bug in the dispatching mechanism for "**=", a '
+ 'class that\n'
+ ' defines "__ipow__()" but returns "NotImplemented" would '
+ 'fail to\n'
+ ' fall back to "x.__pow__(y)" and "y.__rpow__(x)". This '
+ 'bug is\n'
+ ' fixed in Python 3.10.\n'
+ '\n'
'object.__neg__(self)\n'
'object.__pos__(self)\n'
'object.__abs__(self)\n'
@@ -10052,11 +10052,11 @@ topics = {'assert': 'The "assert" statement\n'
'numeric\n'
' object is an integer type. Must return an integer.\n'
'\n'
- ' If "__int__()", "__float__()" and "__complex__()" are not '
- 'defined\n'
- ' then corresponding built-in functions "int()", "float()" '
- 'and\n'
- ' "complex()" fall back to "__index__()".\n'
+ ' If "__int__()", "__float__()" and "__complex__()" are not '
+ 'defined\n'
+ ' then corresponding built-in functions "int()", "float()" '
+ 'and\n'
+ ' "complex()" fall back to "__index__()".\n'
'\n'
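A minimal sketch of the "__index__()" fallback described above (the name "Nibble" is an assumption):

    class Nibble:
        def __init__(self, value):
            self.value = value & 0xF

        def __index__(self):
            return self.value

    n = Nibble(7)
    print(int(n))                   # 7, via the __index__ fallback
    print(hex(n))                   # 0x7; hex() also requires __index__
    print([10, 20, 30][Nibble(1)])  # 20; usable anywhere an index is needed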
'object.__round__(self[, ndigits])\n'
'object.__trunc__(self)\n'
@@ -10072,9 +10072,9 @@ topics = {'assert': 'The "assert" statement\n'
' of the object truncated to an "Integral" (typically an '
'"int").\n'
'\n'
- ' The built-in function "int()" falls back to "__trunc__()" '
- 'if\n'
- ' neither "__int__()" nor "__index__()" is defined.\n'
+ ' The built-in function "int()" falls back to "__trunc__()" '
+ 'if\n'
+ ' neither "__int__()" nor "__index__()" is defined.\n'
'\n'
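A minimal sketch of the "__trunc__()" fallback described above, valid for the Python versions this documentation covers (the name "Celsius" is illustrative):

    import math

    class Celsius:
        def __init__(self, degrees):
            self.degrees = degrees

        def __trunc__(self):
            return math.trunc(self.degrees)

    # Neither __int__ nor __index__ is defined, so int() uses __trunc__.
    print(int(Celsius(21.9)))  # 21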
'\n'
'With Statement Context Managers\n'
@@ -10262,14 +10262,14 @@ topics = {'assert': 'The "assert" statement\n'
'capitalized\n'
' and the rest lowercased.\n'
'\n'
- ' Changed in version 3.8: The first character is now put '
- 'into\n'
- ' titlecase rather than uppercase. This means that '
- 'characters like\n'
- ' digraphs will only have their first letter capitalized, '
- 'instead of\n'
- ' the full character.\n'
- '\n'
+ ' Changed in version 3.8: The first character is now put '
+ 'into\n'
+ ' titlecase rather than uppercase. This means that '
+ 'characters like\n'
+ ' digraphs will only have their first letter capitalized, '
+ 'instead of\n'
+ ' the full character.\n'
+ '\n'
'str.casefold()\n'
'\n'
' Return a casefolded copy of the string. Casefolded '
@@ -10310,7 +10310,7 @@ topics = {'assert': 'The "assert" statement\n'
'*start* and\n'
' *end* are interpreted as in slice notation.\n'
'\n'
- 'str.encode(encoding="utf-8", errors="strict")\n'
+ 'str.encode(encoding="utf-8", errors="strict")\n'
'\n'
' Return an encoded version of the string as a bytes '
'object. Default\n'
@@ -10328,20 +10328,20 @@ topics = {'assert': 'The "assert" statement\n'
'For a list\n'
' of possible encodings, see section Standard Encodings.\n'
'\n'
- ' By default, the *errors* argument is not checked for '
- 'best\n'
- ' performance, but only used at the first encoding '
- 'error. Enable the\n'
- ' Python Development Mode, or use a debug build to check '
- '*errors*.\n'
- '\n'
+ ' By default, the *errors* argument is not checked for '
+ 'best\n'
+ ' performance, but only used at the first encoding '
+ 'error. Enable the\n'
+ ' Python Development Mode, or use a debug build to check '
+ '*errors*.\n'
+ '\n'
' Changed in version 3.1: Support for keyword arguments '
'added.\n'
'\n'
- ' Changed in version 3.9: The *errors* argument is now checked in '
- 'development\n'
- ' mode and in debug mode.\n'
- '\n'
+ ' Changed in version 3.9: The *errors* argument is now checked in '
+ 'development\n'
+ ' mode and in debug mode.\n'
+ '\n'
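A few illustrative calls showing the *encoding* and *errors* parameters of "str.encode()" described above:

    text = 'café'
    print(text.encode('utf-8'))                    # b'caf\xc3\xa9'
    print(text.encode('ascii', errors='replace'))  # b'caf?'
    print(text.encode('ascii', errors='ignore'))   # b'caf'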
'str.endswith(suffix[, start[, end]])\n'
'\n'
' Return "True" if the string ends with the specified '
@@ -10396,14 +10396,14 @@ topics = {'assert': 'The "assert" statement\n'
'"-1" if\n'
' *sub* is not found.\n'
'\n'
- ' Note:\n'
- '\n'
- ' The "find()" method should be used only if you need '
- 'to know the\n'
- ' position of *sub*. To check if *sub* is a substring '
- 'or not, use\n'
- ' the "in" operator:\n'
+ ' Note:\n'
'\n'
+ ' The "find()" method should be used only if you need '
+ 'to know the\n'
+ ' position of *sub*. To check if *sub* is a substring '
+ 'or not, use\n'
+ ' the "in" operator:\n'
+ '\n'
" >>> 'Py' in 'Python'\n"
' True\n'
'\n'
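A short sketch contrasting "str.find()" with the "in" operator, per the note above:

    s = 'Python'
    print('th' in s)     # True -- membership test only
    print(s.find('th'))  # 2   -- use find() when the position is needed
    print(s.find('xy'))  # -1  -- not found; no exception is raised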
@@ -10431,9 +10431,9 @@ topics = {'assert': 'The "assert" statement\n'
' formatting options that can be specified in format '
'strings.\n'
'\n'
- ' Note:\n'
- '\n'
- ' When formatting a number ("int", "float", "complex",\n'
+ ' Note:\n'
+ '\n'
+ ' When formatting a number ("int", "float", "complex",\n'
' "decimal.Decimal" and subclasses) with the "n" type '
'(ex:\n'
' "\'{:n}\'.format(1234)"), the function temporarily '
@@ -10480,20 +10480,20 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'str.isalnum()\n'
'\n'
- ' Return "True" if all characters in the string are '
+ ' Return "True" if all characters in the string are '
'alphanumeric and\n'
- ' there is at least one character, "False" otherwise. A '
- 'character\n'
- ' "c" is alphanumeric if one of the following returns '
+ ' there is at least one character, "False" otherwise. A '
+ 'character\n'
+ ' "c" is alphanumeric if one of the following returns '
'"True":\n'
' "c.isalpha()", "c.isdecimal()", "c.isdigit()", or '
'"c.isnumeric()".\n'
'\n'
'str.isalpha()\n'
'\n'
- ' Return "True" if all characters in the string are '
+ ' Return "True" if all characters in the string are '
'alphabetic and\n'
- ' there is at least one character, "False" otherwise. '
+ ' there is at least one character, "False" otherwise. '
'Alphabetic\n'
' characters are those characters defined in the Unicode '
'character\n'
@@ -10507,91 +10507,91 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'str.isascii()\n'
'\n'
- ' Return "True" if the string is empty or all characters '
- 'in the\n'
- ' string are ASCII, "False" otherwise. ASCII characters '
- 'have code\n'
- ' points in the range U+0000-U+007F.\n'
+ ' Return "True" if the string is empty or all characters '
+ 'in the\n'
+ ' string are ASCII, "False" otherwise. ASCII characters '
+ 'have code\n'
+ ' points in the range U+0000-U+007F.\n'
'\n'
' New in version 3.7.\n'
'\n'
'str.isdecimal()\n'
'\n'
- ' Return "True" if all characters in the string are '
- 'decimal\n'
- ' characters and there is at least one character, "False" '
- 'otherwise.\n'
- ' Decimal characters are those that can be used to form '
- 'numbers in\n'
- ' base 10, e.g. U+0660, ARABIC-INDIC DIGIT ZERO. '
- 'Formally a decimal\n'
- ' character is a character in the Unicode General '
- 'Category “Nd”.\n'
+ ' Return "True" if all characters in the string are '
+ 'decimal\n'
+ ' characters and there is at least one character, "False" '
+ 'otherwise.\n'
+ ' Decimal characters are those that can be used to form '
+ 'numbers in\n'
+ ' base 10, e.g. U+0660, ARABIC-INDIC DIGIT ZERO. '
+ 'Formally a decimal\n'
+ ' character is a character in the Unicode General '
+ 'Category “Nd”.\n'
'\n'
'str.isdigit()\n'
'\n'
- ' Return "True" if all characters in the string are '
- 'digits and there\n'
- ' is at least one character, "False" otherwise. Digits '
- 'include\n'
- ' decimal characters and digits that need special '
- 'handling, such as\n'
- ' the compatibility superscript digits. This covers '
- 'digits which\n'
- ' cannot be used to form numbers in base 10, like the '
- 'Kharosthi\n'
- ' numbers. Formally, a digit is a character that has the '
- 'property\n'
- ' value Numeric_Type=Digit or Numeric_Type=Decimal.\n'
+ ' Return "True" if all characters in the string are '
+ 'digits and there\n'
+ ' is at least one character, "False" otherwise. Digits '
+ 'include\n'
+ ' decimal characters and digits that need special '
+ 'handling, such as\n'
+ ' the compatibility superscript digits. This covers '
+ 'digits which\n'
+ ' cannot be used to form numbers in base 10, like the '
+ 'Kharosthi\n'
+ ' numbers. Formally, a digit is a character that has the '
+ 'property\n'
+ ' value Numeric_Type=Digit or Numeric_Type=Decimal.\n'
'\n'
'str.isidentifier()\n'
'\n'
- ' Return "True" if the string is a valid identifier '
+ ' Return "True" if the string is a valid identifier '
'according to the\n'
' language definition, section Identifiers and keywords.\n'
'\n'
- ' Call "keyword.iskeyword()" to test whether string "s" '
- 'is a reserved\n'
- ' identifier, such as "def" and "class".\n'
- '\n'
- ' Example:\n'
- '\n'
- ' >>> from keyword import iskeyword\n'
- '\n'
- " >>> 'hello'.isidentifier(), iskeyword('hello')\n"
- ' (True, False)\n'
- " >>> 'def'.isidentifier(), iskeyword('def')\n"
- ' (True, True)\n'
- '\n'
+ ' Call "keyword.iskeyword()" to test whether string "s" '
+ 'is a reserved\n'
+ ' identifier, such as "def" and "class".\n'
+ '\n'
+ ' Example:\n'
+ '\n'
+ ' >>> from keyword import iskeyword\n'
+ '\n'
+ " >>> 'hello'.isidentifier(), iskeyword('hello')\n"
+ ' (True, False)\n'
+ " >>> 'def'.isidentifier(), iskeyword('def')\n"
+ ' (True, True)\n'
+ '\n'
'str.islower()\n'
'\n'
- ' Return "True" if all cased characters [4] in the string '
- 'are\n'
- ' lowercase and there is at least one cased character, '
- '"False"\n'
- ' otherwise.\n'
+ ' Return "True" if all cased characters [4] in the string '
+ 'are\n'
+ ' lowercase and there is at least one cased character, '
+ '"False"\n'
+ ' otherwise.\n'
'\n'
'str.isnumeric()\n'
'\n'
- ' Return "True" if all characters in the string are '
- 'numeric\n'
- ' characters, and there is at least one character, '
- '"False" otherwise.\n'
- ' Numeric characters include digit characters, and all '
- 'characters\n'
- ' that have the Unicode numeric value property, e.g. '
- 'U+2155, VULGAR\n'
- ' FRACTION ONE FIFTH. Formally, numeric characters are '
- 'those with\n'
- ' the property value Numeric_Type=Digit, '
- 'Numeric_Type=Decimal or\n'
+ ' Return "True" if all characters in the string are '
+ 'numeric\n'
+ ' characters, and there is at least one character, '
+ '"False" otherwise.\n'
+ ' Numeric characters include digit characters, and all '
+ 'characters\n'
+ ' that have the Unicode numeric value property, e.g. '
+ 'U+2155, VULGAR\n'
+ ' FRACTION ONE FIFTH. Formally, numeric characters are '
+ 'those with\n'
+ ' the property value Numeric_Type=Digit, '
+ 'Numeric_Type=Decimal or\n'
' Numeric_Type=Numeric.\n'
'\n'
'str.isprintable()\n'
'\n'
- ' Return "True" if all characters in the string are '
+ ' Return "True" if all characters in the string are '
'printable or the\n'
- ' string is empty, "False" otherwise. Nonprintable '
+ ' string is empty, "False" otherwise. Nonprintable '
'characters are\n'
' those characters defined in the Unicode character '
'database as\n'
@@ -10607,46 +10607,46 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'str.isspace()\n'
'\n'
- ' Return "True" if there are only whitespace characters '
- 'in the string\n'
- ' and there is at least one character, "False" '
- 'otherwise.\n'
- '\n'
- ' A character is *whitespace* if in the Unicode character '
- 'database\n'
- ' (see "unicodedata"), either its general category is '
- '"Zs"\n'
- ' (“Separator, space”), or its bidirectional class is one '
- 'of "WS",\n'
- ' "B", or "S".\n'
- '\n'
+ ' Return "True" if there are only whitespace characters '
+ 'in the string\n'
+ ' and there is at least one character, "False" '
+ 'otherwise.\n'
+ '\n'
+ ' A character is *whitespace* if in the Unicode character '
+ 'database\n'
+ ' (see "unicodedata"), either its general category is '
+ '"Zs"\n'
+ ' (“Separator, space”), or its bidirectional class is one '
+ 'of "WS",\n'
+ ' "B", or "S".\n'
+ '\n'
'str.istitle()\n'
'\n'
- ' Return "True" if the string is a titlecased string and '
+ ' Return "True" if the string is a titlecased string and '
'there is at\n'
' least one character, for example uppercase characters '
'may only\n'
' follow uncased characters and lowercase characters only '
'cased ones.\n'
- ' Return "False" otherwise.\n'
+ ' Return "False" otherwise.\n'
'\n'
'str.isupper()\n'
'\n'
- ' Return "True" if all cased characters [4] in the string '
- 'are\n'
- ' uppercase and there is at least one cased character, '
- '"False"\n'
- ' otherwise.\n'
- '\n'
- " >>> 'BANANA'.isupper()\n"
- ' True\n'
- " >>> 'banana'.isupper()\n"
- ' False\n'
- " >>> 'baNana'.isupper()\n"
- ' False\n'
- " >>> ' '.isupper()\n"
- ' False\n'
- '\n'
+ ' Return "True" if all cased characters [4] in the string '
+ 'are\n'
+ ' uppercase and there is at least one cased character, '
+ '"False"\n'
+ ' otherwise.\n'
+ '\n'
+ " >>> 'BANANA'.isupper()\n"
+ ' True\n'
+ " >>> 'banana'.isupper()\n"
+ ' False\n'
+ " >>> 'baNana'.isupper()\n"
+ ' False\n'
+ " >>> ' '.isupper()\n"
+ ' False\n'
+ '\n'
'str.join(iterable)\n'
'\n'
' Return a string which is the concatenation of the '
@@ -10695,16 +10695,16 @@ topics = {'assert': 'The "assert" statement\n'
" >>> 'www.example.com'.lstrip('cmowz.')\n"
" 'example.com'\n"
'\n'
- ' See "str.removeprefix()" for a method that will remove '
- 'a single\n'
- ' prefix string rather than all of a set of characters. '
- 'For example:\n'
- '\n'
- " >>> 'Arthur: three!'.lstrip('Arthur: ')\n"
- " 'ee!'\n"
- " >>> 'Arthur: three!'.removeprefix('Arthur: ')\n"
- " 'three!'\n"
- '\n'
+ ' See "str.removeprefix()" for a method that will remove '
+ 'a single\n'
+ ' prefix string rather than all of a set of characters. '
+ 'For example:\n'
+ '\n'
+ " >>> 'Arthur: three!'.lstrip('Arthur: ')\n"
+ " 'ee!'\n"
+ " >>> 'Arthur: three!'.removeprefix('Arthur: ')\n"
+ " 'three!'\n"
+ '\n'
'static str.maketrans(x[, y[, z]])\n'
'\n'
' This static method returns a translation table usable '
@@ -10741,35 +10741,35 @@ topics = {'assert': 'The "assert" statement\n'
'followed by\n'
' two empty strings.\n'
'\n'
- 'str.removeprefix(prefix, /)\n'
- '\n'
- ' If the string starts with the *prefix* string, return\n'
- ' "string[len(prefix):]". Otherwise, return a copy of the '
- 'original\n'
- ' string:\n'
- '\n'
- " >>> 'TestHook'.removeprefix('Test')\n"
- " 'Hook'\n"
- " >>> 'BaseTestCase'.removeprefix('Test')\n"
- " 'BaseTestCase'\n"
- '\n'
- ' New in version 3.9.\n'
- '\n'
- 'str.removesuffix(suffix, /)\n'
- '\n'
- ' If the string ends with the *suffix* string and that '
- '*suffix* is\n'
- ' not empty, return "string[:-len(suffix)]". Otherwise, '
- 'return a copy\n'
- ' of the original string:\n'
- '\n'
- " >>> 'MiscTests'.removesuffix('Tests')\n"
- " 'Misc'\n"
- " >>> 'TmpDirMixin'.removesuffix('Tests')\n"
- " 'TmpDirMixin'\n"
- '\n'
- ' New in version 3.9.\n'
- '\n'
+ 'str.removeprefix(prefix, /)\n'
+ '\n'
+ ' If the string starts with the *prefix* string, return\n'
+ ' "string[len(prefix):]". Otherwise, return a copy of the '
+ 'original\n'
+ ' string:\n'
+ '\n'
+ " >>> 'TestHook'.removeprefix('Test')\n"
+ " 'Hook'\n"
+ " >>> 'BaseTestCase'.removeprefix('Test')\n"
+ " 'BaseTestCase'\n"
+ '\n'
+ ' New in version 3.9.\n'
+ '\n'
+ 'str.removesuffix(suffix, /)\n'
+ '\n'
+ ' If the string ends with the *suffix* string and that '
+ '*suffix* is\n'
+ ' not empty, return "string[:-len(suffix)]". Otherwise, '
+ 'return a copy\n'
+ ' of the original string:\n'
+ '\n'
+ " >>> 'MiscTests'.removesuffix('Tests')\n"
+ " 'Misc'\n"
+ " >>> 'TmpDirMixin'.removesuffix('Tests')\n"
+ " 'TmpDirMixin'\n"
+ '\n'
+ ' New in version 3.9.\n'
+ '\n'
'str.replace(old, new[, count])\n'
'\n'
' Return a copy of the string with all occurrences of '
@@ -10816,7 +10816,7 @@ topics = {'assert': 'The "assert" statement\n'
'followed by\n'
' the string itself.\n'
'\n'
- 'str.rsplit(sep=None, maxsplit=-1)\n'
+ 'str.rsplit(sep=None, maxsplit=-1)\n'
'\n'
' Return a list of the words in the string, using *sep* '
'as the\n'
@@ -10847,17 +10847,17 @@ topics = {'assert': 'The "assert" statement\n'
" >>> 'mississippi'.rstrip('ipz')\n"
" 'mississ'\n"
'\n'
- ' See "str.removesuffix()" for a method that will remove '
- 'a single\n'
- ' suffix string rather than all of a set of characters. '
- 'For example:\n'
- '\n'
- " >>> 'Monty Python'.rstrip(' Python')\n"
- " 'M'\n"
- " >>> 'Monty Python'.removesuffix(' Python')\n"
- " 'Monty'\n"
- '\n'
- 'str.split(sep=None, maxsplit=-1)\n'
+ ' See "str.removesuffix()" for a method that will remove '
+ 'a single\n'
+ ' suffix string rather than all of a set of characters. '
+ 'For example:\n'
+ '\n'
+ " >>> 'Monty Python'.rstrip(' Python')\n"
+ " 'M'\n"
+ " >>> 'Monty Python'.removesuffix(' Python')\n"
+ " 'Monty'\n"
+ '\n'
+ 'str.split(sep=None, maxsplit=-1)\n'
'\n'
' Return a list of the words in the string, using *sep* '
'as the\n'
@@ -10912,7 +10912,7 @@ topics = {'assert': 'The "assert" statement\n'
" >>> ' 1 2 3 '.split()\n"
" ['1', '2', '3']\n"
'\n'
- 'str.splitlines(keepends=False)\n'
+ 'str.splitlines(keepends=False)\n'
'\n'
' Return a list of the lines in the string, breaking at '
'line\n'
@@ -10930,7 +10930,7 @@ topics = {'assert': 'The "assert" statement\n'
' | Representation | '
'Description |\n'
' '
- '|=========================|===============================|\n'
+ '|=========================|===============================|\n'
' | "\\n" | Line '
'Feed |\n'
' '
@@ -11092,7 +11092,7 @@ topics = {'assert': 'The "assert" statement\n'
' >>> def titlecase(s):\n'
' ... return re.sub(r"[A-Za-z]+(\'[A-Za-z]+)?",\n'
' ... lambda mo: '
- 'mo.group(0).capitalize(),\n'
+ 'mo.group(0).capitalize(),\n'
' ... s)\n'
' ...\n'
' >>> titlecase("they\'re bill\'s friends.")\n'
@@ -11269,7 +11269,7 @@ topics = {'assert': 'The "assert" statement\n'
'+-------------------+-----------------------------------+---------+\n'
'| Escape Sequence | Meaning | Notes '
'|\n'
- '|===================|===================================|=========|\n'
+ '|===================|===================================|=========|\n'
'| "\\newline" | Backslash and newline ignored '
'| |\n'
'+-------------------+-----------------------------------+---------+\n'
@@ -11315,7 +11315,7 @@ topics = {'assert': 'The "assert" statement\n'
'+-------------------+-----------------------------------+---------+\n'
'| Escape Sequence | Meaning | Notes '
'|\n'
- '|===================|===================================|=========|\n'
+ '|===================|===================================|=========|\n'
'| "\\N{name}" | Character named *name* in the | '
'(4) |\n'
'| | Unicode database | '
@@ -11338,20 +11338,20 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'2. Unlike in Standard C, exactly two hex digits are required.\n'
'\n'
- '3. In a bytes literal, hexadecimal and octal escapes denote the '
- 'byte\n'
- ' with the given value. In a string literal, these escapes '
- 'denote a\n'
- ' Unicode character with the given value.\n'
+ '3. In a bytes literal, hexadecimal and octal escapes denote the '
+ 'byte\n'
+ ' with the given value. In a string literal, these escapes '
+ 'denote a\n'
+ ' Unicode character with the given value.\n'
'\n'
'4. Changed in version 3.3: Support for name aliases [1] has been\n'
' added.\n'
'\n'
'5. Exactly four hex digits are required.\n'
'\n'
- '6. Any Unicode character can be encoded this way. Exactly eight '
- 'hex\n'
- ' digits are required.\n'
+ '6. Any Unicode character can be encoded this way. Exactly eight '
+ 'hex\n'
+ ' digits are required.\n'
'\n'
'Unlike Standard C, all unrecognized escape sequences are left in '
'the\n'
@@ -11369,9 +11369,9 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
' Changed in version 3.6: Unrecognized escape sequences produce '
'a\n'
- ' "DeprecationWarning". In a future Python version they will be '
- 'a\n'
- ' "SyntaxWarning" and eventually a "SyntaxError".\n'
+ ' "DeprecationWarning". In a future Python version they will be '
+ 'a\n'
+ ' "SyntaxWarning" and eventually a "SyntaxError".\n'
'\n'
'Even in a raw literal, quotes can be escaped with a backslash, '
'but the\n'
@@ -11393,10 +11393,10 @@ topics = {'assert': 'The "assert" statement\n'
'subscriptions': 'Subscriptions\n'
'*************\n'
'\n'
- 'Subscription of a sequence (string, tuple or list) or '
- 'mapping\n'
- '(dictionary) object usually selects an item from the '
- 'collection:\n'
+ 'Subscription of a sequence (string, tuple or list) or '
+ 'mapping\n'
+ '(dictionary) object usually selects an item from the '
+ 'collection:\n'
'\n'
' subscription ::= primary "[" expression_list "]"\n'
'\n'
@@ -11447,13 +11447,13 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'A string’s items are characters. A character is not a '
'separate data\n'
- 'type but a string of exactly one character.\n'
- '\n'
- 'Subscription of certain *classes* or *types* creates a '
- 'generic alias.\n'
- 'In this case, user-defined classes can support subscription '
- 'by\n'
- 'providing a "__class_getitem__()" classmethod.\n',
+ 'type but a string of exactly one character.\n'
+ '\n'
+ 'Subscription of certain *classes* or *types* creates a '
+ 'generic alias.\n'
+ 'In this case, user-defined classes can support subscription '
+ 'by\n'
+ 'providing a "__class_getitem__()" classmethod.\n',
'truth': 'Truth Value Testing\n'
'*******************\n'
'\n'
@@ -11511,8 +11511,8 @@ topics = {'assert': 'The "assert" statement\n'
'object is “compatible” with the exception. An object is compatible\n'
'with an exception if it is the class or a base class of the '
'exception\n'
- 'object, or a tuple containing an item that is the class or a base\n'
- 'class of the exception object.\n'
+ 'object, or a tuple containing an item that is the class or a base\n'
+ 'class of the exception object.\n'
'\n'
'If no except clause matches the exception, the search for an '
'exception\n'
@@ -11590,9 +11590,9 @@ topics = {'assert': 'The "assert" statement\n'
'clause. If the "finally" clause raises another exception, the saved\n'
'exception is set as the context of the new exception. If the '
'"finally"\n'
- 'clause executes a "return", "break" or "continue" statement, the '
- 'saved\n'
- 'exception is discarded:\n'
+ 'clause executes a "return", "break" or "continue" statement, the '
+ 'saved\n'
+ 'exception is discarded:\n'
'\n'
' >>> def f():\n'
' ... try:\n'
@@ -11608,7 +11608,7 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'When a "return", "break" or "continue" statement is executed in the\n'
'"try" suite of a "try"…"finally" statement, the "finally" clause is\n'
- 'also executed ‘on the way out.’\n'
+ 'also executed ‘on the way out.’\n'
'\n'
'The return value of a function is determined by the last "return"\n'
'statement executed. Since the "finally" clause always executes, a\n'
@@ -11628,11 +11628,11 @@ topics = {'assert': 'The "assert" statement\n'
'Additional information on exceptions can be found in section\n'
'Exceptions, and information on using the "raise" statement to '
'generate\n'
- 'exceptions may be found in section The raise statement.\n'
- '\n'
- 'Changed in version 3.8: Prior to Python 3.8, a "continue" statement\n'
- 'was illegal in the "finally" clause due to a problem with the\n'
- 'implementation.\n',
+ 'exceptions may be found in section The raise statement.\n'
+ '\n'
+ 'Changed in version 3.8: Prior to Python 3.8, a "continue" statement\n'
+ 'was illegal in the "finally" clause due to a problem with the\n'
+ 'implementation.\n',
'types': 'The standard type hierarchy\n'
'***************************\n'
'\n'
@@ -11672,17 +11672,17 @@ topics = {'assert': 'The "assert" statement\n'
'for\n'
' the operands provided. (The interpreter will then try the\n'
' reflected operation, or some other fallback, depending on the\n'
- ' operator.) It should not be evaluated in a boolean context.\n'
+ ' operator.) It should not be evaluated in a boolean context.\n'
'\n'
' See Implementing the arithmetic operations for more details.\n'
'\n'
- ' Changed in version 3.9: Evaluating "NotImplemented" in a '
- 'boolean\n'
- ' context is deprecated. While it currently evaluates as true, it\n'
- ' will emit a "DeprecationWarning". It will raise a "TypeError" in '
- 'a\n'
- ' future version of Python.\n'
- '\n'
+ ' Changed in version 3.9: Evaluating "NotImplemented" in a '
+ 'boolean\n'
+ ' context is deprecated. While it currently evaluates as true, it\n'
+ ' will emit a "DeprecationWarning". It will raise a "TypeError" in '
+ 'a\n'
+ ' future version of Python.\n'
+ '\n'
'Ellipsis\n'
' This type has a single value. There is a single object with '
'this\n'
@@ -11700,27 +11700,27 @@ topics = {'assert': 'The "assert" statement\n'
'representation\n'
' in computers.\n'
'\n'
- ' The string representations of the numeric classes, computed by\n'
- ' "__repr__()" and "__str__()", have the following properties:\n'
- '\n'
- ' * They are valid numeric literals which, when passed to their '
- 'class\n'
- ' constructor, produce an object having the value of the '
- 'original\n'
- ' numeric.\n'
- '\n'
- ' * The representation is in base 10, when possible.\n'
- '\n'
- ' * Leading zeros, possibly excepting a single zero before a '
- 'decimal\n'
- ' point, are not shown.\n'
- '\n'
- ' * Trailing zeros, possibly excepting a single zero after a '
- 'decimal\n'
- ' point, are not shown.\n'
- '\n'
- ' * A sign is shown only when the number is negative.\n'
- '\n'
+ ' The string representations of the numeric classes, computed by\n'
+ ' "__repr__()" and "__str__()", have the following properties:\n'
+ '\n'
+ ' * They are valid numeric literals which, when passed to their '
+ 'class\n'
+ ' constructor, produce an object having the value of the '
+ 'original\n'
+ ' numeric.\n'
+ '\n'
+ ' * The representation is in base 10, when possible.\n'
+ '\n'
+ ' * Leading zeros, possibly excepting a single zero before a '
+ 'decimal\n'
+ ' point, are not shown.\n'
+ '\n'
+ ' * Trailing zeros, possibly excepting a single zero after a '
+ 'decimal\n'
+ ' point, are not shown.\n'
+ '\n'
+ ' * A sign is shown only when the number is negative.\n'
+ '\n'
' Python distinguishes between integers, floating point numbers, '
'and\n'
' complex numbers:\n'
@@ -11829,7 +11829,7 @@ topics = {'assert': 'The "assert" statement\n'
' points. All the code points in the range "U+0000 - '
'U+10FFFF"\n'
' can be represented in a string. Python doesn’t have a '
- '"char"\n'
+ '"char"\n'
' type; instead, every code point in the string is '
'represented\n'
' as a string object with length "1". The built-in '
@@ -11961,16 +11961,16 @@ topics = {'assert': 'The "assert" statement\n'
' then they can be used interchangeably to index the same\n'
' dictionary entry.\n'
'\n'
- ' Dictionaries preserve insertion order, meaning that keys will '
- 'be\n'
- ' produced in the same order they were added sequentially over '
- 'the\n'
- ' dictionary. Replacing an existing key does not change the '
- 'order,\n'
- ' however removing a key and re-inserting it will add it to '
- 'the\n'
- ' end instead of keeping its old place.\n'
- '\n'
+ ' Dictionaries preserve insertion order, meaning that keys will '
+ 'be\n'
+ ' produced in the same order they were added sequentially over '
+ 'the\n'
+ ' dictionary. Replacing an existing key does not change the '
+ 'order,\n'
+ ' however removing a key and re-inserting it will add it to '
+ 'the\n'
+ ' end instead of keeping its old place.\n'
+ '\n'
' Dictionaries are mutable; they can be created by the "{...}"\n'
' notation (see section Dictionary displays).\n'
'\n'
@@ -11979,13 +11979,13 @@ topics = {'assert': 'The "assert" statement\n'
'"collections"\n'
' module.\n'
'\n'
- ' Changed in version 3.7: Dictionaries did not preserve '
- 'insertion\n'
- ' order in versions of Python before 3.6. In CPython 3.6,\n'
- ' insertion order was preserved, but it was considered an\n'
- ' implementation detail at that time rather than a language\n'
- ' guarantee.\n'
- '\n'
+ ' Changed in version 3.7: Dictionaries did not preserve '
+ 'insertion\n'
+ ' order in versions of Python before 3.6. In CPython 3.6,\n'
+ ' insertion order was preserved, but it was considered an\n'
+ ' implementation detail at that time rather than a language\n'
+ ' guarantee.\n'
+ '\n'
'Callable types\n'
' These are the types to which the function call operation (see\n'
' section Calls) can be applied:\n'
@@ -12004,24 +12004,24 @@ topics = {'assert': 'The "assert" statement\n'
' | Attribute | Meaning '
'| |\n'
' '
- '|===========================|=================================|=============|\n'
+ '|===========================|=================================|=============|\n'
' | "__doc__" | The function’s documentation '
'| Writable |\n'
' | | string, or "None" if '
'| |\n'
' | | unavailable; not inherited by '
'| |\n'
- ' | | subclasses. '
+ ' | | subclasses. '
'| |\n'
' '
'+---------------------------+---------------------------------+-------------+\n'
- ' | "__name__" | The function’s name. '
+ ' | "__name__" | The function’s name. '
'| Writable |\n'
' '
'+---------------------------+---------------------------------+-------------+\n'
- ' | "__qualname__" | The function’s *qualified '
+ ' | "__qualname__" | The function’s *qualified '
'| Writable |\n'
- ' | | name*. New in version 3.3. '
+ ' | | name*. New in version 3.3. '
'| |\n'
' '
'+---------------------------+---------------------------------+-------------+\n'
@@ -12041,7 +12041,7 @@ topics = {'assert': 'The "assert" statement\n'
'| |\n'
' | | or "None" if no arguments have '
'| |\n'
- ' | | a default value. '
+ ' | | a default value. '
'| |\n'
' '
'+---------------------------+---------------------------------+-------------+\n'
@@ -12122,8 +12122,8 @@ topics = {'assert': 'The "assert" statement\n'
' Additional information about a function’s definition can be\n'
' retrieved from its code object; see the description of '
'internal\n'
- ' types below. The "cell" type can be accessed in the "types"\n'
- ' module.\n'
+ ' types below. The "cell" type can be accessed in the "types"\n'
+ ' module.\n'
'\n'
' Instance methods\n'
' An instance method object combines a class, a class instance '
@@ -12243,14 +12243,14 @@ topics = {'assert': 'The "assert" statement\n'
'for"\n'
' statement to execute the body of the function.\n'
'\n'
- ' Calling the asynchronous iterator’s "aiterator.__anext__" '
- 'method\n'
- ' will return an *awaitable* which when awaited will execute '
- 'until\n'
- ' it provides a value using the "yield" expression. When the\n'
- ' function executes an empty "return" statement or falls off '
+ ' Calling the asynchronous iterator’s "aiterator.__anext__" '
+ 'method\n'
+ ' will return an *awaitable* which when awaited will execute '
+ 'until\n'
+ ' it provides a value using the "yield" expression. When the\n'
+ ' function executes an empty "return" statement or falls off '
'the\n'
- ' end, a "StopAsyncIteration" exception is raised and the\n'
+ ' end, a "StopAsyncIteration" exception is raised and the\n'
' asynchronous iterator will have reached the end of the set '
'of\n'
' values to be yielded.\n'
@@ -12481,37 +12481,37 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
' Special read-only attributes: "co_name" gives the function '
'name;\n'
- ' "co_argcount" is the total number of positional arguments\n'
- ' (including positional-only arguments and arguments with '
- 'default\n'
- ' values); "co_posonlyargcount" is the number of '
- 'positional-only\n'
- ' arguments (including arguments with default values);\n'
- ' "co_kwonlyargcount" is the number of keyword-only arguments\n'
- ' (including arguments with default values); "co_nlocals" is '
+ ' "co_argcount" is the total number of positional arguments\n'
+ ' (including positional-only arguments and arguments with '
+ 'default\n'
+ ' values); "co_posonlyargcount" is the number of '
+ 'positional-only\n'
+ ' arguments (including arguments with default values);\n'
+ ' "co_kwonlyargcount" is the number of keyword-only arguments\n'
+ ' (including arguments with default values); "co_nlocals" is '
'the\n'
- ' number of local variables used by the function (including\n'
- ' arguments); "co_varnames" is a tuple containing the names of '
- 'the\n'
- ' local variables (starting with the argument names);\n'
- ' "co_cellvars" is a tuple containing the names of local '
- 'variables\n'
- ' that are referenced by nested functions; "co_freevars" is a\n'
- ' tuple containing the names of free variables; "co_code" is a\n'
- ' string representing the sequence of bytecode instructions;\n'
- ' "co_consts" is a tuple containing the literals used by the\n'
- ' bytecode; "co_names" is a tuple containing the names used by '
- 'the\n'
- ' bytecode; "co_filename" is the filename from which the code '
- 'was\n'
- ' compiled; "co_firstlineno" is the first line number of the\n'
- ' function; "co_lnotab" is a string encoding the mapping from\n'
- ' bytecode offsets to line numbers (for details see the source\n'
- ' code of the interpreter); "co_stacksize" is the required '
- 'stack\n'
- ' size; "co_flags" is an integer encoding a number of flags '
- 'for\n'
- ' the interpreter.\n'
+ ' number of local variables used by the function (including\n'
+ ' arguments); "co_varnames" is a tuple containing the names of '
+ 'the\n'
+ ' local variables (starting with the argument names);\n'
+ ' "co_cellvars" is a tuple containing the names of local '
+ 'variables\n'
+ ' that are referenced by nested functions; "co_freevars" is a\n'
+ ' tuple containing the names of free variables; "co_code" is a\n'
+ ' string representing the sequence of bytecode instructions;\n'
+ ' "co_consts" is a tuple containing the literals used by the\n'
+ ' bytecode; "co_names" is a tuple containing the names used by '
+ 'the\n'
+ ' bytecode; "co_filename" is the filename from which the code '
+ 'was\n'
+ ' compiled; "co_firstlineno" is the first line number of the\n'
+ ' function; "co_lnotab" is a string encoding the mapping from\n'
+ ' bytecode offsets to line numbers (for details see the source\n'
+ ' code of the interpreter); "co_stacksize" is the required '
+ 'stack\n'
+ ' size; "co_flags" is an integer encoding a number of flags '
+ 'for\n'
+ ' the interpreter.\n'
'\n'
' The following flag bits are defined for "co_flags": bit '
'"0x04"\n'
@@ -12559,10 +12559,10 @@ topics = {'assert': 'The "assert" statement\n'
' gives the precise instruction (this is an index into the\n'
' bytecode string of the code object).\n'
'\n'
- ' Accessing "f_code" raises an auditing event '
- '"object.__getattr__"\n'
- ' with arguments "obj" and ""f_code"".\n'
- '\n'
+ ' Accessing "f_code" raises an auditing event '
+ '"object.__getattr__"\n'
+ ' with arguments "obj" and ""f_code"".\n'
+ '\n'
' Special writable attributes: "f_trace", if not "None", is a\n'
' function called for various events during code execution '
'(this\n'
@@ -12646,9 +12646,9 @@ topics = {'assert': 'The "assert" statement\n'
' the exception occurred in a "try" statement with no matching\n'
' except clause or with a finally clause.\n'
'\n'
- ' Accessing "tb_frame" raises an auditing event\n'
- ' "object.__getattr__" with arguments "obj" and ""tb_frame"".\n'
- '\n'
+ ' Accessing "tb_frame" raises an auditing event\n'
+ ' "object.__getattr__" with arguments "obj" and ""tb_frame"".\n'
+ '\n'
' Special writable attribute: "tb_next" is the next level in '
'the\n'
' stack trace (towards the frame where the exception occurred), '
@@ -12770,29 +12770,29 @@ topics = {'assert': 'The "assert" statement\n'
'"dict"\n'
'constructor.\n'
'\n'
- 'class dict(**kwargs)\n'
- 'class dict(mapping, **kwargs)\n'
- 'class dict(iterable, **kwargs)\n'
+ 'class dict(**kwargs)\n'
+ 'class dict(mapping, **kwargs)\n'
+ 'class dict(iterable, **kwargs)\n'
'\n'
' Return a new dictionary initialized from an optional '
'positional\n'
' argument and a possibly empty set of keyword arguments.\n'
'\n'
- ' Dictionaries can be created by several means:\n'
- '\n'
- ' * Use a comma-separated list of "key: value" pairs within '
- 'braces:\n'
- ' "{\'jack\': 4098, \'sjoerd\': 4127}" or "{4098: '
- "'jack', 4127:\n"
- ' \'sjoerd\'}"\n'
- '\n'
- ' * Use a dict comprehension: "{}", "{x: x ** 2 for x in '
- 'range(10)}"\n'
- '\n'
- ' * Use the type constructor: "dict()", "dict([(\'foo\', '
- "100), ('bar',\n"
- ' 200)])", "dict(foo=100, bar=200)"\n'
- '\n'
+ ' Dictionaries can be created by several means:\n'
+ '\n'
+ ' * Use a comma-separated list of "key: value" pairs within '
+ 'braces:\n'
+ ' "{\'jack\': 4098, \'sjoerd\': 4127}" or "{4098: '
+ "'jack', 4127:\n"
+ ' \'sjoerd\'}"\n'
+ '\n'
+ ' * Use a dict comprehension: "{}", "{x: x ** 2 for x in '
+ 'range(10)}"\n'
+ '\n'
+ ' * Use the type constructor: "dict()", "dict([(\'foo\', '
+ "100), ('bar',\n"
+ ' 200)])", "dict(foo=100, bar=200)"\n'
+ '\n'
' If no positional argument is given, an empty dictionary '
'is created.\n'
' If a positional argument is given and it is a mapping '
@@ -12830,8 +12830,8 @@ topics = {'assert': 'The "assert" statement\n'
" >>> c = dict(zip(['one', 'two', 'three'], [1, 2, 3]))\n"
" >>> d = dict([('two', 2), ('one', 1), ('three', 3)])\n"
" >>> e = dict({'three': 3, 'one': 1, 'two': 2})\n"
- " >>> f = dict({'one': 1, 'three': 3}, two=2)\n"
- ' >>> a == b == c == d == e == f\n'
+ " >>> f = dict({'one': 1, 'three': 3}, two=2)\n"
+ ' >>> a == b == c == d == e == f\n'
' True\n'
'\n'
' Providing keyword arguments as in the first example only '
@@ -12844,11 +12844,11 @@ topics = {'assert': 'The "assert" statement\n'
'therefore,\n'
' custom mapping types should support too):\n'
'\n'
- ' list(d)\n'
- '\n'
- ' Return a list of all the keys used in the dictionary '
- '*d*.\n'
- '\n'
+ ' list(d)\n'
+ '\n'
+ ' Return a list of all the keys used in the dictionary '
+ '*d*.\n'
+ '\n'
' len(d)\n'
'\n'
' Return the number of items in the dictionary *d*.\n'
@@ -12929,13 +12929,13 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
' "fromkeys()" is a class method that returns a new '
'dictionary.\n'
- ' *value* defaults to "None". All of the values refer '
- 'to just a\n'
- ' single instance, so it generally doesn’t make sense '
- 'for *value*\n'
- ' to be a mutable object such as an empty list. To get '
- 'distinct\n'
- ' values, use a dict comprehension instead.\n'
+ ' *value* defaults to "None". All of the values refer '
+ 'to just a\n'
+ ' single instance, so it generally doesn’t make sense '
+ 'for *value*\n'
+ ' to be a mutable object such as an empty list. To get '
+ 'distinct\n'
+ ' values, use a dict comprehension instead.\n'
'\n'
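A minimal sketch of the "fromkeys()" pitfall described above: a mutable *value* is shared by every key, so a dict comprehension is the way to get distinct values:

    shared = dict.fromkeys(['a', 'b'], [])
    shared['a'].append(1)
    print(shared)    # {'a': [1], 'b': [1]} -- one list shared by both keys

    distinct = {key: [] for key in ['a', 'b']}
    distinct['a'].append(1)
    print(distinct)  # {'a': [1], 'b': []} -- each key gets its own list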
' get(key[, default])\n'
'\n'
@@ -12981,14 +12981,14 @@ topics = {'assert': 'The "assert" statement\n'
' versions, "popitem()" would return an arbitrary '
'key/value pair.\n'
'\n'
- ' reversed(d)\n'
- '\n'
- ' Return a reverse iterator over the keys of the '
- 'dictionary. This\n'
- ' is a shortcut for "reversed(d.keys())".\n'
- '\n'
- ' New in version 3.8.\n'
- '\n'
+ ' reversed(d)\n'
+ '\n'
+ ' Return a reverse iterator over the keys of the '
+ 'dictionary. This\n'
+ ' is a shortcut for "reversed(d.keys())".\n'
+ '\n'
+ ' New in version 3.8.\n'
+ '\n'
' setdefault(key[, default])\n'
'\n'
' If *key* is in the dictionary, return its value. If '
@@ -13019,44 +13019,44 @@ topics = {'assert': 'The "assert" statement\n'
'the\n'
' documentation of view objects.\n'
'\n'
- ' An equality comparison between one "dict.values()" '
- 'view and\n'
- ' another will always return "False". This also applies '
- 'when\n'
- ' comparing "dict.values()" to itself:\n'
- '\n'
- " >>> d = {'a': 1}\n"
- ' >>> d.values() == d.values()\n'
- ' False\n'
- '\n'
- ' d | other\n'
- '\n'
- ' Create a new dictionary with the merged keys and '
- 'values of *d*\n'
- ' and *other*, which must both be dictionaries. The '
- 'values of\n'
- ' *other* take priority when *d* and *other* share '
- 'keys.\n'
- '\n'
- ' New in version 3.9.\n'
- '\n'
- ' d |= other\n'
- '\n'
- ' Update the dictionary *d* with keys and values from '
- '*other*,\n'
- ' which may be either a *mapping* or an *iterable* of '
- 'key/value\n'
- ' pairs. The values of *other* take priority when *d* '
- 'and *other*\n'
- ' share keys.\n'
- '\n'
- ' New in version 3.9.\n'
- '\n'
+ ' An equality comparison between one "dict.values()" '
+ 'view and\n'
+ ' another will always return "False". This also applies '
+ 'when\n'
+ ' comparing "dict.values()" to itself:\n'
+ '\n'
+ " >>> d = {'a': 1}\n"
+ ' >>> d.values() == d.values()\n'
+ ' False\n'
+ '\n'
+ ' d | other\n'
+ '\n'
+ ' Create a new dictionary with the merged keys and '
+ 'values of *d*\n'
+ ' and *other*, which must both be dictionaries. The '
+ 'values of\n'
+ ' *other* take priority when *d* and *other* share '
+ 'keys.\n'
+ '\n'
+ ' New in version 3.9.\n'
+ '\n'
+ ' d |= other\n'
+ '\n'
+ ' Update the dictionary *d* with keys and values from '
+ '*other*,\n'
+ ' which may be either a *mapping* or an *iterable* of '
+ 'key/value\n'
+ ' pairs. The values of *other* take priority when *d* '
+ 'and *other*\n'
+ ' share keys.\n'
+ '\n'
+ ' New in version 3.9.\n'
+ '\n'
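A short illustrative sketch of the "d | other" and "d |= other" operators described above (the dictionary contents are made up for the example):

    defaults = {'colour': 'red', 'size': 'M'}
    overrides = {'size': 'L'}

    merged = defaults | overrides  # new dict; the right operand wins
    print(merged)                  # {'colour': 'red', 'size': 'L'}

    defaults |= overrides          # in-place update of "defaults"
    print(defaults)                # {'colour': 'red', 'size': 'L'}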
' Dictionaries compare equal if and only if they have the '
'same "(key,\n'
- ' value)" pairs (regardless of ordering). Order comparisons '
- '(‘<’,\n'
- ' ‘<=’, ‘>=’, ‘>’) raise "TypeError".\n'
+ ' value)" pairs (regardless of ordering). Order comparisons '
+ '(‘<’,\n'
+ ' ‘<=’, ‘>=’, ‘>’) raise "TypeError".\n'
'\n'
' Dictionaries preserve insertion order. Note that '
'updating a key\n'
@@ -13085,27 +13085,27 @@ topics = {'assert': 'The "assert" statement\n'
'detail of\n'
' CPython from 3.6.\n'
'\n'
- ' Dictionaries and dictionary views are reversible.\n'
- '\n'
- ' >>> d = {"one": 1, "two": 2, "three": 3, "four": 4}\n'
- ' >>> d\n'
- " {'one': 1, 'two': 2, 'three': 3, 'four': 4}\n"
- ' >>> list(reversed(d))\n'
- " ['four', 'three', 'two', 'one']\n"
- ' >>> list(reversed(d.values()))\n'
- ' [4, 3, 2, 1]\n'
- ' >>> list(reversed(d.items()))\n'
- " [('four', 4), ('three', 3), ('two', 2), ('one', 1)]\n"
- '\n'
- ' Changed in version 3.8: Dictionaries are now reversible.\n'
- '\n'
- 'See also:\n'
- '\n'
- ' "types.MappingProxyType" can be used to create a read-only '
- 'view of a\n'
- ' "dict".\n'
- '\n'
- '\n'
+ ' Dictionaries and dictionary views are reversible.\n'
+ '\n'
+ ' >>> d = {"one": 1, "two": 2, "three": 3, "four": 4}\n'
+ ' >>> d\n'
+ " {'one': 1, 'two': 2, 'three': 3, 'four': 4}\n"
+ ' >>> list(reversed(d))\n'
+ " ['four', 'three', 'two', 'one']\n"
+ ' >>> list(reversed(d.values()))\n'
+ ' [4, 3, 2, 1]\n'
+ ' >>> list(reversed(d.items()))\n'
+ " [('four', 4), ('three', 3), ('two', 2), ('one', 1)]\n"
+ '\n'
+ ' Changed in version 3.8: Dictionaries are now reversible.\n'
+ '\n'
+ 'See also:\n'
+ '\n'
+ ' "types.MappingProxyType" can be used to create a read-only '
+ 'view of a\n'
+ ' "dict".\n'
+ '\n'
+ '\n'
'Dictionary view objects\n'
'=======================\n'
'\n'
@@ -13155,17 +13155,17 @@ topics = {'assert': 'The "assert" statement\n'
'value)"\n'
' tuple).\n'
'\n'
- 'reversed(dictview)\n'
- '\n'
- ' Return a reverse iterator over the keys, values or items '
- 'of the\n'
- ' dictionary. The view will be iterated in reverse order of '
- 'the\n'
- ' insertion.\n'
- '\n'
- ' Changed in version 3.8: Dictionary views are now '
- 'reversible.\n'
- '\n'
+ 'reversed(dictview)\n'
+ '\n'
+ ' Return a reverse iterator over the keys, values or items '
+ 'of the\n'
+ ' dictionary. The view will be iterated in reverse order of '
+ 'the\n'
+ ' insertion.\n'
+ '\n'
+ ' Changed in version 3.8: Dictionary views are now '
+ 'reversible.\n'
+ '\n'
'Keys views are set-like since their entries are unique and '
'hashable.\n'
'If all values are hashable, so that "(key, value)" pairs are '
@@ -13344,7 +13344,7 @@ topics = {'assert': 'The "assert" statement\n'
'+----------------------------+----------------------------------+------------+\n'
'| Operation | Result '
'| Notes |\n'
- '|============================|==================================|============|\n'
+ '|============================|==================================|============|\n'
'| "x in s" | "True" if an item of *s* is '
'| (1) |\n'
'| | equal to *x*, else "False" '
@@ -13487,14 +13487,14 @@ topics = {'assert': 'The "assert" statement\n'
'"None", it\n'
' is treated like "1".\n'
'\n'
- '6. Concatenating immutable sequences always results in a new '
- 'object.\n'
- ' This means that building up a sequence by repeated '
- 'concatenation\n'
- ' will have a quadratic runtime cost in the total sequence '
- 'length.\n'
- ' To get a linear runtime cost, you must switch to one of the\n'
- ' alternatives below:\n'
+ '6. Concatenating immutable sequences always results in a new '
+ 'object.\n'
+ ' This means that building up a sequence by repeated '
+ 'concatenation\n'
+ ' will have a quadratic runtime cost in the total sequence '
+ 'length.\n'
+ ' To get a linear runtime cost, you must switch to one of the\n'
+ ' alternatives below:\n'
'\n'
' * if concatenating "str" objects, you can build a list and '
'use\n'
@@ -13512,27 +13512,27 @@ topics = {'assert': 'The "assert" statement\n'
' * for other types, investigate the relevant class '
'documentation\n'
'\n'
- '7. Some sequence types (such as "range") only support item '
- 'sequences\n'
- ' that follow specific patterns, and hence don’t support '
+ '7. Some sequence types (such as "range") only support item '
+ 'sequences\n'
+ ' that follow specific patterns, and hence don’t support '
'sequence\n'
- ' concatenation or repetition.\n'
- '\n'
- '8. "index" raises "ValueError" when *x* is not found in *s*. Not '
- 'all\n'
- ' implementations support passing the additional arguments *i* '
- 'and\n'
- ' *j*. These arguments allow efficient searching of subsections '
- 'of\n'
- ' the sequence. Passing the extra arguments is roughly '
- 'equivalent to\n'
- ' using "s[i:j].index(x)", only without copying any data and '
- 'with the\n'
- ' returned index being relative to the start of the sequence '
- 'rather\n'
- ' than the start of the slice.\n'
- '\n'
- '\n'
+ ' concatenation or repetition.\n'
+ '\n'
+ '8. "index" raises "ValueError" when *x* is not found in *s*. Not '
+ 'all\n'
+ ' implementations support passing the additional arguments *i* '
+ 'and\n'
+ ' *j*. These arguments allow efficient searching of subsections '
+ 'of\n'
+ ' the sequence. Passing the extra arguments is roughly '
+ 'equivalent to\n'
+ ' using "s[i:j].index(x)", only without copying any data and '
+ 'with the\n'
+ ' returned index being relative to the start of the sequence '
+ 'rather\n'
+ ' than the start of the slice.\n'
+ '\n'
+ '\n'
'Immutable Sequence Types\n'
'========================\n'
'\n'
@@ -13575,7 +13575,7 @@ topics = {'assert': 'The "assert" statement\n'
'+--------------------------------+----------------------------------+-----------------------+\n'
'| Operation | '
'Result | Notes |\n'
- '|================================|==================================|=======================|\n'
+ '|================================|==================================|=======================|\n'
'| "s[i] = x" | item *i* of *s* is replaced '
'by | |\n'
'| | '
@@ -13637,7 +13637,7 @@ topics = {'assert': 'The "assert" statement\n'
'| | "s[i:i] = '
'[x]") | |\n'
'+--------------------------------+----------------------------------+-----------------------+\n'
- '| "s.pop()" or "s.pop(i)" | retrieves the item at *i* '
+ '| "s.pop()" or "s.pop(i)" | retrieves the item at *i* '
'and | (2) |\n'
'| | also removes it from '
'*s* | |\n'
@@ -13658,25 +13658,25 @@ topics = {'assert': 'The "assert" statement\n'
'1. *t* must have the same length as the slice it is replacing.\n'
'\n'
'2. The optional argument *i* defaults to "-1", so that by '
- 'default the\n'
- ' last item is removed and returned.\n'
+ 'default the\n'
+ ' last item is removed and returned.\n'
'\n'
- '3. "remove()" raises "ValueError" when *x* is not found in *s*.\n'
+ '3. "remove()" raises "ValueError" when *x* is not found in *s*.\n'
'\n'
- '4. The "reverse()" method modifies the sequence in place for '
- 'economy\n'
- ' of space when reversing a large sequence. To remind users '
- 'that it\n'
- ' operates by side effect, it does not return the reversed '
- 'sequence.\n'
+ '4. The "reverse()" method modifies the sequence in place for '
+ 'economy\n'
+ ' of space when reversing a large sequence. To remind users '
+ 'that it\n'
+ ' operates by side effect, it does not return the reversed '
+ 'sequence.\n'
'\n'
'5. "clear()" and "copy()" are included for consistency with the\n'
' interfaces of mutable containers that don’t support slicing\n'
- ' operations (such as "dict" and "set"). "copy()" is not part '
- 'of the\n'
- ' "collections.abc.MutableSequence" ABC, but most concrete '
- 'mutable\n'
- ' sequence classes provide it.\n'
+ ' operations (such as "dict" and "set"). "copy()" is not part '
+ 'of the\n'
+ ' "collections.abc.MutableSequence" ABC, but most concrete '
+ 'mutable\n'
+ ' sequence classes provide it.\n'
'\n'
' New in version 3.3: "clear()" and "copy()" methods.\n'
'\n'
@@ -13705,9 +13705,9 @@ topics = {'assert': 'The "assert" statement\n'
' * Using a pair of square brackets to denote the empty list: '
'"[]"\n'
'\n'
- ' * Using square brackets, separating items with commas: "[a]", '
- '"[a,\n'
- ' b, c]"\n'
+ ' * Using square brackets, separating items with commas: "[a]", '
+ '"[a,\n'
+ ' b, c]"\n'
'\n'
' * Using a list comprehension: "[x for x in iterable]"\n'
'\n'
@@ -13788,10 +13788,10 @@ topics = {'assert': 'The "assert" statement\n'
'salary\n'
' grade).\n'
'\n'
- ' For sorting examples and a brief sorting tutorial, see '
- 'Sorting\n'
- ' HOW TO.\n'
- '\n'
+ ' For sorting examples and a brief sorting tutorial, see '
+ 'Sorting\n'
+ ' HOW TO.\n'
+ '\n'
' **CPython implementation detail:** While a list is being '
'sorted,\n'
' the effect of attempting to mutate, or even inspect, the '
@@ -13877,8 +13877,8 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
' The arguments to the range constructor must be integers '
'(either\n'
- ' built-in "int" or any object that implements the '
- '"__index__()"\n'
+ ' built-in "int" or any object that implements the '
+ '"__index__()"\n'
' special method). If the *step* argument is omitted, it '
'defaults to\n'
' "1". If the *start* argument is omitted, it defaults to "0". '
@@ -14011,9 +14011,9 @@ topics = {'assert': 'The "assert" statement\n'
'\n'
'See also:\n'
'\n'
- ' * The linspace recipe shows how to implement a lazy version of '
- 'range\n'
- ' suitable for floating point applications.\n',
+ ' * The linspace recipe shows how to implement a lazy version of '
+ 'range\n'
+ ' suitable for floating point applications.\n',
'typesseq-mutable': 'Mutable Sequence Types\n'
'**********************\n'
'\n'
@@ -14038,7 +14038,7 @@ topics = {'assert': 'The "assert" statement\n'
'| Operation | '
'Result | Notes '
'|\n'
- '|================================|==================================|=======================|\n'
+ '|================================|==================================|=======================|\n'
'| "s[i] = x" | item *i* of *s* is '
'replaced by | |\n'
'| | '
@@ -14101,7 +14101,7 @@ topics = {'assert': 'The "assert" statement\n'
'| | "s[i:i] = '
'[x]") | |\n'
'+--------------------------------+----------------------------------+-----------------------+\n'
- '| "s.pop()" or "s.pop(i)" | retrieves the item at '
+ '| "s.pop()" or "s.pop(i)" | retrieves the item at '
'*i* and | (2) |\n'
'| | also removes it from '
'*s* | |\n'
@@ -14124,28 +14124,28 @@ topics = {'assert': 'The "assert" statement\n'
'replacing.\n'
'\n'
'2. The optional argument *i* defaults to "-1", so that '
- 'by default the\n'
- ' last item is removed and returned.\n'
+ 'by default the\n'
+ ' last item is removed and returned.\n'
'\n'
- '3. "remove()" raises "ValueError" when *x* is not found '
- 'in *s*.\n'
+ '3. "remove()" raises "ValueError" when *x* is not found '
+ 'in *s*.\n'
'\n'
'4. The "reverse()" method modifies the sequence in place '
- 'for economy\n'
- ' of space when reversing a large sequence. To remind '
- 'users that it\n'
- ' operates by side effect, it does not return the '
- 'reversed sequence.\n'
+ 'for economy\n'
+ ' of space when reversing a large sequence. To remind '
+ 'users that it\n'
+ ' operates by side effect, it does not return the '
+ 'reversed sequence.\n'
'\n'
'5. "clear()" and "copy()" are included for consistency '
'with the\n'
' interfaces of mutable containers that don’t support '
'slicing\n'
- ' operations (such as "dict" and "set"). "copy()" is '
- 'not part of the\n'
- ' "collections.abc.MutableSequence" ABC, but most '
- 'concrete mutable\n'
- ' sequence classes provide it.\n'
+ ' operations (such as "dict" and "set"). "copy()" is '
+ 'not part of the\n'
+ ' "collections.abc.MutableSequence" ABC, but most '
+ 'concrete mutable\n'
+ ' sequence classes provide it.\n'
'\n'
' New in version 3.3: "clear()" and "copy()" methods.\n'
'\n'
@@ -14166,21 +14166,21 @@ topics = {'assert': 'The "assert" statement\n'
' u_expr ::= power | "-" u_expr | "+" u_expr | "~" u_expr\n'
'\n'
'The unary "-" (minus) operator yields the negation of its numeric\n'
- 'argument; the operation can be overridden with the "__neg__()" '
- 'special\n'
- 'method.\n'
+ 'argument; the operation can be overridden with the "__neg__()" '
+ 'special\n'
+ 'method.\n'
'\n'
'The unary "+" (plus) operator yields its numeric argument '
- 'unchanged;\n'
- 'the operation can be overridden with the "__pos__()" special '
- 'method.\n'
+ 'unchanged;\n'
+ 'the operation can be overridden with the "__pos__()" special '
+ 'method.\n'
'\n'
'The unary "~" (invert) operator yields the bitwise inversion of '
'its\n'
'integer argument. The bitwise inversion of "x" is defined as\n'
- '"-(x+1)". It only applies to integral numbers or to custom '
- 'objects\n'
- 'that override the "__invert__()" special method.\n'
+ '"-(x+1)". It only applies to integral numbers or to custom '
+ 'objects\n'
+ 'that override the "__invert__()" special method.\n'
'\n'
'In all three cases, if the argument does not have the proper type, '
'a\n'
@@ -14191,7 +14191,7 @@ topics = {'assert': 'The "assert" statement\n'
'The "while" statement is used for repeated execution as long as an\n'
'expression is true:\n'
'\n'
- ' while_stmt ::= "while" assignment_expression ":" suite\n'
+ ' while_stmt ::= "while" assignment_expression ":" suite\n'
' ["else" ":" suite]\n'
'\n'
'This repeatedly tests the expression and, if it is true, executes '
@@ -14224,32 +14224,32 @@ topics = {'assert': 'The "assert" statement\n'
'The execution of the "with" statement with one “item” proceeds as\n'
'follows:\n'
'\n'
- '1. The context expression (the expression given in the "with_item") '
- 'is\n'
- ' evaluated to obtain a context manager.\n'
- '\n'
- '2. The context manager’s "__enter__()" is loaded for later use.\n'
+ '1. The context expression (the expression given in the "with_item") '
+ 'is\n'
+ ' evaluated to obtain a context manager.\n'
'\n'
- '3. The context manager’s "__exit__()" is loaded for later use.\n'
+ '2. The context manager’s "__enter__()" is loaded for later use.\n'
'\n'
- '4. The context manager’s "__enter__()" method is invoked.\n'
+ '3. The context manager’s "__exit__()" is loaded for later use.\n'
'\n'
- '5. If a target was included in the "with" statement, the return '
- 'value\n'
- ' from "__enter__()" is assigned to it.\n'
+ '4. The context manager’s "__enter__()" method is invoked.\n'
+ '\n'
+ '5. If a target was included in the "with" statement, the return '
+ 'value\n'
+ ' from "__enter__()" is assigned to it.\n'
'\n'
- ' Note:\n'
- '\n'
- ' The "with" statement guarantees that if the "__enter__()" '
- 'method\n'
- ' returns without an error, then "__exit__()" will always be\n'
+ ' Note:\n'
+ '\n'
+ ' The "with" statement guarantees that if the "__enter__()" '
+ 'method\n'
+ ' returns without an error, then "__exit__()" will always be\n'
' called. Thus, if an error occurs during the assignment to the\n'
' target list, it will be treated the same as an error occurring\n'
' within the suite would be. See step 6 below.\n'
'\n'
- '6. The suite is executed.\n'
+ '6. The suite is executed.\n'
'\n'
- '7. The context manager’s "__exit__()" method is invoked. If an\n'
+ '7. The context manager’s "__exit__()" method is invoked. If an\n'
' exception caused the suite to be exited, its type, value, and\n'
' traceback are passed as arguments to "__exit__()". Otherwise, '
'three\n'
@@ -14269,41 +14269,41 @@ topics = {'assert': 'The "assert" statement\n'
'proceeds\n'
' at the normal location for the kind of exit that was taken.\n'
'\n'
- 'The following code:\n'
- '\n'
- ' with EXPRESSION as TARGET:\n'
- ' SUITE\n'
- '\n'
- 'is semantically equivalent to:\n'
- '\n'
- ' manager = (EXPRESSION)\n'
- ' enter = type(manager).__enter__\n'
- ' exit = type(manager).__exit__\n'
- ' value = enter(manager)\n'
- ' hit_except = False\n'
- '\n'
- ' try:\n'
- ' TARGET = value\n'
- ' SUITE\n'
- ' except:\n'
- ' hit_except = True\n'
- ' if not exit(manager, *sys.exc_info()):\n'
- ' raise\n'
- ' finally:\n'
- ' if not hit_except:\n'
- ' exit(manager, None, None, None)\n'
- '\n'
+ 'The following code:\n'
+ '\n'
+ ' with EXPRESSION as TARGET:\n'
+ ' SUITE\n'
+ '\n'
+ 'is semantically equivalent to:\n'
+ '\n'
+ ' manager = (EXPRESSION)\n'
+ ' enter = type(manager).__enter__\n'
+ ' exit = type(manager).__exit__\n'
+ ' value = enter(manager)\n'
+ ' hit_except = False\n'
+ '\n'
+ ' try:\n'
+ ' TARGET = value\n'
+ ' SUITE\n'
+ ' except:\n'
+ ' hit_except = True\n'
+ ' if not exit(manager, *sys.exc_info()):\n'
+ ' raise\n'
+ ' finally:\n'
+ ' if not hit_except:\n'
+ ' exit(manager, None, None, None)\n'
+ '\n'
'With more than one item, the context managers are processed as if\n'
'multiple "with" statements were nested:\n'
'\n'
' with A() as a, B() as b:\n'
- ' SUITE\n'
+ ' SUITE\n'
'\n'
- 'is semantically equivalent to:\n'
+ 'is semantically equivalent to:\n'
'\n'
' with A() as a:\n'
' with B() as b:\n'
- ' SUITE\n'
+ ' SUITE\n'
'\n'
'Changed in version 3.1: Support for multiple context expressions.\n'
'\n'
diff --git a/contrib/tools/python3/src/Lib/queue.py b/contrib/tools/python3/src/Lib/queue.py
index 10dbcbc18e..bbd5db5aa2 100644
--- a/contrib/tools/python3/src/Lib/queue.py
+++ b/contrib/tools/python3/src/Lib/queue.py
@@ -1,7 +1,7 @@
'''A multi-producer, multi-consumer queue.'''
import threading
-import types
+import types
from collections import deque
from heapq import heappush, heappop
from time import monotonic as time
@@ -15,7 +15,7 @@ __all__ = ['Empty', 'Full', 'Queue', 'PriorityQueue', 'LifoQueue', 'SimpleQueue'
try:
from _queue import Empty
-except ImportError:
+except ImportError:
class Empty(Exception):
'Exception raised by Queue.get(block=0)/get_nowait().'
pass
@@ -217,9 +217,9 @@ class Queue:
def _get(self):
return self.queue.popleft()
- __class_getitem__ = classmethod(types.GenericAlias)
-
+ __class_getitem__ = classmethod(types.GenericAlias)
+
class PriorityQueue(Queue):
'''Variant of Queue that retrieves open entries in priority order (lowest first).
@@ -319,8 +319,8 @@ class _PySimpleQueue:
'''Return the approximate size of the queue (not reliable!).'''
return len(self._queue)
- __class_getitem__ = classmethod(types.GenericAlias)
-
+ __class_getitem__ = classmethod(types.GenericAlias)
+
if SimpleQueue is None:
SimpleQueue = _PySimpleQueue
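The restored "__class_getitem__ = classmethod(types.GenericAlias)" lines make
the queue classes subscriptable for type hints (PEP 585 style). A quick sketch,
assuming Python 3.9+:

    import queue

    q: queue.Queue[int] = queue.Queue()     # Queue[int] is a types.GenericAlias
    q.put(1)
    assert q.get() == 1
    assert queue.Queue[int].__origin__ is queue.Queue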
diff --git a/contrib/tools/python3/src/Lib/quopri.py b/contrib/tools/python3/src/Lib/quopri.py
index 08899c5cb7..8a5cd2fd97 100644
--- a/contrib/tools/python3/src/Lib/quopri.py
+++ b/contrib/tools/python3/src/Lib/quopri.py
@@ -204,11 +204,11 @@ def main():
print("-t: quote tabs")
print("-d: decode; default encode")
sys.exit(2)
- deco = False
- tabs = False
+ deco = False
+ tabs = False
for o, a in opts:
- if o == '-t': tabs = True
- if o == '-d': deco = True
+ if o == '-t': tabs = True
+ if o == '-d': deco = True
if tabs and deco:
sys.stdout = sys.stderr
print("-t and -d are mutually exclusive")
diff --git a/contrib/tools/python3/src/Lib/random.py b/contrib/tools/python3/src/Lib/random.py
index 1d4b5eb36f..8fd6439aa8 100644
--- a/contrib/tools/python3/src/Lib/random.py
+++ b/contrib/tools/python3/src/Lib/random.py
@@ -1,9 +1,9 @@
"""Random variable generators.
- bytes
- -----
- uniform bytes (values between 0 and 255)
-
+ bytes
+ -----
+ uniform bytes (values between 0 and 255)
+
integers
--------
uniform within range
@@ -41,61 +41,61 @@ General notes on the underlying Mersenne Twister core generator:
"""
-# Translated by Guido van Rossum from C source provided by
-# Adrian Baddeley. Adapted by Raymond Hettinger for use with
-# the Mersenne Twister and os.urandom() core generators.
-
+# Translated by Guido van Rossum from C source provided by
+# Adrian Baddeley. Adapted by Raymond Hettinger for use with
+# the Mersenne Twister and os.urandom() core generators.
+
from warnings import warn as _warn
from math import log as _log, exp as _exp, pi as _pi, e as _e, ceil as _ceil
from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin
-from math import tau as TWOPI, floor as _floor
+from math import tau as TWOPI, floor as _floor
from os import urandom as _urandom
from _collections_abc import Set as _Set, Sequence as _Sequence
-from itertools import accumulate as _accumulate, repeat as _repeat
-from bisect import bisect as _bisect
+from itertools import accumulate as _accumulate, repeat as _repeat
+from bisect import bisect as _bisect
import os as _os
-import _random
-
-try:
- # hashlib is pretty heavy to load, try lean internal module first
- from _sha512 import sha512 as _sha512
-except ImportError:
- # fallback to official implementation
- from hashlib import sha512 as _sha512
-
-__all__ = [
- "Random",
- "SystemRandom",
- "betavariate",
- "choice",
- "choices",
- "expovariate",
- "gammavariate",
- "gauss",
- "getrandbits",
- "getstate",
- "lognormvariate",
- "normalvariate",
- "paretovariate",
- "randbytes",
- "randint",
- "random",
- "randrange",
- "sample",
- "seed",
- "setstate",
- "shuffle",
- "triangular",
- "uniform",
- "vonmisesvariate",
- "weibullvariate",
-]
-
-NV_MAGICCONST = 4 * _exp(-0.5) / _sqrt(2.0)
+import _random
+
+try:
+ # hashlib is pretty heavy to load, try lean internal module first
+ from _sha512 import sha512 as _sha512
+except ImportError:
+ # fallback to official implementation
+ from hashlib import sha512 as _sha512
+
+__all__ = [
+ "Random",
+ "SystemRandom",
+ "betavariate",
+ "choice",
+ "choices",
+ "expovariate",
+ "gammavariate",
+ "gauss",
+ "getrandbits",
+ "getstate",
+ "lognormvariate",
+ "normalvariate",
+ "paretovariate",
+ "randbytes",
+ "randint",
+ "random",
+ "randrange",
+ "sample",
+ "seed",
+ "setstate",
+ "shuffle",
+ "triangular",
+ "uniform",
+ "vonmisesvariate",
+ "weibullvariate",
+]
+
+NV_MAGICCONST = 4 * _exp(-0.5) / _sqrt(2.0)
LOG4 = _log(4.0)
SG_MAGICCONST = 1.0 + _log(4.5)
BPF = 53 # Number of bits in a float
-RECIP_BPF = 2 ** -BPF
+RECIP_BPF = 2 ** -BPF
class Random(_random.Random):
@@ -123,12 +123,12 @@ class Random(_random.Random):
self.seed(x)
self.gauss_next = None
- def seed(self, a=None, version=2):
- """Initialize internal state from a seed.
-
- The only supported seed types are None, int, float,
- str, bytes, and bytearray.
-
+ def seed(self, a=None, version=2):
+ """Initialize internal state from a seed.
+
+ The only supported seed types are None, int, float,
+ str, bytes, and bytearray.
+
None or no argument seeds from current time or from an operating
system specific randomness source if available.
@@ -149,19 +149,19 @@ class Random(_random.Random):
x ^= len(a)
a = -2 if x == -1 else x
- elif version == 2 and isinstance(a, (str, bytes, bytearray)):
+ elif version == 2 and isinstance(a, (str, bytes, bytearray)):
if isinstance(a, str):
a = a.encode()
- a = int.from_bytes(a + _sha512(a).digest(), 'big')
-
- elif not isinstance(a, (type(None), int, float, str, bytes, bytearray)):
- _warn('Seeding based on hashing is deprecated\n'
- 'since Python 3.9 and will be removed in a subsequent '
- 'version. The only \n'
- 'supported seed types are: None, '
- 'int, float, str, bytes, and bytearray.',
- DeprecationWarning, 2)
-
+ a = int.from_bytes(a + _sha512(a).digest(), 'big')
+
+ elif not isinstance(a, (type(None), int, float, str, bytes, bytearray)):
+ _warn('Seeding based on hashing is deprecated\n'
+ 'since Python 3.9 and will be removed in a subsequent '
+ 'version. The only \n'
+ 'supported seed types are: None, '
+ 'int, float, str, bytes, and bytearray.',
+ DeprecationWarning, 2)
+
super().seed(a)
self.gauss_next = None
@@ -182,7 +182,7 @@ class Random(_random.Random):
# really unsigned 32-bit ints, so we convert negative ints from
# version 2 to positive longs for version 3.
try:
- internalstate = tuple(x % (2 ** 32) for x in internalstate)
+ internalstate = tuple(x % (2 ** 32) for x in internalstate)
except ValueError as e:
raise TypeError from e
super().setstate(internalstate)
@@ -192,17 +192,17 @@ class Random(_random.Random):
(version, self.VERSION))
- ## -------------------------------------------------------
- ## ---- Methods below this point do not need to be overridden or extended
- ## ---- when subclassing for the purpose of using a different core generator.
-
-
- ## -------------------- pickle support -------------------
+ ## -------------------------------------------------------
+ ## ---- Methods below this point do not need to be overridden or extended
+ ## ---- when subclassing for the purpose of using a different core generator.
+
+ ## -------------------- pickle support -------------------
+
# Issue 17489: Since __reduce__ was defined to fix #759889 this is no
# longer called; we leave it here because it has been here since random was
# rewritten back in 2001 and why risk breaking something.
- def __getstate__(self): # for pickle
+ def __getstate__(self): # for pickle
return self.getstate()
def __setstate__(self, state): # for pickle
@@ -212,82 +212,82 @@ class Random(_random.Random):
return self.__class__, (), self.getstate()
- ## ---- internal support method for evenly distributed integers ----
-
- def __init_subclass__(cls, /, **kwargs):
- """Control how subclasses generate random integers.
-
- The algorithm a subclass can use depends on the random() and/or
- getrandbits() implementation available to it and determines
- whether it can generate random integers from arbitrarily large
- ranges.
- """
-
- for c in cls.__mro__:
- if '_randbelow' in c.__dict__:
- # just inherit it
- break
- if 'getrandbits' in c.__dict__:
- cls._randbelow = cls._randbelow_with_getrandbits
- break
- if 'random' in c.__dict__:
- cls._randbelow = cls._randbelow_without_getrandbits
- break
-
- def _randbelow_with_getrandbits(self, n):
- "Return a random int in the range [0,n). Returns 0 if n==0."
-
- if not n:
- return 0
- getrandbits = self.getrandbits
- k = n.bit_length() # don't use (n-1) here because n can be 1
- r = getrandbits(k) # 0 <= r < 2**k
- while r >= n:
- r = getrandbits(k)
- return r
-
- def _randbelow_without_getrandbits(self, n, maxsize=1<<BPF):
- """Return a random int in the range [0,n). Returns 0 if n==0.
-
- The implementation does not use getrandbits, but only random.
- """
-
- random = self.random
- if n >= maxsize:
- _warn("Underlying random() generator does not supply \n"
- "enough bits to choose from a population range this large.\n"
- "To remove the range limitation, add a getrandbits() method.")
- return _floor(random() * n)
- if n == 0:
- return 0
- rem = maxsize % n
- limit = (maxsize - rem) / maxsize # int(limit * maxsize) % n == 0
- r = random()
- while r >= limit:
- r = random()
- return _floor(r * maxsize) % n
-
- _randbelow = _randbelow_with_getrandbits
-
-
- ## --------------------------------------------------------
- ## ---- Methods below this point generate custom distributions
- ## ---- based on the methods defined above. They do not
- ## ---- directly touch the underlying generator and only
- ## ---- access randomness through the methods: random(),
- ## ---- getrandbits(), or _randbelow().
-
-
- ## -------------------- bytes methods ---------------------
-
- def randbytes(self, n):
- """Generate n random bytes."""
- return self.getrandbits(n * 8).to_bytes(n, 'little')
-
-
- ## -------------------- integer methods -------------------
-
- def randrange(self, start, stop=None, step=1):
+ ## ---- internal support method for evenly distributed integers ----
+
+ def __init_subclass__(cls, /, **kwargs):
+ """Control how subclasses generate random integers.
+
+ The algorithm a subclass can use depends on the random() and/or
+ getrandbits() implementation available to it and determines
+ whether it can generate random integers from arbitrarily large
+ ranges.
+ """
+
+ for c in cls.__mro__:
+ if '_randbelow' in c.__dict__:
+ # just inherit it
+ break
+ if 'getrandbits' in c.__dict__:
+ cls._randbelow = cls._randbelow_with_getrandbits
+ break
+ if 'random' in c.__dict__:
+ cls._randbelow = cls._randbelow_without_getrandbits
+ break
+
+ def _randbelow_with_getrandbits(self, n):
+ "Return a random int in the range [0,n). Returns 0 if n==0."
+
+ if not n:
+ return 0
+ getrandbits = self.getrandbits
+ k = n.bit_length() # don't use (n-1) here because n can be 1
+ r = getrandbits(k) # 0 <= r < 2**k
+ while r >= n:
+ r = getrandbits(k)
+ return r
+
+ def _randbelow_without_getrandbits(self, n, maxsize=1<<BPF):
+ """Return a random int in the range [0,n). Returns 0 if n==0.
+
+ The implementation does not use getrandbits, but only random.
+ """
+
+ random = self.random
+ if n >= maxsize:
+ _warn("Underlying random() generator does not supply \n"
+ "enough bits to choose from a population range this large.\n"
+ "To remove the range limitation, add a getrandbits() method.")
+ return _floor(random() * n)
+ if n == 0:
+ return 0
+ rem = maxsize % n
+ limit = (maxsize - rem) / maxsize # int(limit * maxsize) % n == 0
+ r = random()
+ while r >= limit:
+ r = random()
+ return _floor(r * maxsize) % n
+
+ _randbelow = _randbelow_with_getrandbits
+
+
+ ## --------------------------------------------------------
+ ## ---- Methods below this point generate custom distributions
+ ## ---- based on the methods defined above. They do not
+ ## ---- directly touch the underlying generator and only
+ ## ---- access randomness through the methods: random(),
+ ## ---- getrandbits(), or _randbelow().
+
+
+ ## -------------------- bytes methods ---------------------
+
+ def randbytes(self, n):
+ """Generate n random bytes."""
+ return self.getrandbits(n * 8).to_bytes(n, 'little')
+
+
+ ## -------------------- integer methods -------------------
+
+ def randrange(self, start, stop=None, step=1):
"""Choose a random item from range(start, stop[, step]).
This fixes the problem with randint() which includes the
@@ -297,7 +297,7 @@ class Random(_random.Random):
# This code is a bit messy to make it fast for the
# common case while still doing adequate error checking.
- istart = int(start)
+ istart = int(start)
if istart != start:
raise ValueError("non-integer arg 1 for randrange()")
if stop is None:
@@ -306,17 +306,17 @@ class Random(_random.Random):
raise ValueError("empty range for randrange()")
# stop argument supplied.
- istop = int(stop)
+ istop = int(stop)
if istop != stop:
raise ValueError("non-integer stop for randrange()")
width = istop - istart
if step == 1 and width > 0:
return istart + self._randbelow(width)
if step == 1:
- raise ValueError("empty range for randrange() (%d, %d, %d)" % (istart, istop, width))
+ raise ValueError("empty range for randrange() (%d, %d, %d)" % (istart, istop, width))
# Non-unit step argument supplied.
- istep = int(step)
+ istep = int(step)
if istep != step:
raise ValueError("non-integer step for randrange()")
if istep > 0:
@@ -329,7 +329,7 @@ class Random(_random.Random):
if n <= 0:
raise ValueError("empty range for randrange()")
- return istart + istep * self._randbelow(n)
+ return istart + istep * self._randbelow(n)
def randint(self, a, b):
"""Return random integer in range [a, b], including both end points.
@@ -338,12 +338,12 @@ class Random(_random.Random):
return self.randrange(a, b+1)
- ## -------------------- sequence methods -------------------
-
+ ## -------------------- sequence methods -------------------
+
def choice(self, seq):
"""Choose a random element from a non-empty sequence."""
- # raises IndexError if seq is empty
- return seq[self._randbelow(len(seq))]
+ # raises IndexError if seq is empty
+ return seq[self._randbelow(len(seq))]
def shuffle(self, x, random=None):
"""Shuffle list x in place, and return None.
@@ -358,20 +358,20 @@ class Random(_random.Random):
randbelow = self._randbelow
for i in reversed(range(1, len(x))):
# pick an element in x[:i+1] with which to exchange x[i]
- j = randbelow(i + 1)
+ j = randbelow(i + 1)
x[i], x[j] = x[j], x[i]
else:
- _warn('The *random* parameter to shuffle() has been deprecated\n'
- 'since Python 3.9 and will be removed in a subsequent '
- 'version.',
- DeprecationWarning, 2)
- floor = _floor
+ _warn('The *random* parameter to shuffle() has been deprecated\n'
+ 'since Python 3.9 and will be removed in a subsequent '
+ 'version.',
+ DeprecationWarning, 2)
+ floor = _floor
for i in reversed(range(1, len(x))):
# pick an element in x[:i+1] with which to exchange x[i]
- j = floor(random() * (i + 1))
+ j = floor(random() * (i + 1))
x[i], x[j] = x[j], x[i]
- def sample(self, population, k, *, counts=None):
+ def sample(self, population, k, *, counts=None):
"""Chooses k unique random elements from a population sequence or set.
Returns a new list containing elements from the population while
@@ -384,21 +384,21 @@ class Random(_random.Random):
population contains repeats, then each occurrence is a possible
selection in the sample.
- Repeated elements can be specified one at a time or with the optional
- counts parameter. For example:
-
- sample(['red', 'blue'], counts=[4, 2], k=5)
-
- is equivalent to:
-
- sample(['red', 'red', 'red', 'red', 'blue', 'blue'], k=5)
-
- To choose a sample from a range of integers, use range() for the
- population argument. This is especially fast and space efficient
- for sampling from a large population:
-
- sample(range(10000000), 60)
-
+ Repeated elements can be specified one at a time or with the optional
+ counts parameter. For example:
+
+ sample(['red', 'blue'], counts=[4, 2], k=5)
+
+ is equivalent to:
+
+ sample(['red', 'red', 'red', 'red', 'blue', 'blue'], k=5)
+
+ To choose a sample from a range of integers, use range() for the
+ population argument. This is especially fast and space efficient
+ for sampling from a large population:
+
+ sample(range(10000000), 60)
+
"""
# Sampling without replacement entails tracking either potential
@@ -411,54 +411,54 @@ class Random(_random.Random):
# preferred since the list takes less space than the
# set and it doesn't suffer from frequent reselections.
- # The number of calls to _randbelow() is kept at or near k, the
- # theoretical minimum. This is important because running time
- # is dominated by _randbelow() and because it extracts the
- # least entropy from the underlying random number generators.
-
- # Memory requirements are kept to the smaller of a k-length
- # set or an n-length list.
-
- # There are other sampling algorithms that do not require
- # auxiliary memory, but they were rejected because they made
- # too many calls to _randbelow(), making them slower and
- # causing them to eat more entropy than necessary.
-
+ # The number of calls to _randbelow() is kept at or near k, the
+ # theoretical minimum. This is important because running time
+ # is dominated by _randbelow() and because it extracts the
+ # least entropy from the underlying random number generators.
+
+ # Memory requirements are kept to the smaller of a k-length
+ # set or an n-length list.
+
+ # There are other sampling algorithms that do not require
+ # auxiliary memory, but they were rejected because they made
+ # too many calls to _randbelow(), making them slower and
+ # causing them to eat more entropy than necessary.
+
if isinstance(population, _Set):
- _warn('Sampling from a set deprecated\n'
- 'since Python 3.9 and will be removed in a subsequent version.',
- DeprecationWarning, 2)
+ _warn('Sampling from a set deprecated\n'
+ 'since Python 3.9 and will be removed in a subsequent version.',
+ DeprecationWarning, 2)
population = tuple(population)
if not isinstance(population, _Sequence):
- raise TypeError("Population must be a sequence. For dicts or sets, use sorted(d).")
- n = len(population)
- if counts is not None:
- cum_counts = list(_accumulate(counts))
- if len(cum_counts) != n:
- raise ValueError('The number of counts does not match the population')
- total = cum_counts.pop()
- if not isinstance(total, int):
- raise TypeError('Counts must be integers')
- if total <= 0:
- raise ValueError('Total of counts must be greater than zero')
- selections = self.sample(range(total), k=k)
- bisect = _bisect
- return [population[bisect(cum_counts, s)] for s in selections]
+ raise TypeError("Population must be a sequence. For dicts or sets, use sorted(d).")
+ n = len(population)
+ if counts is not None:
+ cum_counts = list(_accumulate(counts))
+ if len(cum_counts) != n:
+ raise ValueError('The number of counts does not match the population')
+ total = cum_counts.pop()
+ if not isinstance(total, int):
+ raise TypeError('Counts must be integers')
+ if total <= 0:
+ raise ValueError('Total of counts must be greater than zero')
+ selections = self.sample(range(total), k=k)
+ bisect = _bisect
+ return [population[bisect(cum_counts, s)] for s in selections]
randbelow = self._randbelow
if not 0 <= k <= n:
raise ValueError("Sample larger than population or is negative")
result = [None] * k
setsize = 21 # size of a small set minus size of an empty list
if k > 5:
- setsize += 4 ** _ceil(_log(k * 3, 4)) # table size for big sets
+ setsize += 4 ** _ceil(_log(k * 3, 4)) # table size for big sets
if n <= setsize:
- # An n-length list is smaller than a k-length set.
- # Invariant: non-selected at pool[0 : n-i]
+ # An n-length list is smaller than a k-length set.
+ # Invariant: non-selected at pool[0 : n-i]
pool = list(population)
- for i in range(k):
- j = randbelow(n - i)
+ for i in range(k):
+ j = randbelow(n - i)
result[i] = pool[j]
- pool[j] = pool[n - i - 1] # move non-selected item into vacancy
+ pool[j] = pool[n - i - 1] # move non-selected item into vacancy
else:
selected = set()
selected_add = selected.add
@@ -478,39 +478,39 @@ class Random(_random.Random):
"""
random = self.random
- n = len(population)
+ n = len(population)
if cum_weights is None:
if weights is None:
- floor = _floor
- n += 0.0 # convert to float for a small speed improvement
- return [population[floor(random() * n)] for i in _repeat(None, k)]
- try:
- cum_weights = list(_accumulate(weights))
- except TypeError:
- if not isinstance(weights, int):
- raise
- k = weights
- raise TypeError(
- f'The number of choices must be a keyword argument: {k=}'
- ) from None
+ floor = _floor
+ n += 0.0 # convert to float for a small speed improvement
+ return [population[floor(random() * n)] for i in _repeat(None, k)]
+ try:
+ cum_weights = list(_accumulate(weights))
+ except TypeError:
+ if not isinstance(weights, int):
+ raise
+ k = weights
+ raise TypeError(
+ f'The number of choices must be a keyword argument: {k=}'
+ ) from None
elif weights is not None:
raise TypeError('Cannot specify both weights and cumulative weights')
- if len(cum_weights) != n:
+ if len(cum_weights) != n:
raise ValueError('The number of weights does not match the population')
- total = cum_weights[-1] + 0.0 # convert to float
- if total <= 0.0:
- raise ValueError('Total of weights must be greater than zero')
- bisect = _bisect
- hi = n - 1
+ total = cum_weights[-1] + 0.0 # convert to float
+ if total <= 0.0:
+ raise ValueError('Total of weights must be greater than zero')
+ bisect = _bisect
+ hi = n - 1
return [population[bisect(cum_weights, random() * total, 0, hi)]
- for i in _repeat(None, k)]
+ for i in _repeat(None, k)]
- ## -------------------- real-valued distributions -------------------
+ ## -------------------- real-valued distributions -------------------
def uniform(self, a, b):
"Get a random number in the range [a, b) or [a, b] depending on rounding."
- return a + (b - a) * self.random()
+ return a + (b - a) * self.random()
def triangular(self, low=0.0, high=1.0, mode=None):
"""Triangular distribution.
@@ -544,53 +544,53 @@ class Random(_random.Random):
# Math Software, 3, (1977), pp257-260.
random = self.random
- while True:
+ while True:
u1 = random()
u2 = 1.0 - random()
- z = NV_MAGICCONST * (u1 - 0.5) / u2
- zz = z * z / 4.0
+ z = NV_MAGICCONST * (u1 - 0.5) / u2
+ zz = z * z / 4.0
if zz <= -_log(u2):
break
- return mu + z * sigma
-
- def gauss(self, mu, sigma):
- """Gaussian distribution.
-
- mu is the mean, and sigma is the standard deviation. This is
- slightly faster than the normalvariate() function.
-
- Not thread-safe without a lock around calls.
-
- """
- # When x and y are two variables from [0, 1), uniformly
- # distributed, then
- #
- # cos(2*pi*x)*sqrt(-2*log(1-y))
- # sin(2*pi*x)*sqrt(-2*log(1-y))
- #
- # are two *independent* variables with normal distribution
- # (mu = 0, sigma = 1).
- # (Lambert Meertens)
- # (corrected version; bug discovered by Mike Miller, fixed by LM)
-
- # Multithreading note: When two threads call this function
- # simultaneously, it is possible that they will receive the
- # same return value. The window is very small though. To
- # avoid this, you have to use a lock around all calls. (I
- # didn't want to slow this down in the serial case by using a
- # lock here.)
-
- random = self.random
- z = self.gauss_next
- self.gauss_next = None
- if z is None:
- x2pi = random() * TWOPI
- g2rad = _sqrt(-2.0 * _log(1.0 - random()))
- z = _cos(x2pi) * g2rad
- self.gauss_next = _sin(x2pi) * g2rad
-
- return mu + z * sigma
-
+ return mu + z * sigma
+
+ def gauss(self, mu, sigma):
+ """Gaussian distribution.
+
+ mu is the mean, and sigma is the standard deviation. This is
+ slightly faster than the normalvariate() function.
+
+ Not thread-safe without a lock around calls.
+
+ """
+ # When x and y are two variables from [0, 1), uniformly
+ # distributed, then
+ #
+ # cos(2*pi*x)*sqrt(-2*log(1-y))
+ # sin(2*pi*x)*sqrt(-2*log(1-y))
+ #
+ # are two *independent* variables with normal distribution
+ # (mu = 0, sigma = 1).
+ # (Lambert Meertens)
+ # (corrected version; bug discovered by Mike Miller, fixed by LM)
+
+ # Multithreading note: When two threads call this function
+ # simultaneously, it is possible that they will receive the
+ # same return value. The window is very small though. To
+ # avoid this, you have to use a lock around all calls. (I
+ # didn't want to slow this down in the serial case by using a
+ # lock here.)
+
+ random = self.random
+ z = self.gauss_next
+ self.gauss_next = None
+ if z is None:
+ x2pi = random() * TWOPI
+ g2rad = _sqrt(-2.0 * _log(1.0 - random()))
+ z = _cos(x2pi) * g2rad
+ self.gauss_next = _sin(x2pi) * g2rad
+
+ return mu + z * sigma
+
def lognormvariate(self, mu, sigma):
"""Log normal distribution.
@@ -616,7 +616,7 @@ class Random(_random.Random):
# we use 1-random() instead of random() to preclude the
# possibility of taking the log of zero.
- return -_log(1.0 - self.random()) / lambd
+ return -_log(1.0 - self.random()) / lambd
def vonmisesvariate(self, mu, kappa):
"""Circular data distribution.
@@ -641,7 +641,7 @@ class Random(_random.Random):
s = 0.5 / kappa
r = s + _sqrt(1.0 + s * s)
- while True:
+ while True:
u1 = random()
z = _cos(_pi * u1)
@@ -692,31 +692,31 @@ class Random(_random.Random):
while 1:
u1 = random()
- if not 1e-7 < u1 < 0.9999999:
+ if not 1e-7 < u1 < 0.9999999:
continue
u2 = 1.0 - random()
- v = _log(u1 / (1.0 - u1)) / ainv
- x = alpha * _exp(v)
- z = u1 * u1 * u2
- r = bbb + ccc * v - x
- if r + SG_MAGICCONST - 4.5 * z >= 0.0 or r >= _log(z):
+ v = _log(u1 / (1.0 - u1)) / ainv
+ x = alpha * _exp(v)
+ z = u1 * u1 * u2
+ r = bbb + ccc * v - x
+ if r + SG_MAGICCONST - 4.5 * z >= 0.0 or r >= _log(z):
return x * beta
elif alpha == 1.0:
# expovariate(1/beta)
- return -_log(1.0 - random()) * beta
+ return -_log(1.0 - random()) * beta
- else:
- # alpha is between 0 and 1 (exclusive)
+ else:
+ # alpha is between 0 and 1 (exclusive)
# Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle
- while True:
+ while True:
u = random()
- b = (_e + alpha) / _e
- p = b * u
+ b = (_e + alpha) / _e
+ p = b * u
if p <= 1.0:
- x = p ** (1.0 / alpha)
+ x = p ** (1.0 / alpha)
else:
- x = -_log((b - p) / alpha)
+ x = -_log((b - p) / alpha)
u1 = random()
if p > 1.0:
if u1 <= x ** (alpha - 1.0):
@@ -732,32 +732,32 @@ class Random(_random.Random):
Returned values range between 0 and 1.
"""
- ## See
- ## http://mail.python.org/pipermail/python-bugs-list/2001-January/003752.html
- ## for Ivan Frohne's insightful analysis of why the original implementation:
- ##
- ## def betavariate(self, alpha, beta):
- ## # Discrete Event Simulation in C, pp 87-88.
- ##
- ## y = self.expovariate(alpha)
- ## z = self.expovariate(1.0/beta)
- ## return z/(y+z)
- ##
- ## was dead wrong, and how it probably got that way.
+ ## See
+ ## http://mail.python.org/pipermail/python-bugs-list/2001-January/003752.html
+ ## for Ivan Frohne's insightful analysis of why the original implementation:
+ ##
+ ## def betavariate(self, alpha, beta):
+ ## # Discrete Event Simulation in C, pp 87-88.
+ ##
+ ## y = self.expovariate(alpha)
+ ## z = self.expovariate(1.0/beta)
+ ## return z/(y+z)
+ ##
+ ## was dead wrong, and how it probably got that way.
# This version due to Janne Sinkkonen, and matches all the std
# texts (e.g., Knuth Vol 2 Ed 3 pg 134 "the beta distribution").
y = self.gammavariate(alpha, 1.0)
- if y:
+ if y:
return y / (y + self.gammavariate(beta, 1.0))
- return 0.0
+ return 0.0
def paretovariate(self, alpha):
"""Pareto distribution. alpha is the shape parameter."""
# Jain, pg. 495
u = 1.0 - self.random()
- return 1.0 / u ** (1.0 / alpha)
+ return 1.0 / u ** (1.0 / alpha)
def weibullvariate(self, alpha, beta):
"""Weibull distribution.
@@ -768,20 +768,20 @@ class Random(_random.Random):
# Jain, pg. 499; bug fix courtesy Bill Arms
u = 1.0 - self.random()
- return alpha * (-_log(u)) ** (1.0 / beta)
-
+ return alpha * (-_log(u)) ** (1.0 / beta)
-## ------------------------------------------------------------------
+
+## ------------------------------------------------------------------
## --------------- Operating System Random Source ------------------
-
+
class SystemRandom(Random):
"""Alternate random number generator using sources provided
by the operating system (such as /dev/urandom on Unix or
CryptGenRandom on Windows).
Not available on all systems (see os.urandom() for details).
-
+
"""
def random(self):
@@ -790,18 +790,18 @@ class SystemRandom(Random):
def getrandbits(self, k):
"""getrandbits(k) -> x. Generates an int with k random bits."""
- if k < 0:
- raise ValueError('number of bits must be non-negative')
+ if k < 0:
+ raise ValueError('number of bits must be non-negative')
numbytes = (k + 7) // 8 # bits / 8 and rounded up
x = int.from_bytes(_urandom(numbytes), 'big')
return x >> (numbytes * 8 - k) # trim excess bits
- def randbytes(self, n):
- """Generate n random bytes."""
- # os.urandom(n) fails with ValueError for n < 0
- # and returns an empty bytes string for n == 0.
- return _urandom(n)
-
+ def randbytes(self, n):
+ """Generate n random bytes."""
+ # os.urandom(n) fails with ValueError for n < 0
+ # and returns an empty bytes string for n == 0.
+ return _urandom(n)
+
def seed(self, *args, **kwds):
"Stub method. Not used for a system random number generator."
return None
@@ -812,10 +812,10 @@ class SystemRandom(Random):
getstate = setstate = _notimplemented
-# ----------------------------------------------------------------------
+# ----------------------------------------------------------------------
# Create one instance, seeded from current time, and export its methods
# as module-level functions. The functions share state across all uses
-# (both in the user's code and in the Python libraries), but that's fine
+# (both in the user's code and in the Python libraries), but that's fine
# for most programs and is easier for the casual user than making them
# instantiate their own Random() instance.
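A short sketch of that design, assuming the default Mersenne Twister core: the
module-level functions and a manually seeded Random() instance produce the same
stream for the same seed, but the instance keeps independent state:

    import random

    random.seed(42)
    module_level = random.random()

    rng = random.Random(42)      # independent state, same algorithm
    assert rng.random() == module_level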
@@ -842,51 +842,51 @@ weibullvariate = _inst.weibullvariate
getstate = _inst.getstate
setstate = _inst.setstate
getrandbits = _inst.getrandbits
-randbytes = _inst.randbytes
-
-
-## ------------------------------------------------------
-## ----------------- test program -----------------------
-
-def _test_generator(n, func, args):
- from statistics import stdev, fmean as mean
- from time import perf_counter
-
- t0 = perf_counter()
- data = [func(*args) for i in range(n)]
- t1 = perf_counter()
-
- xbar = mean(data)
- sigma = stdev(data, xbar)
- low = min(data)
- high = max(data)
-
- print(f'{t1 - t0:.3f} sec, {n} times {func.__name__}')
- print('avg %g, stddev %g, min %g, max %g\n' % (xbar, sigma, low, high))
-
-
-def _test(N=2000):
- _test_generator(N, random, ())
- _test_generator(N, normalvariate, (0.0, 1.0))
- _test_generator(N, lognormvariate, (0.0, 1.0))
- _test_generator(N, vonmisesvariate, (0.0, 1.0))
- _test_generator(N, gammavariate, (0.01, 1.0))
- _test_generator(N, gammavariate, (0.1, 1.0))
- _test_generator(N, gammavariate, (0.1, 2.0))
- _test_generator(N, gammavariate, (0.5, 1.0))
- _test_generator(N, gammavariate, (0.9, 1.0))
- _test_generator(N, gammavariate, (1.0, 1.0))
- _test_generator(N, gammavariate, (2.0, 1.0))
- _test_generator(N, gammavariate, (20.0, 1.0))
- _test_generator(N, gammavariate, (200.0, 1.0))
- _test_generator(N, gauss, (0.0, 1.0))
- _test_generator(N, betavariate, (3.0, 3.0))
- _test_generator(N, triangular, (0.0, 1.0, 1.0 / 3.0))
-
-
-## ------------------------------------------------------
-## ------------------ fork support ---------------------
-
+randbytes = _inst.randbytes
+
+
+## ------------------------------------------------------
+## ----------------- test program -----------------------
+
+def _test_generator(n, func, args):
+ from statistics import stdev, fmean as mean
+ from time import perf_counter
+
+ t0 = perf_counter()
+ data = [func(*args) for i in range(n)]
+ t1 = perf_counter()
+
+ xbar = mean(data)
+ sigma = stdev(data, xbar)
+ low = min(data)
+ high = max(data)
+
+ print(f'{t1 - t0:.3f} sec, {n} times {func.__name__}')
+ print('avg %g, stddev %g, min %g, max %g\n' % (xbar, sigma, low, high))
+
+
+def _test(N=2000):
+ _test_generator(N, random, ())
+ _test_generator(N, normalvariate, (0.0, 1.0))
+ _test_generator(N, lognormvariate, (0.0, 1.0))
+ _test_generator(N, vonmisesvariate, (0.0, 1.0))
+ _test_generator(N, gammavariate, (0.01, 1.0))
+ _test_generator(N, gammavariate, (0.1, 1.0))
+ _test_generator(N, gammavariate, (0.1, 2.0))
+ _test_generator(N, gammavariate, (0.5, 1.0))
+ _test_generator(N, gammavariate, (0.9, 1.0))
+ _test_generator(N, gammavariate, (1.0, 1.0))
+ _test_generator(N, gammavariate, (2.0, 1.0))
+ _test_generator(N, gammavariate, (20.0, 1.0))
+ _test_generator(N, gammavariate, (200.0, 1.0))
+ _test_generator(N, gauss, (0.0, 1.0))
+ _test_generator(N, betavariate, (3.0, 3.0))
+ _test_generator(N, triangular, (0.0, 1.0, 1.0 / 3.0))
+
+
+## ------------------------------------------------------
+## ------------------ fork support ---------------------
+
if hasattr(_os, "fork"):
_os.register_at_fork(after_in_child=_inst.seed)
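Two of the 3.9-era additions restored in this file can be exercised directly;
a minimal sketch:

    import random

    rng = random.Random(1)
    assert len(rng.randbytes(4)) == 4        # getrandbits(32) packed little-endian

    # sample(counts=...) behaves as if the population were expanded
    picks = rng.sample(['red', 'blue'], counts=[4, 2], k=5)
    assert picks.count('red') in (3, 4)      # 4 red + 2 blue, one element left out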
diff --git a/contrib/tools/python3/src/Lib/re.py b/contrib/tools/python3/src/Lib/re.py
index bfb7b1ccd9..3375bb3ec5 100644
--- a/contrib/tools/python3/src/Lib/re.py
+++ b/contrib/tools/python3/src/Lib/re.py
@@ -44,7 +44,7 @@ The special characters are:
"|" A|B, creates an RE that will match either A or B.
(...) Matches the RE inside the parentheses.
The contents can be retrieved or matched later in the string.
- (?aiLmsux) The letters set the corresponding flags defined below.
+ (?aiLmsux) The letters set the corresponding flags defined below.
(?:...) Non-grouping version of regular parentheses.
(?P<name>...) The substring matched by the group is accessible by name.
(?P=name) Matches the text matched earlier by the group named name.
@@ -97,9 +97,9 @@ This module exports the following functions:
purge Clear the regular expression cache.
escape Backslash all non-alphanumerics in a string.
-Each function other than purge and escape can take an optional 'flags' argument
-consisting of one or more of the following module constants, joined by "|".
-A, L, and U are mutually exclusive.
+Each function other than purge and escape can take an optional 'flags' argument
+consisting of one or more of the following module constants, joined by "|".
+A, L, and U are mutually exclusive.
A ASCII For string patterns, make \w, \W, \b, \B, \d, \D
match the corresponding ASCII character categories
(rather than the whole Unicode categories, which is the
@@ -143,40 +143,40 @@ __all__ = [
__version__ = "2.2.1"
class RegexFlag(enum.IntFlag):
- ASCII = A = sre_compile.SRE_FLAG_ASCII # assume ascii "locale"
- IGNORECASE = I = sre_compile.SRE_FLAG_IGNORECASE # ignore case
- LOCALE = L = sre_compile.SRE_FLAG_LOCALE # assume current 8-bit locale
- UNICODE = U = sre_compile.SRE_FLAG_UNICODE # assume unicode "locale"
- MULTILINE = M = sre_compile.SRE_FLAG_MULTILINE # make anchors look for newline
- DOTALL = S = sre_compile.SRE_FLAG_DOTALL # make dot match newline
- VERBOSE = X = sre_compile.SRE_FLAG_VERBOSE # ignore whitespace and comments
+ ASCII = A = sre_compile.SRE_FLAG_ASCII # assume ascii "locale"
+ IGNORECASE = I = sre_compile.SRE_FLAG_IGNORECASE # ignore case
+ LOCALE = L = sre_compile.SRE_FLAG_LOCALE # assume current 8-bit locale
+ UNICODE = U = sre_compile.SRE_FLAG_UNICODE # assume unicode "locale"
+ MULTILINE = M = sre_compile.SRE_FLAG_MULTILINE # make anchors look for newline
+ DOTALL = S = sre_compile.SRE_FLAG_DOTALL # make dot match newline
+ VERBOSE = X = sre_compile.SRE_FLAG_VERBOSE # ignore whitespace and comments
# sre extensions (experimental, don't rely on these)
- TEMPLATE = T = sre_compile.SRE_FLAG_TEMPLATE # disable backtracking
+ TEMPLATE = T = sre_compile.SRE_FLAG_TEMPLATE # disable backtracking
DEBUG = sre_compile.SRE_FLAG_DEBUG # dump pattern after compilation
-
- def __repr__(self):
- if self._name_ is not None:
- return f're.{self._name_}'
- value = self._value_
- members = []
- negative = value < 0
- if negative:
- value = ~value
- for m in self.__class__:
- if value & m._value_:
- value &= ~m._value_
- members.append(f're.{m._name_}')
- if value:
- members.append(hex(value))
- res = '|'.join(members)
- if negative:
- if len(members) > 1:
- res = f'~({res})'
- else:
- res = f'~{res}'
- return res
- __str__ = object.__str__
-
+
+ def __repr__(self):
+ if self._name_ is not None:
+ return f're.{self._name_}'
+ value = self._value_
+ members = []
+ negative = value < 0
+ if negative:
+ value = ~value
+ for m in self.__class__:
+ if value & m._value_:
+ value &= ~m._value_
+ members.append(f're.{m._name_}')
+ if value:
+ members.append(hex(value))
+ res = '|'.join(members)
+ if negative:
+ if len(members) > 1:
+ res = f'~({res})'
+ else:
+ res = f'~{res}'
+ return res
+ __str__ = object.__str__
+
globals().update(RegexFlag.__members__)
# sre exception
@@ -352,7 +352,7 @@ class Scanner:
self.lexicon = lexicon
# combine phrases into a compound pattern
p = []
- s = sre_parse.State()
+ s = sre_parse.State()
s.flags = flags
for phrase, action in lexicon:
gid = s.opengroup()
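The custom RegexFlag.__repr__ restored above renders combined flags in
"re.X|re.Y" form, and __str__ is redirected to the same output; a sketch,
assuming the 3.9-era code shown here:

    import re

    flags = re.IGNORECASE | re.MULTILINE
    print(repr(flags))   # -> re.IGNORECASE|re.MULTILINE
    print(str(flags))    # __str__ = object.__str__ falls back to the repr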
diff --git a/contrib/tools/python3/src/Lib/rlcompleter.py b/contrib/tools/python3/src/Lib/rlcompleter.py
index 923f5c0541..79116c7863 100644
--- a/contrib/tools/python3/src/Lib/rlcompleter.py
+++ b/contrib/tools/python3/src/Lib/rlcompleter.py
@@ -169,20 +169,20 @@ class Completer:
if (word[:n] == attr and
not (noprefix and word[:n+1] == noprefix)):
match = "%s.%s" % (expr, word)
- if isinstance(getattr(type(thisobject), word, None),
- property):
- # bpo-44752: thisobject.word is a method decorated by
- # `@property`. What follows applies a postfix if
-                    # thisobject.word is callable, but now we know that
- # this is not callable (because it is a property).
- # Also, getattr(thisobject, word) will evaluate the
- # property method, which is not desirable.
- matches.append(match)
- continue
- if (value := getattr(thisobject, word, None)) is not None:
- matches.append(self._callable_postfix(value, match))
+ if isinstance(getattr(type(thisobject), word, None),
+ property):
+ # bpo-44752: thisobject.word is a method decorated by
+ # `@property`. What follows applies a postfix if
+                    # thisobject.word is callable, but now we know that
+ # this is not callable (because it is a property).
+ # Also, getattr(thisobject, word) will evaluate the
+ # property method, which is not desirable.
+ matches.append(match)
+ continue
+ if (value := getattr(thisobject, word, None)) is not None:
+ matches.append(self._callable_postfix(value, match))
else:
- matches.append(match)
+ matches.append(match)
if matches or not noprefix:
break
if noprefix == '_':
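The bpo-44752 change restored above means completing a @property attribute no
longer evaluates the property getter. A sketch with hypothetical names (Demo,
demo) chosen only for illustration:

    import rlcompleter

    class Demo:
        @property
        def prop(self):
            raise RuntimeError('must not be evaluated during completion')

    demo = Demo()
    completer = rlcompleter.Completer({'demo': demo, 'Demo': Demo})
    print(completer.attr_matches('demo.pr'))   # ['demo.prop'] -- no '(' postfix,
                                               # and the getter never runs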
diff --git a/contrib/tools/python3/src/Lib/runpy.py b/contrib/tools/python3/src/Lib/runpy.py
index 7e1e1ac5dd..fb8b463c5d 100644
--- a/contrib/tools/python3/src/Lib/runpy.py
+++ b/contrib/tools/python3/src/Lib/runpy.py
@@ -13,9 +13,9 @@ importers when locating support scripts as well as when importing modules.
import sys
import importlib.machinery # importlib first so we can test #15386 via -m
import importlib.util
-import io
+import io
import types
-import os
+import os
from pkgutil import read_code, get_importer
__all__ = [
@@ -133,9 +133,9 @@ def _get_module_details(mod_name, error=ImportError):
# importlib, where the latter raises other errors for cases where
# pkgutil previously raised ImportError
msg = "Error while finding module specification for {!r} ({}: {})"
- if mod_name.endswith(".py"):
- msg += (f". Try using '{mod_name[:-3]}' instead of "
- f"'{mod_name}' as the module name.")
+ if mod_name.endswith(".py"):
+ msg += (f". Try using '{mod_name[:-3]}' instead of "
+ f"'{mod_name}' as the module name.")
raise error(msg.format(mod_name, type(ex).__name__, ex)) from ex
if spec is None:
raise error("No module named %s" % mod_name)
@@ -233,12 +233,12 @@ def _get_main_module_details(error=ImportError):
def _get_code_from_file(run_name, fname):
# Check for a compiled file first
- decoded_path = os.path.abspath(os.fsdecode(fname))
- with io.open_code(decoded_path) as f:
+ decoded_path = os.path.abspath(os.fsdecode(fname))
+ with io.open_code(decoded_path) as f:
code = read_code(f)
if code is None:
# That didn't work, so try it as normal source code
- with io.open_code(decoded_path) as f:
+ with io.open_code(decoded_path) as f:
code = compile(f.read(), fname, 'exec')
return code, fname
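The message tweak restored above adds a hint when a module name is passed with
a ".py" suffix; roughly:

    import runpy

    try:
        runpy.run_module('sys.py')
    except ImportError as exc:
        print(exc)   # ...Try using 'sys' instead of 'sys.py' as the module name.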
diff --git a/contrib/tools/python3/src/Lib/secrets.py b/contrib/tools/python3/src/Lib/secrets.py
index a546efbdd4..7ad999e41b 100644
--- a/contrib/tools/python3/src/Lib/secrets.py
+++ b/contrib/tools/python3/src/Lib/secrets.py
@@ -43,7 +43,7 @@ def token_bytes(nbytes=None):
"""
if nbytes is None:
nbytes = DEFAULT_ENTROPY
- return _sysrand.randbytes(nbytes)
+ return _sysrand.randbytes(nbytes)
def token_hex(nbytes=None):
"""Return a random text string, in hexadecimal.
diff --git a/contrib/tools/python3/src/Lib/selectors.py b/contrib/tools/python3/src/Lib/selectors.py
index bb15a1cb1b..f32c0a25df 100644
--- a/contrib/tools/python3/src/Lib/selectors.py
+++ b/contrib/tools/python3/src/Lib/selectors.py
@@ -57,7 +57,7 @@ if sys.version_info >= (3, 5):
SelectorKey.data.__doc__ = ('''Optional opaque data associated to this file object.
For example, this could be used to store a per-client session ID.''')
-
+
class _SelectorMapping(Mapping):
"""Mapping of file objects to selector keys."""
@@ -553,10 +553,10 @@ if hasattr(select, 'kqueue'):
def select(self, timeout=None):
timeout = None if timeout is None else max(timeout, 0)
- # If max_ev is 0, kqueue will ignore the timeout. For consistent
- # behavior with the other selector classes, we prevent that here
- # (using max). See https://bugs.python.org/issue29255
- max_ev = max(len(self._fd_to_key), 1)
+ # If max_ev is 0, kqueue will ignore the timeout. For consistent
+ # behavior with the other selector classes, we prevent that here
+ # (using max). See https://bugs.python.org/issue29255
+ max_ev = max(len(self._fd_to_key), 1)
ready = []
try:
kev_list = self._selector.control(None, max_ev, timeout)
@@ -581,39 +581,39 @@ if hasattr(select, 'kqueue'):
super().close()
-def _can_use(method):
- """Check if we can use the selector depending upon the
-    operating system."""
- # Implementation based upon https://github.com/sethmlarson/selectors2/blob/master/selectors2.py
- selector = getattr(select, method, None)
- if selector is None:
- # select module does not implement method
- return False
- # check if the OS and Kernel actually support the method. Call may fail with
- # OSError: [Errno 38] Function not implemented
- try:
- selector_obj = selector()
- if method == 'poll':
- # check that poll actually works
- selector_obj.poll(0)
- else:
- # close epoll, kqueue, and devpoll fd
- selector_obj.close()
- return True
- except OSError:
- return False
-
-
+def _can_use(method):
+ """Check if we can use the selector depending upon the
+    operating system."""
+ # Implementation based upon https://github.com/sethmlarson/selectors2/blob/master/selectors2.py
+ selector = getattr(select, method, None)
+ if selector is None:
+ # select module does not implement method
+ return False
+ # check if the OS and Kernel actually support the method. Call may fail with
+ # OSError: [Errno 38] Function not implemented
+ try:
+ selector_obj = selector()
+ if method == 'poll':
+ # check that poll actually works
+ selector_obj.poll(0)
+ else:
+ # close epoll, kqueue, and devpoll fd
+ selector_obj.close()
+ return True
+ except OSError:
+ return False
+
+
# Choose the best implementation, roughly:
# epoll|kqueue|devpoll > poll > select.
# select() also can't accept a FD > FD_SETSIZE (usually around 1024)
-if _can_use('kqueue'):
+if _can_use('kqueue'):
DefaultSelector = KqueueSelector
-elif _can_use('epoll'):
+elif _can_use('epoll'):
DefaultSelector = EpollSelector
-elif _can_use('devpoll'):
+elif _can_use('devpoll'):
DefaultSelector = DevpollSelector
-elif _can_use('poll'):
+elif _can_use('poll'):
DefaultSelector = PollSelector
else:
DefaultSelector = SelectSelector
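Aside: _can_use() has to instantiate and exercise each selector because the
select module can expose a symbol the kernel does not actually implement
(OSError: Function not implemented). Whatever DefaultSelector resolves to, the
API is uniform; a minimal sketch using a socket pair:

    import selectors
    import socket

    sel = selectors.DefaultSelector()      # kqueue/epoll/devpoll/poll/select
    r, w = socket.socketpair()
    sel.register(r, selectors.EVENT_READ, data="reader")
    w.send(b"ping")
    for key, events in sel.select(timeout=1.0):
        print(key.data, key.fileobj.recv(4))   # -> reader b'ping'
    sel.close()
    r.close()
    w.close()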
diff --git a/contrib/tools/python3/src/Lib/shlex.py b/contrib/tools/python3/src/Lib/shlex.py
index 4801a6c1d4..409db17998 100644
--- a/contrib/tools/python3/src/Lib/shlex.py
+++ b/contrib/tools/python3/src/Lib/shlex.py
@@ -14,7 +14,7 @@ from collections import deque
from io import StringIO
-__all__ = ["shlex", "split", "quote", "join"]
+__all__ = ["shlex", "split", "quote", "join"]
class shlex:
"A lexical analyzer class for simple shell-like syntaxes."
@@ -55,7 +55,7 @@ class shlex:
punctuation_chars = ''
elif punctuation_chars is True:
punctuation_chars = '();<>|&'
- self._punctuation_chars = punctuation_chars
+ self._punctuation_chars = punctuation_chars
if punctuation_chars:
# _pushback_chars is a push back queue used by lookahead logic
self._pushback_chars = deque()
@@ -65,10 +65,10 @@ class shlex:
t = self.wordchars.maketrans(dict.fromkeys(punctuation_chars))
self.wordchars = self.wordchars.translate(t)
- @property
- def punctuation_chars(self):
- return self._punctuation_chars
-
+ @property
+ def punctuation_chars(self):
+ return self._punctuation_chars
+
def push_token(self, tok):
"Push a token onto the stack popped by the get_token method"
if self.debug >= 1:
@@ -250,8 +250,8 @@ class shlex:
escapedstate = 'a'
self.state = nextchar
elif (nextchar in self.wordchars or nextchar in self.quotes
- or (self.whitespace_split and
- nextchar not in self.punctuation_chars)):
+ or (self.whitespace_split and
+ nextchar not in self.punctuation_chars)):
self.token += nextchar
else:
if self.punctuation_chars:
@@ -303,11 +303,11 @@ class shlex:
return token
def split(s, comments=False, posix=True):
- """Split the string *s* using shell-like syntax."""
- if s is None:
- import warnings
- warnings.warn("Passing None for 's' to shlex.split() is deprecated.",
- DeprecationWarning, stacklevel=2)
+ """Split the string *s* using shell-like syntax."""
+ if s is None:
+ import warnings
+ warnings.warn("Passing None for 's' to shlex.split() is deprecated.",
+ DeprecationWarning, stacklevel=2)
lex = shlex(s, posix=posix)
lex.whitespace_split = True
if not comments:
@@ -315,11 +315,11 @@ def split(s, comments=False, posix=True):
return list(lex)
-def join(split_command):
- """Return a shell-escaped string from *split_command*."""
- return ' '.join(quote(arg) for arg in split_command)
-
-
+def join(split_command):
+ """Return a shell-escaped string from *split_command*."""
+ return ' '.join(quote(arg) for arg in split_command)
+
+
_find_unsafe = re.compile(r'[^\w@%+=:,./-]', re.ASCII).search
def quote(s):
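Aside: join() is the inverse of split(), and quote() single-quotes anything
matching _find_unsafe, so a round trip is lossless. A short example:

    import shlex

    args = ["echo", "hello world", "$HOME"]
    cmd = shlex.join(args)              # "echo 'hello world' '$HOME'"
    assert shlex.split(cmd) == args     # round-trips safely
    print(shlex.quote("a;b"))           # -> 'a;b' (single-quoted, shell-safe)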
diff --git a/contrib/tools/python3/src/Lib/shutil.py b/contrib/tools/python3/src/Lib/shutil.py
index 752900c588..19dfd529c0 100644
--- a/contrib/tools/python3/src/Lib/shutil.py
+++ b/contrib/tools/python3/src/Lib/shutil.py
@@ -42,20 +42,20 @@ try:
except ImportError:
getgrnam = None
-_WINDOWS = os.name == 'nt'
-posix = nt = None
-if os.name == 'posix':
- import posix
-elif _WINDOWS:
- import nt
-
-COPY_BUFSIZE = 1024 * 1024 if _WINDOWS else 64 * 1024
-_USE_CP_SENDFILE = hasattr(os, "sendfile") and sys.platform.startswith("linux")
-_HAS_FCOPYFILE = posix and hasattr(posix, "_fcopyfile") # macOS
-
-# CMD defaults in Windows 10
-_WIN_DEFAULT_PATHEXT = ".COM;.EXE;.BAT;.CMD;.VBS;.JS;.WS;.MSC"
-
+_WINDOWS = os.name == 'nt'
+posix = nt = None
+if os.name == 'posix':
+ import posix
+elif _WINDOWS:
+ import nt
+
+COPY_BUFSIZE = 1024 * 1024 if _WINDOWS else 64 * 1024
+_USE_CP_SENDFILE = hasattr(os, "sendfile") and sys.platform.startswith("linux")
+_HAS_FCOPYFILE = posix and hasattr(posix, "_fcopyfile") # macOS
+
+# CMD defaults in Windows 10
+_WIN_DEFAULT_PATHEXT = ".COM;.EXE;.BAT;.CMD;.VBS;.JS;.WS;.MSC"
+
__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
"copytree", "move", "rmtree", "Error", "SpecialFileError",
"ExecError", "make_archive", "get_archive_formats",
@@ -86,135 +86,135 @@ class RegistryError(Exception):
"""Raised when a registry operation with the archiving
and unpacking registries fails"""
-class _GiveupOnFastCopy(Exception):
-    """Raised as a signal to fall back on using raw read()/write()
- file copy when fast-copy functions fail to do so.
- """
-
-def _fastcopy_fcopyfile(fsrc, fdst, flags):
- """Copy a regular file content or metadata by using high-performance
- fcopyfile(3) syscall (macOS).
- """
- try:
- infd = fsrc.fileno()
- outfd = fdst.fileno()
- except Exception as err:
- raise _GiveupOnFastCopy(err) # not a regular file
-
- try:
- posix._fcopyfile(infd, outfd, flags)
- except OSError as err:
- err.filename = fsrc.name
- err.filename2 = fdst.name
- if err.errno in {errno.EINVAL, errno.ENOTSUP}:
- raise _GiveupOnFastCopy(err)
- else:
- raise err from None
-
-def _fastcopy_sendfile(fsrc, fdst):
- """Copy data from one regular mmap-like fd to another by using
- high-performance sendfile(2) syscall.
- This should work on Linux >= 2.6.33 only.
- """
- # Note: copyfileobj() is left alone in order to not introduce any
- # unexpected breakage. Possible risks by using zero-copy calls
- # in copyfileobj() are:
- # - fdst cannot be open in "a"(ppend) mode
- # - fsrc and fdst may be open in "t"(ext) mode
- # - fsrc may be a BufferedReader (which hides unread data in a buffer),
- # GzipFile (which decompresses data), HTTPResponse (which decodes
- # chunks).
- # - possibly others (e.g. encrypted fs/partition?)
- global _USE_CP_SENDFILE
- try:
- infd = fsrc.fileno()
- outfd = fdst.fileno()
- except Exception as err:
- raise _GiveupOnFastCopy(err) # not a regular file
-
- # Hopefully the whole file will be copied in a single call.
-    # sendfile() is called in a loop until EOF is reached (0 return)
- # so a bufsize smaller or bigger than the actual file size
- # should not make any difference, also in case the file content
- # changes while being copied.
- try:
- blocksize = max(os.fstat(infd).st_size, 2 ** 23) # min 8MiB
- except OSError:
- blocksize = 2 ** 27 # 128MiB
- # On 32-bit architectures truncate to 1GiB to avoid OverflowError,
- # see bpo-38319.
- if sys.maxsize < 2 ** 32:
- blocksize = min(blocksize, 2 ** 30)
-
- offset = 0
- while True:
- try:
- sent = os.sendfile(outfd, infd, offset, blocksize)
- except OSError as err:
-            # ...in order to have a more informative exception.
- err.filename = fsrc.name
- err.filename2 = fdst.name
-
- if err.errno == errno.ENOTSOCK:
- # sendfile() on this platform (probably Linux < 2.6.33)
- # does not support copies between regular files (only
- # sockets).
- _USE_CP_SENDFILE = False
- raise _GiveupOnFastCopy(err)
-
- if err.errno == errno.ENOSPC: # filesystem is full
- raise err from None
-
- # Give up on first call and if no data was copied.
-            # Give up on the first call if no data was copied.
- raise _GiveupOnFastCopy(err)
-
- raise err
- else:
- if sent == 0:
- break # EOF
- offset += sent
-
-def _copyfileobj_readinto(fsrc, fdst, length=COPY_BUFSIZE):
- """readinto()/memoryview() based variant of copyfileobj().
- *fsrc* must support readinto() method and both files must be
- open in binary mode.
- """
- # Localize variable access to minimize overhead.
- fsrc_readinto = fsrc.readinto
- fdst_write = fdst.write
- with memoryview(bytearray(length)) as mv:
- while True:
- n = fsrc_readinto(mv)
- if not n:
- break
- elif n < length:
- with mv[:n] as smv:
- fdst.write(smv)
- else:
- fdst_write(mv)
-
-def copyfileobj(fsrc, fdst, length=0):
+class _GiveupOnFastCopy(Exception):
+    """Raised as a signal to fall back on using raw read()/write()
+ file copy when fast-copy functions fail to do so.
+ """
+
+def _fastcopy_fcopyfile(fsrc, fdst, flags):
+ """Copy a regular file content or metadata by using high-performance
+ fcopyfile(3) syscall (macOS).
+ """
+ try:
+ infd = fsrc.fileno()
+ outfd = fdst.fileno()
+ except Exception as err:
+ raise _GiveupOnFastCopy(err) # not a regular file
+
+ try:
+ posix._fcopyfile(infd, outfd, flags)
+ except OSError as err:
+ err.filename = fsrc.name
+ err.filename2 = fdst.name
+ if err.errno in {errno.EINVAL, errno.ENOTSUP}:
+ raise _GiveupOnFastCopy(err)
+ else:
+ raise err from None
+
+def _fastcopy_sendfile(fsrc, fdst):
+ """Copy data from one regular mmap-like fd to another by using
+ high-performance sendfile(2) syscall.
+ This should work on Linux >= 2.6.33 only.
+ """
+ # Note: copyfileobj() is left alone in order to not introduce any
+ # unexpected breakage. Possible risks by using zero-copy calls
+ # in copyfileobj() are:
+ # - fdst cannot be open in "a"(ppend) mode
+ # - fsrc and fdst may be open in "t"(ext) mode
+ # - fsrc may be a BufferedReader (which hides unread data in a buffer),
+ # GzipFile (which decompresses data), HTTPResponse (which decodes
+ # chunks).
+ # - possibly others (e.g. encrypted fs/partition?)
+ global _USE_CP_SENDFILE
+ try:
+ infd = fsrc.fileno()
+ outfd = fdst.fileno()
+ except Exception as err:
+ raise _GiveupOnFastCopy(err) # not a regular file
+
+ # Hopefully the whole file will be copied in a single call.
+    # sendfile() is called in a loop until EOF is reached (0 return)
+ # so a bufsize smaller or bigger than the actual file size
+ # should not make any difference, also in case the file content
+ # changes while being copied.
+ try:
+ blocksize = max(os.fstat(infd).st_size, 2 ** 23) # min 8MiB
+ except OSError:
+ blocksize = 2 ** 27 # 128MiB
+ # On 32-bit architectures truncate to 1GiB to avoid OverflowError,
+ # see bpo-38319.
+ if sys.maxsize < 2 ** 32:
+ blocksize = min(blocksize, 2 ** 30)
+
+ offset = 0
+ while True:
+ try:
+ sent = os.sendfile(outfd, infd, offset, blocksize)
+ except OSError as err:
+            # ...in order to have a more informative exception.
+ err.filename = fsrc.name
+ err.filename2 = fdst.name
+
+ if err.errno == errno.ENOTSOCK:
+ # sendfile() on this platform (probably Linux < 2.6.33)
+ # does not support copies between regular files (only
+ # sockets).
+ _USE_CP_SENDFILE = False
+ raise _GiveupOnFastCopy(err)
+
+ if err.errno == errno.ENOSPC: # filesystem is full
+ raise err from None
+
+ # Give up on first call and if no data was copied.
+            # Give up on the first call if no data was copied.
+ raise _GiveupOnFastCopy(err)
+
+ raise err
+ else:
+ if sent == 0:
+ break # EOF
+ offset += sent
+
+def _copyfileobj_readinto(fsrc, fdst, length=COPY_BUFSIZE):
+ """readinto()/memoryview() based variant of copyfileobj().
+ *fsrc* must support readinto() method and both files must be
+ open in binary mode.
+ """
+ # Localize variable access to minimize overhead.
+ fsrc_readinto = fsrc.readinto
+ fdst_write = fdst.write
+ with memoryview(bytearray(length)) as mv:
+ while True:
+ n = fsrc_readinto(mv)
+ if not n:
+ break
+ elif n < length:
+ with mv[:n] as smv:
+ fdst.write(smv)
+ else:
+ fdst_write(mv)
+
+def copyfileobj(fsrc, fdst, length=0):
"""copy data from file-like object fsrc to file-like object fdst"""
- # Localize variable access to minimize overhead.
- if not length:
- length = COPY_BUFSIZE
- fsrc_read = fsrc.read
- fdst_write = fdst.write
- while True:
- buf = fsrc_read(length)
+ # Localize variable access to minimize overhead.
+ if not length:
+ length = COPY_BUFSIZE
+ fsrc_read = fsrc.read
+ fdst_write = fdst.write
+ while True:
+ buf = fsrc_read(length)
if not buf:
break
- fdst_write(buf)
+ fdst_write(buf)
def _samefile(src, dst):
# Macintosh, Unix.
- if isinstance(src, os.DirEntry) and hasattr(os.path, 'samestat'):
- try:
- return os.path.samestat(src.stat(), os.stat(dst))
- except OSError:
- return False
-
+ if isinstance(src, os.DirEntry) and hasattr(os.path, 'samestat'):
+ try:
+ return os.path.samestat(src.stat(), os.stat(dst))
+ except OSError:
+ return False
+
if hasattr(os.path, 'samefile'):
try:
return os.path.samefile(src, dst)
@@ -225,74 +225,74 @@ def _samefile(src, dst):
return (os.path.normcase(os.path.abspath(src)) ==
os.path.normcase(os.path.abspath(dst)))
-def _stat(fn):
- return fn.stat() if isinstance(fn, os.DirEntry) else os.stat(fn)
-
-def _islink(fn):
- return fn.is_symlink() if isinstance(fn, os.DirEntry) else os.path.islink(fn)
-
+def _stat(fn):
+ return fn.stat() if isinstance(fn, os.DirEntry) else os.stat(fn)
+
+def _islink(fn):
+ return fn.is_symlink() if isinstance(fn, os.DirEntry) else os.path.islink(fn)
+
def copyfile(src, dst, *, follow_symlinks=True):
- """Copy data from src to dst in the most efficient way possible.
+ """Copy data from src to dst in the most efficient way possible.
If follow_symlinks is not set and src is a symbolic link, a new
symlink will be created instead of copying the file it points to.
"""
- sys.audit("shutil.copyfile", src, dst)
-
+ sys.audit("shutil.copyfile", src, dst)
+
if _samefile(src, dst):
raise SameFileError("{!r} and {!r} are the same file".format(src, dst))
- file_size = 0
- for i, fn in enumerate([src, dst]):
+ file_size = 0
+ for i, fn in enumerate([src, dst]):
try:
- st = _stat(fn)
+ st = _stat(fn)
except OSError:
# File most likely does not exist
pass
else:
# XXX What about other special files? (sockets, devices...)
if stat.S_ISFIFO(st.st_mode):
- fn = fn.path if isinstance(fn, os.DirEntry) else fn
+ fn = fn.path if isinstance(fn, os.DirEntry) else fn
raise SpecialFileError("`%s` is a named pipe" % fn)
- if _WINDOWS and i == 0:
- file_size = st.st_size
+ if _WINDOWS and i == 0:
+ file_size = st.st_size
- if not follow_symlinks and _islink(src):
+ if not follow_symlinks and _islink(src):
os.symlink(os.readlink(src), dst)
else:
- with open(src, 'rb') as fsrc:
- try:
- with open(dst, 'wb') as fdst:
- # macOS
- if _HAS_FCOPYFILE:
- try:
- _fastcopy_fcopyfile(fsrc, fdst, posix._COPYFILE_DATA)
- return dst
- except _GiveupOnFastCopy:
- pass
- # Linux
- elif _USE_CP_SENDFILE:
- try:
- _fastcopy_sendfile(fsrc, fdst)
- return dst
- except _GiveupOnFastCopy:
- pass
- # Windows, see:
- # https://github.com/python/cpython/pull/7160#discussion_r195405230
- elif _WINDOWS and file_size > 0:
- _copyfileobj_readinto(fsrc, fdst, min(file_size, COPY_BUFSIZE))
- return dst
-
- copyfileobj(fsrc, fdst)
-
- # Issue 43219, raise a less confusing exception
- except IsADirectoryError as e:
- if not os.path.exists(dst):
- raise FileNotFoundError(f'Directory does not exist: {dst}') from e
- else:
- raise
-
+ with open(src, 'rb') as fsrc:
+ try:
+ with open(dst, 'wb') as fdst:
+ # macOS
+ if _HAS_FCOPYFILE:
+ try:
+ _fastcopy_fcopyfile(fsrc, fdst, posix._COPYFILE_DATA)
+ return dst
+ except _GiveupOnFastCopy:
+ pass
+ # Linux
+ elif _USE_CP_SENDFILE:
+ try:
+ _fastcopy_sendfile(fsrc, fdst)
+ return dst
+ except _GiveupOnFastCopy:
+ pass
+ # Windows, see:
+ # https://github.com/python/cpython/pull/7160#discussion_r195405230
+ elif _WINDOWS and file_size > 0:
+ _copyfileobj_readinto(fsrc, fdst, min(file_size, COPY_BUFSIZE))
+ return dst
+
+ copyfileobj(fsrc, fdst)
+
+ # Issue 43219, raise a less confusing exception
+ except IsADirectoryError as e:
+ if not os.path.exists(dst):
+ raise FileNotFoundError(f'Directory does not exist: {dst}') from e
+ else:
+ raise
+
return dst
def copymode(src, dst, *, follow_symlinks=True):
@@ -303,15 +303,15 @@ def copymode(src, dst, *, follow_symlinks=True):
(e.g. Linux) this method does nothing.
"""
- sys.audit("shutil.copymode", src, dst)
-
- if not follow_symlinks and _islink(src) and os.path.islink(dst):
+ sys.audit("shutil.copymode", src, dst)
+
+ if not follow_symlinks and _islink(src) and os.path.islink(dst):
if hasattr(os, 'lchmod'):
stat_func, chmod_func = os.lstat, os.lchmod
else:
return
else:
- stat_func, chmod_func = _stat, os.chmod
+ stat_func, chmod_func = _stat, os.chmod
st = stat_func(src)
chmod_func(dst, stat.S_IMODE(st.st_mode))
@@ -329,7 +329,7 @@ if hasattr(os, 'listxattr'):
try:
names = os.listxattr(src, follow_symlinks=follow_symlinks)
except OSError as e:
- if e.errno not in (errno.ENOTSUP, errno.ENODATA, errno.EINVAL):
+ if e.errno not in (errno.ENOTSUP, errno.ENODATA, errno.EINVAL):
raise
return
for name in names:
@@ -337,8 +337,8 @@ if hasattr(os, 'listxattr'):
value = os.getxattr(src, name, follow_symlinks=follow_symlinks)
os.setxattr(dst, name, value, follow_symlinks=follow_symlinks)
except OSError as e:
- if e.errno not in (errno.EPERM, errno.ENOTSUP, errno.ENODATA,
- errno.EINVAL):
+ if e.errno not in (errno.EPERM, errno.ENOTSUP, errno.ENODATA,
+ errno.EINVAL):
raise
else:
def _copyxattr(*args, **kwargs):
@@ -350,19 +350,19 @@ def copystat(src, dst, *, follow_symlinks=True):
Copy the permission bits, last access time, last modification time, and
flags from `src` to `dst`. On Linux, copystat() also copies the "extended
attributes" where possible. The file contents, owner, and group are
- unaffected. `src` and `dst` are path-like objects or path names given as
- strings.
+ unaffected. `src` and `dst` are path-like objects or path names given as
+ strings.
If the optional flag `follow_symlinks` is not set, symlinks aren't
followed if and only if both `src` and `dst` are symlinks.
"""
- sys.audit("shutil.copystat", src, dst)
-
+ sys.audit("shutil.copystat", src, dst)
+
def _nop(*args, ns=None, follow_symlinks=None):
pass
# follow symlinks (aka don't not follow symlinks)
- follow = follow_symlinks or not (_islink(src) and os.path.islink(dst))
+ follow = follow_symlinks or not (_islink(src) and os.path.islink(dst))
if follow:
# use the real function if it exists
def lookup(name):
@@ -376,16 +376,16 @@ def copystat(src, dst, *, follow_symlinks=True):
return fn
return _nop
- if isinstance(src, os.DirEntry):
- st = src.stat(follow_symlinks=follow)
- else:
- st = lookup("stat")(src, follow_symlinks=follow)
+ if isinstance(src, os.DirEntry):
+ st = src.stat(follow_symlinks=follow)
+ else:
+ st = lookup("stat")(src, follow_symlinks=follow)
mode = stat.S_IMODE(st.st_mode)
lookup("utime")(dst, ns=(st.st_atime_ns, st.st_mtime_ns),
follow_symlinks=follow)
- # We must copy extended attributes before the file is (potentially)
- # chmod()'ed read-only, otherwise setxattr() will error with -EACCES.
- _copyxattr(src, dst, follow_symlinks=follow)
+ # We must copy extended attributes before the file is (potentially)
+ # chmod()'ed read-only, otherwise setxattr() will error with -EACCES.
+ _copyxattr(src, dst, follow_symlinks=follow)
try:
lookup("chmod")(dst, mode, follow_symlinks=follow)
except NotImplementedError:
@@ -457,55 +457,55 @@ def ignore_patterns(*patterns):
return set(ignored_names)
return _ignore_patterns
-def _copytree(entries, src, dst, symlinks, ignore, copy_function,
- ignore_dangling_symlinks, dirs_exist_ok=False):
+def _copytree(entries, src, dst, symlinks, ignore, copy_function,
+ ignore_dangling_symlinks, dirs_exist_ok=False):
if ignore is not None:
- ignored_names = ignore(os.fspath(src), [x.name for x in entries])
+ ignored_names = ignore(os.fspath(src), [x.name for x in entries])
else:
ignored_names = set()
- os.makedirs(dst, exist_ok=dirs_exist_ok)
+ os.makedirs(dst, exist_ok=dirs_exist_ok)
errors = []
- use_srcentry = copy_function is copy2 or copy_function is copy
-
- for srcentry in entries:
- if srcentry.name in ignored_names:
+ use_srcentry = copy_function is copy2 or copy_function is copy
+
+ for srcentry in entries:
+ if srcentry.name in ignored_names:
continue
- srcname = os.path.join(src, srcentry.name)
- dstname = os.path.join(dst, srcentry.name)
- srcobj = srcentry if use_srcentry else srcname
+ srcname = os.path.join(src, srcentry.name)
+ dstname = os.path.join(dst, srcentry.name)
+ srcobj = srcentry if use_srcentry else srcname
try:
- is_symlink = srcentry.is_symlink()
- if is_symlink and os.name == 'nt':
- # Special check for directory junctions, which appear as
- # symlinks but we want to recurse.
- lstat = srcentry.stat(follow_symlinks=False)
- if lstat.st_reparse_tag == stat.IO_REPARSE_TAG_MOUNT_POINT:
- is_symlink = False
- if is_symlink:
+ is_symlink = srcentry.is_symlink()
+ if is_symlink and os.name == 'nt':
+ # Special check for directory junctions, which appear as
+ # symlinks but we want to recurse.
+ lstat = srcentry.stat(follow_symlinks=False)
+ if lstat.st_reparse_tag == stat.IO_REPARSE_TAG_MOUNT_POINT:
+ is_symlink = False
+ if is_symlink:
linkto = os.readlink(srcname)
if symlinks:
# We can't just leave it to `copy_function` because legacy
# code with a custom `copy_function` may rely on copytree
# doing the right thing.
os.symlink(linkto, dstname)
- copystat(srcobj, dstname, follow_symlinks=not symlinks)
+ copystat(srcobj, dstname, follow_symlinks=not symlinks)
else:
# ignore dangling symlink if the flag is on
if not os.path.exists(linkto) and ignore_dangling_symlinks:
continue
- # otherwise let the copy occur. copy2 will raise an error
- if srcentry.is_dir():
- copytree(srcobj, dstname, symlinks, ignore,
- copy_function, dirs_exist_ok=dirs_exist_ok)
+ # otherwise let the copy occur. copy2 will raise an error
+ if srcentry.is_dir():
+ copytree(srcobj, dstname, symlinks, ignore,
+ copy_function, dirs_exist_ok=dirs_exist_ok)
else:
- copy_function(srcobj, dstname)
- elif srcentry.is_dir():
- copytree(srcobj, dstname, symlinks, ignore, copy_function,
- dirs_exist_ok=dirs_exist_ok)
+ copy_function(srcobj, dstname)
+ elif srcentry.is_dir():
+ copytree(srcobj, dstname, symlinks, ignore, copy_function,
+ dirs_exist_ok=dirs_exist_ok)
else:
# Will raise a SpecialFileError for unsupported file types
- copy_function(srcobj, dstname)
+ copy_function(srcobj, dstname)
# catch the Error from the recursive copytree so that we can
# continue with other files
except Error as err:
@@ -522,83 +522,83 @@ def _copytree(entries, src, dst, symlinks, ignore, copy_function,
raise Error(errors)
return dst
-def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
- ignore_dangling_symlinks=False, dirs_exist_ok=False):
- """Recursively copy a directory tree and return the destination directory.
-
-    If dirs_exist_ok is false (the default), an exception is raised if dst
-    (or any directory that copytree() needs to create) already exists.
-
- If exception(s) occur, an Error is raised with a list of reasons.
-
- If the optional symlinks flag is true, symbolic links in the
- source tree result in symbolic links in the destination tree; if
- it is false, the contents of the files pointed to by symbolic
-    links are copied. If the file pointed to by the symlink doesn't
- exist, an exception will be added in the list of errors raised in
- an Error exception at the end of the copy process.
-
- You can set the optional ignore_dangling_symlinks flag to true if you
- want to silence this exception. Notice that this has no effect on
- platforms that don't support os.symlink.
-
- The optional ignore argument is a callable. If given, it
- is called with the `src` parameter, which is the directory
- being visited by copytree(), and `names` which is the list of
- `src` contents, as returned by os.listdir():
-
- callable(src, names) -> ignored_names
-
- Since copytree() is called recursively, the callable will be
- called once for each directory that is copied. It returns a
- list of names relative to the `src` directory that should
- not be copied.
-
- The optional copy_function argument is a callable that will be used
- to copy each file. It will be called with the source path and the
- destination path as arguments. By default, copy2() is used, but any
- function that supports the same signature (like copy()) can be used.
-
- """
- sys.audit("shutil.copytree", src, dst)
- with os.scandir(src) as itr:
- entries = list(itr)
- return _copytree(entries=entries, src=src, dst=dst, symlinks=symlinks,
- ignore=ignore, copy_function=copy_function,
- ignore_dangling_symlinks=ignore_dangling_symlinks,
- dirs_exist_ok=dirs_exist_ok)
-
-if hasattr(os.stat_result, 'st_file_attributes'):
- # Special handling for directory junctions to make them behave like
- # symlinks for shutil.rmtree, since in general they do not appear as
- # regular links.
- def _rmtree_isdir(entry):
- try:
- st = entry.stat(follow_symlinks=False)
- return (stat.S_ISDIR(st.st_mode) and not
- (st.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT
- and st.st_reparse_tag == stat.IO_REPARSE_TAG_MOUNT_POINT))
- except OSError:
- return False
-
- def _rmtree_islink(path):
- try:
- st = os.lstat(path)
- return (stat.S_ISLNK(st.st_mode) or
- (st.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT
- and st.st_reparse_tag == stat.IO_REPARSE_TAG_MOUNT_POINT))
- except OSError:
- return False
-else:
- def _rmtree_isdir(entry):
- try:
- return entry.is_dir(follow_symlinks=False)
- except OSError:
- return False
-
- def _rmtree_islink(path):
- return os.path.islink(path)
-
+def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
+ ignore_dangling_symlinks=False, dirs_exist_ok=False):
+ """Recursively copy a directory tree and return the destination directory.
+
+    If dirs_exist_ok is false (the default), an exception is raised if dst
+    (or any directory that copytree() needs to create) already exists.
+
+ If exception(s) occur, an Error is raised with a list of reasons.
+
+ If the optional symlinks flag is true, symbolic links in the
+ source tree result in symbolic links in the destination tree; if
+ it is false, the contents of the files pointed to by symbolic
+    links are copied. If the file pointed to by the symlink doesn't
+ exist, an exception will be added in the list of errors raised in
+ an Error exception at the end of the copy process.
+
+ You can set the optional ignore_dangling_symlinks flag to true if you
+ want to silence this exception. Notice that this has no effect on
+ platforms that don't support os.symlink.
+
+ The optional ignore argument is a callable. If given, it
+ is called with the `src` parameter, which is the directory
+ being visited by copytree(), and `names` which is the list of
+ `src` contents, as returned by os.listdir():
+
+ callable(src, names) -> ignored_names
+
+ Since copytree() is called recursively, the callable will be
+ called once for each directory that is copied. It returns a
+ list of names relative to the `src` directory that should
+ not be copied.
+
+ The optional copy_function argument is a callable that will be used
+ to copy each file. It will be called with the source path and the
+ destination path as arguments. By default, copy2() is used, but any
+ function that supports the same signature (like copy()) can be used.
+
+ """
+ sys.audit("shutil.copytree", src, dst)
+ with os.scandir(src) as itr:
+ entries = list(itr)
+ return _copytree(entries=entries, src=src, dst=dst, symlinks=symlinks,
+ ignore=ignore, copy_function=copy_function,
+ ignore_dangling_symlinks=ignore_dangling_symlinks,
+ dirs_exist_ok=dirs_exist_ok)
+
+if hasattr(os.stat_result, 'st_file_attributes'):
+ # Special handling for directory junctions to make them behave like
+ # symlinks for shutil.rmtree, since in general they do not appear as
+ # regular links.
+ def _rmtree_isdir(entry):
+ try:
+ st = entry.stat(follow_symlinks=False)
+ return (stat.S_ISDIR(st.st_mode) and not
+ (st.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT
+ and st.st_reparse_tag == stat.IO_REPARSE_TAG_MOUNT_POINT))
+ except OSError:
+ return False
+
+ def _rmtree_islink(path):
+ try:
+ st = os.lstat(path)
+ return (stat.S_ISLNK(st.st_mode) or
+ (st.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT
+ and st.st_reparse_tag == stat.IO_REPARSE_TAG_MOUNT_POINT))
+ except OSError:
+ return False
+else:
+ def _rmtree_isdir(entry):
+ try:
+ return entry.is_dir(follow_symlinks=False)
+ except OSError:
+ return False
+
+ def _rmtree_islink(path):
+ return os.path.islink(path)
+
# version vulnerable to race conditions
def _rmtree_unsafe(path, onerror):
try:
@@ -609,7 +609,7 @@ def _rmtree_unsafe(path, onerror):
entries = []
for entry in entries:
fullname = entry.path
- if _rmtree_isdir(entry):
+ if _rmtree_isdir(entry):
try:
if entry.is_symlink():
# This can only happen if someone replaces
@@ -645,14 +645,14 @@ def _rmtree_safe_fd(topfd, path, onerror):
is_dir = entry.is_dir(follow_symlinks=False)
except OSError:
is_dir = False
- else:
- if is_dir:
- try:
- orig_st = entry.stat(follow_symlinks=False)
- is_dir = stat.S_ISDIR(orig_st.st_mode)
- except OSError:
- onerror(os.lstat, fullname, sys.exc_info())
- continue
+ else:
+ if is_dir:
+ try:
+ orig_st = entry.stat(follow_symlinks=False)
+ is_dir = stat.S_ISDIR(orig_st.st_mode)
+ except OSError:
+ onerror(os.lstat, fullname, sys.exc_info())
+ continue
if is_dir:
try:
dirfd = os.open(entry.name, os.O_RDONLY, dir_fd=topfd)
@@ -699,7 +699,7 @@ def rmtree(path, ignore_errors=False, onerror=None):
is false and onerror is None, an exception is raised.
"""
- sys.audit("shutil.rmtree", path)
+ sys.audit("shutil.rmtree", path)
if ignore_errors:
def onerror(*args):
pass
@@ -720,7 +720,7 @@ def rmtree(path, ignore_errors=False, onerror=None):
try:
fd = os.open(path, os.O_RDONLY)
except Exception:
- onerror(os.open, path, sys.exc_info())
+ onerror(os.open, path, sys.exc_info())
return
try:
if os.path.samestat(orig_st, os.fstat(fd)):
@@ -739,7 +739,7 @@ def rmtree(path, ignore_errors=False, onerror=None):
os.close(fd)
else:
try:
- if _rmtree_islink(path):
+ if _rmtree_islink(path):
# symlinks to directories are forbidden, see bug #1669
raise OSError("Cannot call rmtree on a symbolic link")
except OSError:
@@ -753,20 +753,20 @@ def rmtree(path, ignore_errors=False, onerror=None):
rmtree.avoids_symlink_attacks = _use_fd_functions
def _basename(path):
- """A basename() variant which first strips the trailing slash, if present.
- Thus we always get the last component of the path, even for directories.
-
- path: Union[PathLike, str]
-
- e.g.
- >>> os.path.basename('/bar/foo')
- 'foo'
- >>> os.path.basename('/bar/foo/')
- ''
- >>> _basename('/bar/foo/')
- 'foo'
- """
- path = os.fspath(path)
+ """A basename() variant which first strips the trailing slash, if present.
+ Thus we always get the last component of the path, even for directories.
+
+ path: Union[PathLike, str]
+
+ e.g.
+ >>> os.path.basename('/bar/foo')
+ 'foo'
+ >>> os.path.basename('/bar/foo/')
+ ''
+ >>> _basename('/bar/foo/')
+ 'foo'
+ """
+ path = os.fspath(path)
sep = os.path.sep + (os.path.altsep or '')
return os.path.basename(path.rstrip(sep))
@@ -796,7 +796,7 @@ def move(src, dst, copy_function=copy2):
the issues this implementation glosses over.
"""
- sys.audit("shutil.move", src, dst)
+ sys.audit("shutil.move", src, dst)
real_dst = dst
if os.path.isdir(dst):
if _samefile(src, dst):
@@ -805,10 +805,10 @@ def move(src, dst, copy_function=copy2):
os.rename(src, dst)
return
- # Using _basename instead of os.path.basename is important, as we must
- # ignore any trailing slash to avoid the basename returning ''
+ # Using _basename instead of os.path.basename is important, as we must
+ # ignore any trailing slash to avoid the basename returning ''
real_dst = os.path.join(dst, _basename(src))
-
+
if os.path.exists(real_dst):
raise Error("Destination path '%s' already exists" % real_dst)
try:
@@ -822,12 +822,12 @@ def move(src, dst, copy_function=copy2):
if _destinsrc(src, dst):
raise Error("Cannot move a directory '%s' into itself"
" '%s'." % (src, dst))
- if (_is_immutable(src)
- or (not os.access(src, os.W_OK) and os.listdir(src)
- and sys.platform == 'darwin')):
- raise PermissionError("Cannot move the non-empty directory "
- "'%s': Lacking write permission to '%s'."
- % (src, src))
+ if (_is_immutable(src)
+ or (not os.access(src, os.W_OK) and os.listdir(src)
+ and sys.platform == 'darwin')):
+ raise PermissionError("Cannot move the non-empty directory "
+ "'%s': Lacking write permission to '%s'."
+ % (src, src))
copytree(src, real_dst, copy_function=copy_function,
symlinks=True)
rmtree(src)
@@ -845,11 +845,11 @@ def _destinsrc(src, dst):
dst += os.path.sep
return dst.startswith(src)
-def _is_immutable(src):
- st = _stat(src)
- immutable_states = [stat.UF_IMMUTABLE, stat.SF_IMMUTABLE]
- return hasattr(st, 'st_flags') and st.st_flags in immutable_states
-
+def _is_immutable(src):
+ st = _stat(src)
+ immutable_states = [stat.UF_IMMUTABLE, stat.SF_IMMUTABLE]
+ return hasattr(st, 'st_flags') and st.st_flags in immutable_states
+
def _get_gid(name):
"""Returns a gid, given a group name."""
if getgrnam is None or name is None:
@@ -1052,7 +1052,7 @@ def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
'owner' and 'group' are used when creating a tar archive. By default,
uses the current owner and group.
"""
- sys.audit("shutil.make_archive", base_name, format, root_dir, base_dir)
+ sys.audit("shutil.make_archive", base_name, format, root_dir, base_dir)
save_cwd = os.getcwd()
if root_dir is not None:
if logger is not None:
@@ -1168,16 +1168,16 @@ def _unpack_zipfile(filename, extract_dir):
if name.startswith('/') or '..' in name:
continue
- targetpath = os.path.join(extract_dir, *name.split('/'))
- if not targetpath:
+ targetpath = os.path.join(extract_dir, *name.split('/'))
+ if not targetpath:
continue
- _ensure_directory(targetpath)
+ _ensure_directory(targetpath)
if not name.endswith('/'):
# file
- with zip.open(name, 'r') as source, \
- open(targetpath, 'wb') as target:
- copyfileobj(source, target)
+ with zip.open(name, 'r') as source, \
+ open(targetpath, 'wb') as target:
+ copyfileobj(source, target)
finally:
zip.close()
@@ -1234,8 +1234,8 @@ def unpack_archive(filename, extract_dir=None, format=None):
In case none is found, a ValueError is raised.
"""
- sys.audit("shutil.unpack_archive", filename, extract_dir, format)
-
+ sys.audit("shutil.unpack_archive", filename, extract_dir, format)
+
if extract_dir is None:
extract_dir = os.getcwd()
@@ -1281,7 +1281,7 @@ if hasattr(os, 'statvfs'):
used = (st.f_blocks - st.f_bfree) * st.f_frsize
return _ntuple_diskusage(total, used, free)
-elif _WINDOWS:
+elif _WINDOWS:
__all__.append('disk_usage')
_ntuple_diskusage = collections.namedtuple('usage', 'total used free')
@@ -1303,7 +1303,7 @@ def chown(path, user=None, group=None):
user and group can be the uid/gid or the user/group names, and in that case,
they are converted to their respective uid/gid.
"""
- sys.audit('shutil.chown', path, user, group)
+ sys.audit('shutil.chown', path, user, group)
if user is None and group is None:
raise ValueError("user and/or group must be set")
@@ -1374,15 +1374,15 @@ def get_terminal_size(fallback=(80, 24)):
return os.terminal_size((columns, lines))
-
-# Check that a given file can be accessed with the correct mode.
-# Additionally check that `file` is not a directory, as on Windows
-# directories pass the os.access check.
-def _access_check(fn, mode):
- return (os.path.exists(fn) and os.access(fn, mode)
- and not os.path.isdir(fn))
-
-
+
+# Check that a given file can be accessed with the correct mode.
+# Additionally check that `file` is not a directory, as on Windows
+# directories pass the os.access check.
+def _access_check(fn, mode):
+ return (os.path.exists(fn) and os.access(fn, mode)
+ and not os.path.isdir(fn))
+
+
def which(cmd, mode=os.F_OK | os.X_OK, path=None):
"""Given a command, mode, and a PATH string, return the path which
conforms to the given mode on the PATH, or None if there is no such
@@ -1401,44 +1401,44 @@ def which(cmd, mode=os.F_OK | os.X_OK, path=None):
return cmd
return None
- use_bytes = isinstance(cmd, bytes)
-
+ use_bytes = isinstance(cmd, bytes)
+
if path is None:
- path = os.environ.get("PATH", None)
- if path is None:
- try:
- path = os.confstr("CS_PATH")
- except (AttributeError, ValueError):
- # os.confstr() or CS_PATH is not available
- path = os.defpath
- # bpo-35755: Don't use os.defpath if the PATH environment variable is
- # set to an empty string
-
- # PATH='' doesn't match, whereas PATH=':' looks in the current directory
+ path = os.environ.get("PATH", None)
+ if path is None:
+ try:
+ path = os.confstr("CS_PATH")
+ except (AttributeError, ValueError):
+ # os.confstr() or CS_PATH is not available
+ path = os.defpath
+ # bpo-35755: Don't use os.defpath if the PATH environment variable is
+ # set to an empty string
+
+ # PATH='' doesn't match, whereas PATH=':' looks in the current directory
if not path:
return None
- if use_bytes:
- path = os.fsencode(path)
- path = path.split(os.fsencode(os.pathsep))
- else:
- path = os.fsdecode(path)
- path = path.split(os.pathsep)
-
+ if use_bytes:
+ path = os.fsencode(path)
+ path = path.split(os.fsencode(os.pathsep))
+ else:
+ path = os.fsdecode(path)
+ path = path.split(os.pathsep)
+
if sys.platform == "win32":
# The current directory takes precedence on Windows.
- curdir = os.curdir
- if use_bytes:
- curdir = os.fsencode(curdir)
- if curdir not in path:
- path.insert(0, curdir)
+ curdir = os.curdir
+ if use_bytes:
+ curdir = os.fsencode(curdir)
+ if curdir not in path:
+ path.insert(0, curdir)
# PATHEXT is necessary to check on Windows.
- pathext_source = os.getenv("PATHEXT") or _WIN_DEFAULT_PATHEXT
- pathext = [ext for ext in pathext_source.split(os.pathsep) if ext]
-
- if use_bytes:
- pathext = [os.fsencode(ext) for ext in pathext]
+ pathext_source = os.getenv("PATHEXT") or _WIN_DEFAULT_PATHEXT
+ pathext = [ext for ext in pathext_source.split(os.pathsep) if ext]
+
+ if use_bytes:
+ pathext = [os.fsencode(ext) for ext in pathext]
# See if the given file matches any of the expected path extensions.
# This will allow us to short circuit when given "python.exe".
# If it does match, only test that one, otherwise we have to try
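Aside: the _copyfileobj_readinto() variant earlier in this shutil diff exists so
the Windows path can reuse one buffer instead of allocating a bytes object per
chunk. A hedged, self-contained re-creation of the pattern using in-memory
streams:

    import io

    def copy_readinto(fsrc, fdst, bufsize=64 * 1024):
        buf = memoryview(bytearray(bufsize))   # one reusable buffer
        while True:
            n = fsrc.readinto(buf)
            if not n:
                break
            fdst.write(buf[:n])                # slice covers the short last read

    src = io.BytesIO(b"x" * 100_000)
    dst = io.BytesIO()
    copy_readinto(src, dst)
    assert dst.getvalue() == src.getvalue()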
diff --git a/contrib/tools/python3/src/Lib/signal.py b/contrib/tools/python3/src/Lib/signal.py
index 50b215b29d..fbb0b5b19e 100644
--- a/contrib/tools/python3/src/Lib/signal.py
+++ b/contrib/tools/python3/src/Lib/signal.py
@@ -4,19 +4,19 @@ from enum import IntEnum as _IntEnum
_globals = globals()
-_IntEnum._convert_(
+_IntEnum._convert_(
'Signals', __name__,
lambda name:
name.isupper()
and (name.startswith('SIG') and not name.startswith('SIG_'))
or name.startswith('CTRL_'))
-_IntEnum._convert_(
+_IntEnum._convert_(
'Handlers', __name__,
lambda name: name in ('SIG_DFL', 'SIG_IGN'))
if 'pthread_sigmask' in _globals:
- _IntEnum._convert_(
+ _IntEnum._convert_(
'Sigmasks', __name__,
lambda name: name in ('SIG_BLOCK', 'SIG_UNBLOCK', 'SIG_SETMASK'))
@@ -41,16 +41,16 @@ def _enum_to_int(value):
return value
-# Similar to functools.wraps(), but only assign __doc__.
-# __module__ should be preserved,
-# __name__ and __qualname__ are already fine,
-# __annotations__ is not set.
-def _wraps(wrapped):
- def decorator(wrapper):
- wrapper.__doc__ = wrapped.__doc__
- return wrapper
- return decorator
-
+# Similar to functools.wraps(), but only assign __doc__.
+# __module__ should be preserved,
+# __name__ and __qualname__ are already fine,
+# __annotations__ is not set.
+def _wraps(wrapped):
+ def decorator(wrapper):
+ wrapper.__doc__ = wrapped.__doc__
+ return wrapper
+ return decorator
+
@_wraps(_signal.signal)
def signal(signalnum, handler):
handler = _signal.signal(_enum_to_int(signalnum), _enum_to_int(handler))
@@ -73,7 +73,7 @@ if 'pthread_sigmask' in _globals:
if 'sigpending' in _globals:
@_wraps(_signal.sigpending)
def sigpending():
- return {_int_to_enum(x, Signals) for x in _signal.sigpending()}
+ return {_int_to_enum(x, Signals) for x in _signal.sigpending()}
if 'sigwait' in _globals:
@@ -82,11 +82,11 @@ if 'sigwait' in _globals:
retsig = _signal.sigwait(sigset)
return _int_to_enum(retsig, Signals)
-
-if 'valid_signals' in _globals:
- @_wraps(_signal.valid_signals)
- def valid_signals():
- return {_int_to_enum(x, Signals) for x in _signal.valid_signals()}
-
-
+
+if 'valid_signals' in _globals:
+ @_wraps(_signal.valid_signals)
+ def valid_signals():
+ return {_int_to_enum(x, Signals) for x in _signal.valid_signals()}
+
+
del _globals, _wraps
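Aside: unlike functools.wraps(), the _wraps() helper above copies only __doc__,
because the thin signal wrappers already have the right __name__, __qualname__
and __module__. A minimal demonstration:

    def _wraps(wrapped):
        def decorator(wrapper):
            wrapper.__doc__ = wrapped.__doc__
            return wrapper
        return decorator

    def original():
        """Original docstring."""

    @_wraps(original)
    def wrapper():
        pass

    assert wrapper.__doc__ == "Original docstring."
    assert wrapper.__name__ == "wrapper"    # kept, unlike functools.wraps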
diff --git a/contrib/tools/python3/src/Lib/site.py b/contrib/tools/python3/src/Lib/site.py
index 19c10bc07f..0f702bdd91 100644
--- a/contrib/tools/python3/src/Lib/site.py
+++ b/contrib/tools/python3/src/Lib/site.py
@@ -73,7 +73,7 @@ import sys
import os
import builtins
import _sitebuiltins
-import io
+import io
# Prefixes for site-packages; add additional prefixes like /usr/local here
PREFIXES = [sys.prefix, sys.exec_prefix]
@@ -157,7 +157,7 @@ def addpackage(sitedir, name, known_paths):
reset = False
fullname = os.path.join(sitedir, name)
try:
- f = io.TextIOWrapper(io.open_code(fullname))
+ f = io.TextIOWrapper(io.open_code(fullname))
except OSError:
return
with f:
@@ -334,22 +334,22 @@ def getsitepackages(prefixes=None):
continue
seen.add(prefix)
- libdirs = [sys.platlibdir]
- if sys.platlibdir != "lib":
- libdirs.append("lib")
-
+ libdirs = [sys.platlibdir]
+ if sys.platlibdir != "lib":
+ libdirs.append("lib")
+
if os.sep == '/':
- for libdir in libdirs:
- path = os.path.join(prefix, libdir,
- "python%d.%d" % sys.version_info[:2],
- "site-packages")
- sitepackages.append(path)
+ for libdir in libdirs:
+ path = os.path.join(prefix, libdir,
+ "python%d.%d" % sys.version_info[:2],
+ "site-packages")
+ sitepackages.append(path)
else:
sitepackages.append(prefix)
-
- for libdir in libdirs:
- path = os.path.join(prefix, libdir, "site-packages")
- sitepackages.append(path)
+
+ for libdir in libdirs:
+ path = os.path.join(prefix, libdir, "site-packages")
+ sitepackages.append(path)
return sitepackages
def addsitepackages(known_paths, prefixes=None):
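Aside: sys.platlibdir (new in 3.9) can be "lib64" on some distributions, so the
getsitepackages() change above probes it first and falls back to plain "lib".
A sketch of the path construction for a single POSIX prefix:

    import os
    import sys

    prefix = sys.prefix
    libdirs = [sys.platlibdir]              # e.g. "lib64" on some distros
    if sys.platlibdir != "lib":
        libdirs.append("lib")
    ver = "python%d.%d" % sys.version_info[:2]
    for libdir in libdirs:                  # the os.sep == '/' branch above
        print(os.path.join(prefix, libdir, ver, "site-packages"))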
@@ -453,9 +453,9 @@ def enablerlcompleter():
def write_history():
try:
readline.write_history_file(history)
- except OSError:
- # bpo-19891, bpo-41193: Home directory does not exist
- # or is not writable, or the filesystem is read-only.
+ except OSError:
+ # bpo-19891, bpo-41193: Home directory does not exist
+ # or is not writable, or the filesystem is read-only.
pass
atexit.register(write_history)
@@ -599,7 +599,7 @@ def _script():
Exit codes with --user-base or --user-site:
0 - user site directory is enabled
1 - user site directory is disabled by user
- 2 - user site directory is disabled by super user
+ 2 - user site directory is disabled by super user
or for security reasons
>2 - unknown error
"""
diff --git a/contrib/tools/python3/src/Lib/smtpd.py b/contrib/tools/python3/src/Lib/smtpd.py
index 8f1a22e937..177eaeea1a 100644
--- a/contrib/tools/python3/src/Lib/smtpd.py
+++ b/contrib/tools/python3/src/Lib/smtpd.py
@@ -779,8 +779,8 @@ class PureProxy(SMTPServer):
class MailmanProxy(PureProxy):
def __init__(self, *args, **kwargs):
- warn('MailmanProxy is deprecated and will be removed '
- 'in future', DeprecationWarning, 2)
+ warn('MailmanProxy is deprecated and will be removed '
+ 'in future', DeprecationWarning, 2)
if 'enable_SMTPUTF8' in kwargs and kwargs['enable_SMTPUTF8']:
raise ValueError("MailmanProxy does not support SMTPUTF8.")
super(PureProxy, self).__init__(*args, **kwargs)
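Aside: the stacklevel argument (2) in the warn() call above attributes the
DeprecationWarning to the code that instantiated MailmanProxy rather than to
smtpd itself. A self-contained sketch of the same pattern with an illustrative
class:

    import warnings

    class Legacy:
        def __init__(self):
            # stacklevel=2 points the warning at the caller's line.
            warnings.warn("Legacy is deprecated and will be removed "
                          "in future", DeprecationWarning, 2)

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        Legacy()
    print(caught[0].category.__name__)      # -> DeprecationWarning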
diff --git a/contrib/tools/python3/src/Lib/smtplib.py b/contrib/tools/python3/src/Lib/smtplib.py
index b1fd45a003..eec00e7d76 100644
--- a/contrib/tools/python3/src/Lib/smtplib.py
+++ b/contrib/tools/python3/src/Lib/smtplib.py
@@ -54,7 +54,7 @@ import datetime
import sys
from email.base64mime import body_encode as encode_base64
-__all__ = ["SMTPException", "SMTPNotSupportedError", "SMTPServerDisconnected", "SMTPResponseException",
+__all__ = ["SMTPException", "SMTPNotSupportedError", "SMTPServerDisconnected", "SMTPResponseException",
"SMTPSenderRefused", "SMTPRecipientsRefused", "SMTPDataError",
"SMTPConnectError", "SMTPHeloError", "SMTPAuthenticationError",
"quoteaddr", "quotedata", "SMTP"]
@@ -64,7 +64,7 @@ SMTP_SSL_PORT = 465
CRLF = "\r\n"
bCRLF = b"\r\n"
_MAXLINE = 8192 # more than 8 times larger than RFC 821, 4.5.3
-_MAXCHALLENGE = 5 # Maximum number of AUTH challenges sent
+_MAXCHALLENGE = 5 # Maximum number of AUTH challenges sent
OLDSTYLE_AUTH = re.compile(r"auth=(.*)", re.I)
@@ -217,8 +217,8 @@ class SMTP:
method called 'sendmail' that will do an entire mail transaction.
"""
debuglevel = 0
-
- sock = None
+
+ sock = None
file = None
helo_resp = None
ehlo_msg = "ehlo"
@@ -231,8 +231,8 @@ class SMTP:
source_address=None):
"""Initialize a new instance.
- If specified, `host` is the name of the remote host to which to
- connect. If specified, `port` specifies the port to which to connect.
+ If specified, `host` is the name of the remote host to which to
+ connect. If specified, `port` specifies the port to which to connect.
By default, smtplib.SMTP_PORT is used. If a host is specified the
connect method is called, and if it returns anything other than a
success code an SMTPConnectError is raised. If specified,
@@ -249,7 +249,7 @@ class SMTP:
self.esmtp_features = {}
self.command_encoding = 'ascii'
self.source_address = source_address
- self._auth_challenge_count = 0
+ self._auth_challenge_count = 0
if host:
(code, msg) = self.connect(host, port)
@@ -305,8 +305,8 @@ class SMTP:
def _get_socket(self, host, port, timeout):
# This makes it simpler for SMTP_SSL to use the SMTP connect code
# and just alter the socket connection bit.
- if timeout is not None and not timeout:
- raise ValueError('Non-blocking socket (timeout=0) is not supported')
+ if timeout is not None and not timeout:
+ raise ValueError('Non-blocking socket (timeout=0) is not supported')
if self.debuglevel > 0:
self._print_debug('connect: to', (host, port), self.source_address)
return socket.create_connection((host, port), timeout,
@@ -337,7 +337,7 @@ class SMTP:
raise OSError("nonnumeric port")
if not port:
port = self.default_port
- sys.audit("smtplib.connect", self, host, port)
+ sys.audit("smtplib.connect", self, host, port)
self.sock = self._get_socket(host, port, self.timeout)
self.file = None
(code, msg) = self.getreply()
@@ -349,13 +349,13 @@ class SMTP:
"""Send `s' to the server."""
if self.debuglevel > 0:
self._print_debug('send:', repr(s))
- if self.sock:
+ if self.sock:
if isinstance(s, str):
# send is used by the 'data' command, where command_encoding
# should not be used, but 'data' needs to convert the string to
# binary itself anyway, so that's not a problem.
s = s.encode(self.command_encoding)
- sys.audit("smtplib.send", self, s)
+ sys.audit("smtplib.send", self, s)
try:
self.sock.sendall(s)
except OSError:
@@ -367,15 +367,15 @@ class SMTP:
def putcmd(self, cmd, args=""):
"""Send a command to the server."""
if args == "":
- s = cmd
+ s = cmd
else:
- s = f'{cmd} {args}'
- if '\r' in s or '\n' in s:
- s = s.replace('\n', '\\n').replace('\r', '\\r')
- raise ValueError(
- f'command and arguments contain prohibited newline characters: {s}'
- )
- self.send(f'{s}{CRLF}')
+ s = f'{cmd} {args}'
+ if '\r' in s or '\n' in s:
+ s = s.replace('\n', '\\n').replace('\r', '\\r')
+ raise ValueError(
+ f'command and arguments contain prohibited newline characters: {s}'
+ )
+ self.send(f'{s}{CRLF}')
def getreply(self):
"""Get a reply from the server.
@@ -640,23 +640,23 @@ class SMTP:
if initial_response is not None:
response = encode_base64(initial_response.encode('ascii'), eol='')
(code, resp) = self.docmd("AUTH", mechanism + " " + response)
- self._auth_challenge_count = 1
+ self._auth_challenge_count = 1
else:
(code, resp) = self.docmd("AUTH", mechanism)
- self._auth_challenge_count = 0
+ self._auth_challenge_count = 0
# If server responds with a challenge, send the response.
- while code == 334:
- self._auth_challenge_count += 1
+ while code == 334:
+ self._auth_challenge_count += 1
challenge = base64.decodebytes(resp)
response = encode_base64(
authobject(challenge).encode('ascii'), eol='')
(code, resp) = self.docmd(response)
- # If server keeps sending challenges, something is wrong.
- if self._auth_challenge_count > _MAXCHALLENGE:
- raise SMTPException(
- "Server AUTH mechanism infinite loop. Last response: "
- + repr((code, resp))
- )
+ # If server keeps sending challenges, something is wrong.
+ if self._auth_challenge_count > _MAXCHALLENGE:
+ raise SMTPException(
+ "Server AUTH mechanism infinite loop. Last response: "
+ + repr((code, resp))
+ )
if code in (235, 503):
return (code, resp)
raise SMTPAuthenticationError(code, resp)
@@ -678,7 +678,7 @@ class SMTP:
def auth_login(self, challenge=None):
""" Authobject to use with LOGIN authentication. Requires self.user and
self.password to be set."""
- if challenge is None or self._auth_challenge_count < 2:
+ if challenge is None or self._auth_challenge_count < 2:
return self.user
else:
return self.password
@@ -1048,12 +1048,12 @@ if _have_ssl:
keyfile=keyfile)
self.context = context
SMTP.__init__(self, host, port, local_hostname, timeout,
- source_address)
+ source_address)
def _get_socket(self, host, port, timeout):
if self.debuglevel > 0:
self._print_debug('connect:', (host, port))
- new_socket = super()._get_socket(host, port, timeout)
+ new_socket = super()._get_socket(host, port, timeout)
new_socket = self.context.wrap_socket(new_socket,
server_hostname=self._host)
return new_socket
@@ -1082,24 +1082,24 @@ class LMTP(SMTP):
ehlo_msg = "lhlo"
def __init__(self, host='', port=LMTP_PORT, local_hostname=None,
- source_address=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
+ source_address=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
"""Initialize a new instance."""
- super().__init__(host, port, local_hostname=local_hostname,
- source_address=source_address, timeout=timeout)
+ super().__init__(host, port, local_hostname=local_hostname,
+ source_address=source_address, timeout=timeout)
def connect(self, host='localhost', port=0, source_address=None):
"""Connect to the LMTP daemon, on either a Unix or a TCP socket."""
if host[0] != '/':
- return super().connect(host, port, source_address=source_address)
-
- if self.timeout is not None and not self.timeout:
- raise ValueError('Non-blocking socket (timeout=0) is not supported')
+ return super().connect(host, port, source_address=source_address)
+ if self.timeout is not None and not self.timeout:
+ raise ValueError('Non-blocking socket (timeout=0) is not supported')
+
# Handle Unix-domain sockets.
try:
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- if self.timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
- self.sock.settimeout(self.timeout)
+ if self.timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
+ self.sock.settimeout(self.timeout)
self.file = None
self.sock.connect(host)
except OSError:
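Aside: the putcmd() change above rejects embedded CR/LF so one logical SMTP
command cannot smuggle in a second one (command injection). A hedged,
standalone re-creation of the guard:

    CRLF = "\r\n"

    def render_cmd(cmd, args=""):
        s = cmd if args == "" else f"{cmd} {args}"
        if '\r' in s or '\n' in s:
            shown = s.replace('\n', '\\n').replace('\r', '\\r')
            raise ValueError(
                f"command and arguments contain prohibited newline characters: {shown}")
        return f"{s}{CRLF}"

    render_cmd("MAIL", "FROM:<a@example.org>")            # fine
    # render_cmd("MAIL", "FROM:<a@x>\r\nRCPT TO:<b@x>")   # raises ValueError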
diff --git a/contrib/tools/python3/src/Lib/sndhdr.py b/contrib/tools/python3/src/Lib/sndhdr.py
index 96595c6974..a28513adfd 100644
--- a/contrib/tools/python3/src/Lib/sndhdr.py
+++ b/contrib/tools/python3/src/Lib/sndhdr.py
@@ -241,7 +241,7 @@ def testall(list, recursive, toplevel):
if recursive or toplevel:
print('recursing down:')
import glob
- names = glob.glob(os.path.join(glob.escape(filename), '*'))
+ names = glob.glob(os.path.join(glob.escape(filename), '*'))
testall(names, recursive, 0)
else:
print('*** directory (use -r) ***')
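Aside: the glob.escape() call above neutralizes wildcard characters in the
directory name itself, so only the appended '*' actually globs. Example with an
illustrative name:

    import glob
    import os

    dirname = "sounds [wav]"     # '[' would otherwise start a character class
    pattern = os.path.join(glob.escape(dirname), '*')
    print(pattern)               # -> 'sounds [[]wav]/*' on POSIX
    names = glob.glob(pattern)   # matches entries inside that directory only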
diff --git a/contrib/tools/python3/src/Lib/socket.py b/contrib/tools/python3/src/Lib/socket.py
index 46fc49ca32..46575f9962 100644
--- a/contrib/tools/python3/src/Lib/socket.py
+++ b/contrib/tools/python3/src/Lib/socket.py
@@ -12,8 +12,8 @@ Functions:
socket() -- create a new socket object
socketpair() -- create a pair of new socket objects [*]
fromfd() -- create a socket object from an open file descriptor [*]
-send_fds() -- Send file descriptors to the socket.
-recv_fds() -- Receive file descriptors from the socket.
+send_fds() -- Send file descriptors to the socket.
+recv_fds() -- Receive file descriptors from the socket.
fromshare() -- create a socket object from data received from socket.share() [*]
gethostname() -- return the current hostname
gethostbyname() -- map a hostname to its IP number
@@ -62,8 +62,8 @@ EBADF = getattr(errno, 'EBADF', 9)
EAGAIN = getattr(errno, 'EAGAIN', 11)
EWOULDBLOCK = getattr(errno, 'EWOULDBLOCK', 11)
-__all__ = ["fromfd", "getfqdn", "create_connection", "create_server",
- "has_dualstack_ipv6", "AddressFamily", "SocketKind"]
+__all__ = ["fromfd", "getfqdn", "create_connection", "create_server",
+ "has_dualstack_ipv6", "AddressFamily", "SocketKind"]
__all__.extend(os._get_exports_list(_socket))
# Set up the socket.AF_* socket.SOCK_* constants as members of IntEnums for
@@ -72,22 +72,22 @@ __all__.extend(os._get_exports_list(_socket))
# in this module understands the enums and translates them back from integers
# where needed (e.g. .family property of a socket object).
-IntEnum._convert_(
+IntEnum._convert_(
'AddressFamily',
__name__,
lambda C: C.isupper() and C.startswith('AF_'))
-IntEnum._convert_(
+IntEnum._convert_(
'SocketKind',
__name__,
lambda C: C.isupper() and C.startswith('SOCK_'))
-IntFlag._convert_(
+IntFlag._convert_(
'MsgFlag',
__name__,
lambda C: C.isupper() and C.startswith('MSG_'))
-IntFlag._convert_(
+IntFlag._convert_(
'AddressInfo',
__name__,
lambda C: C.isupper() and C.startswith('AI_'))
@@ -110,101 +110,101 @@ def _intenum_converter(value, enum_klass):
# WSA error codes
if sys.platform.lower().startswith("win"):
errorTab = {}
- errorTab[6] = "Specified event object handle is invalid."
- errorTab[8] = "Insufficient memory available."
- errorTab[87] = "One or more parameters are invalid."
- errorTab[995] = "Overlapped operation aborted."
- errorTab[996] = "Overlapped I/O event object not in signaled state."
- errorTab[997] = "Overlapped operation will complete later."
+ errorTab[6] = "Specified event object handle is invalid."
+ errorTab[8] = "Insufficient memory available."
+ errorTab[87] = "One or more parameters are invalid."
+ errorTab[995] = "Overlapped operation aborted."
+ errorTab[996] = "Overlapped I/O event object not in signaled state."
+ errorTab[997] = "Overlapped operation will complete later."
errorTab[10004] = "The operation was interrupted."
errorTab[10009] = "A bad file handle was passed."
errorTab[10013] = "Permission denied."
-    errorTab[10014] = "A fault occurred on the network."  # WSAEFAULT
+    errorTab[10014] = "A fault occurred on the network."  # WSAEFAULT
errorTab[10022] = "An invalid operation was attempted."
- errorTab[10024] = "Too many open files."
+ errorTab[10024] = "Too many open files."
errorTab[10035] = "The socket operation would block"
errorTab[10036] = "A blocking operation is already in progress."
- errorTab[10037] = "Operation already in progress."
- errorTab[10038] = "Socket operation on nonsocket."
- errorTab[10039] = "Destination address required."
- errorTab[10040] = "Message too long."
- errorTab[10041] = "Protocol wrong type for socket."
- errorTab[10042] = "Bad protocol option."
- errorTab[10043] = "Protocol not supported."
- errorTab[10044] = "Socket type not supported."
- errorTab[10045] = "Operation not supported."
- errorTab[10046] = "Protocol family not supported."
- errorTab[10047] = "Address family not supported by protocol family."
+ errorTab[10037] = "Operation already in progress."
+ errorTab[10038] = "Socket operation on nonsocket."
+ errorTab[10039] = "Destination address required."
+ errorTab[10040] = "Message too long."
+ errorTab[10041] = "Protocol wrong type for socket."
+ errorTab[10042] = "Bad protocol option."
+ errorTab[10043] = "Protocol not supported."
+ errorTab[10044] = "Socket type not supported."
+ errorTab[10045] = "Operation not supported."
+ errorTab[10046] = "Protocol family not supported."
+ errorTab[10047] = "Address family not supported by protocol family."
errorTab[10048] = "The network address is in use."
- errorTab[10049] = "Cannot assign requested address."
- errorTab[10050] = "Network is down."
- errorTab[10051] = "Network is unreachable."
- errorTab[10052] = "Network dropped connection on reset."
- errorTab[10053] = "Software caused connection abort."
+ errorTab[10049] = "Cannot assign requested address."
+ errorTab[10050] = "Network is down."
+ errorTab[10051] = "Network is unreachable."
+ errorTab[10052] = "Network dropped connection on reset."
+ errorTab[10053] = "Software caused connection abort."
errorTab[10054] = "The connection has been reset."
- errorTab[10055] = "No buffer space available."
- errorTab[10056] = "Socket is already connected."
- errorTab[10057] = "Socket is not connected."
+ errorTab[10055] = "No buffer space available."
+ errorTab[10056] = "Socket is already connected."
+ errorTab[10057] = "Socket is not connected."
errorTab[10058] = "The network has been shut down."
- errorTab[10059] = "Too many references."
+ errorTab[10059] = "Too many references."
errorTab[10060] = "The operation timed out."
errorTab[10061] = "Connection refused."
- errorTab[10062] = "Cannot translate name."
+ errorTab[10062] = "Cannot translate name."
errorTab[10063] = "The name is too long."
errorTab[10064] = "The host is down."
errorTab[10065] = "The host is unreachable."
- errorTab[10066] = "Directory not empty."
- errorTab[10067] = "Too many processes."
- errorTab[10068] = "User quota exceeded."
- errorTab[10069] = "Disk quota exceeded."
- errorTab[10070] = "Stale file handle reference."
- errorTab[10071] = "Item is remote."
- errorTab[10091] = "Network subsystem is unavailable."
- errorTab[10092] = "Winsock.dll version out of range."
- errorTab[10093] = "Successful WSAStartup not yet performed."
- errorTab[10101] = "Graceful shutdown in progress."
- errorTab[10102] = "No more results from WSALookupServiceNext."
- errorTab[10103] = "Call has been canceled."
- errorTab[10104] = "Procedure call table is invalid."
- errorTab[10105] = "Service provider is invalid."
- errorTab[10106] = "Service provider failed to initialize."
- errorTab[10107] = "System call failure."
- errorTab[10108] = "Service not found."
- errorTab[10109] = "Class type not found."
- errorTab[10110] = "No more results from WSALookupServiceNext."
- errorTab[10111] = "Call was canceled."
- errorTab[10112] = "Database query was refused."
- errorTab[11001] = "Host not found."
- errorTab[11002] = "Nonauthoritative host not found."
- errorTab[11003] = "This is a nonrecoverable error."
- errorTab[11004] = "Valid name, no data record requested type."
- errorTab[11005] = "QoS receivers."
- errorTab[11006] = "QoS senders."
- errorTab[11007] = "No QoS senders."
- errorTab[11008] = "QoS no receivers."
- errorTab[11009] = "QoS request confirmed."
- errorTab[11010] = "QoS admission error."
- errorTab[11011] = "QoS policy failure."
- errorTab[11012] = "QoS bad style."
- errorTab[11013] = "QoS bad object."
- errorTab[11014] = "QoS traffic control error."
- errorTab[11015] = "QoS generic error."
- errorTab[11016] = "QoS service type error."
- errorTab[11017] = "QoS flowspec error."
- errorTab[11018] = "Invalid QoS provider buffer."
- errorTab[11019] = "Invalid QoS filter style."
- errorTab[11020] = "Invalid QoS filter style."
- errorTab[11021] = "Incorrect QoS filter count."
- errorTab[11022] = "Invalid QoS object length."
- errorTab[11023] = "Incorrect QoS flow count."
- errorTab[11024] = "Unrecognized QoS object."
- errorTab[11025] = "Invalid QoS policy object."
- errorTab[11026] = "Invalid QoS flow descriptor."
- errorTab[11027] = "Invalid QoS provider-specific flowspec."
- errorTab[11028] = "Invalid QoS provider-specific filterspec."
- errorTab[11029] = "Invalid QoS shape discard mode object."
- errorTab[11030] = "Invalid QoS shaping rate object."
- errorTab[11031] = "Reserved policy QoS element type."
+ errorTab[10066] = "Directory not empty."
+ errorTab[10067] = "Too many processes."
+ errorTab[10068] = "User quota exceeded."
+ errorTab[10069] = "Disk quota exceeded."
+ errorTab[10070] = "Stale file handle reference."
+ errorTab[10071] = "Item is remote."
+ errorTab[10091] = "Network subsystem is unavailable."
+ errorTab[10092] = "Winsock.dll version out of range."
+ errorTab[10093] = "Successful WSAStartup not yet performed."
+ errorTab[10101] = "Graceful shutdown in progress."
+ errorTab[10102] = "No more results from WSALookupServiceNext."
+ errorTab[10103] = "Call has been canceled."
+ errorTab[10104] = "Procedure call table is invalid."
+ errorTab[10105] = "Service provider is invalid."
+ errorTab[10106] = "Service provider failed to initialize."
+ errorTab[10107] = "System call failure."
+ errorTab[10108] = "Service not found."
+ errorTab[10109] = "Class type not found."
+ errorTab[10110] = "No more results from WSALookupServiceNext."
+ errorTab[10111] = "Call was canceled."
+ errorTab[10112] = "Database query was refused."
+ errorTab[11001] = "Host not found."
+ errorTab[11002] = "Nonauthoritative host not found."
+ errorTab[11003] = "This is a nonrecoverable error."
+    errorTab[11004] = "Valid name, no data record of requested type."
+ errorTab[11005] = "QoS receivers."
+ errorTab[11006] = "QoS senders."
+ errorTab[11007] = "No QoS senders."
+ errorTab[11008] = "QoS no receivers."
+ errorTab[11009] = "QoS request confirmed."
+ errorTab[11010] = "QoS admission error."
+ errorTab[11011] = "QoS policy failure."
+ errorTab[11012] = "QoS bad style."
+ errorTab[11013] = "QoS bad object."
+ errorTab[11014] = "QoS traffic control error."
+ errorTab[11015] = "QoS generic error."
+ errorTab[11016] = "QoS service type error."
+ errorTab[11017] = "QoS flowspec error."
+ errorTab[11018] = "Invalid QoS provider buffer."
+ errorTab[11019] = "Invalid QoS filter style."
+ errorTab[11020] = "Invalid QoS filter style."
+ errorTab[11021] = "Incorrect QoS filter count."
+ errorTab[11022] = "Invalid QoS object length."
+ errorTab[11023] = "Incorrect QoS flow count."
+ errorTab[11024] = "Unrecognized QoS object."
+ errorTab[11025] = "Invalid QoS policy object."
+ errorTab[11026] = "Invalid QoS flow descriptor."
+ errorTab[11027] = "Invalid QoS provider-specific flowspec."
+ errorTab[11028] = "Invalid QoS provider-specific filterspec."
+ errorTab[11029] = "Invalid QoS shape discard mode object."
+ errorTab[11030] = "Invalid QoS shaping rate object."
+ errorTab[11031] = "Reserved policy QoS element type."
__all__.append("errorTab")
@@ -270,7 +270,7 @@ class socket(_socket.socket):
return s
def __getstate__(self):
- raise TypeError(f"cannot pickle {self.__class__.__name__!r} object")
+ raise TypeError(f"cannot pickle {self.__class__.__name__!r} object")
def dup(self):
"""dup() -> socket object
@@ -356,8 +356,8 @@ class socket(_socket.socket):
raise _GiveupOnSendfile(err) # not a regular file
if not fsize:
return 0 # empty file
- # Truncate to 1GiB to avoid OverflowError, see bpo-38319.
- blocksize = min(count or fsize, 2 ** 30)
+ # Truncate to 1GiB to avoid OverflowError, see bpo-38319.
+ blocksize = min(count or fsize, 2 ** 30)
timeout = self.gettimeout()
if timeout == 0:
raise ValueError("non-blocking sockets are not supported")
@@ -544,40 +544,40 @@ def fromfd(fd, family, type, proto=0):
nfd = dup(fd)
return socket(family, type, proto, nfd)
-if hasattr(_socket.socket, "sendmsg"):
- import array
-
- def send_fds(sock, buffers, fds, flags=0, address=None):
- """ send_fds(sock, buffers, fds[, flags[, address]]) -> integer
-
- Send the list of file descriptors fds over an AF_UNIX socket.
- """
- return sock.sendmsg(buffers, [(_socket.SOL_SOCKET,
- _socket.SCM_RIGHTS, array.array("i", fds))])
- __all__.append("send_fds")
-
-if hasattr(_socket.socket, "recvmsg"):
- import array
-
- def recv_fds(sock, bufsize, maxfds, flags=0):
- """ recv_fds(sock, bufsize, maxfds[, flags]) -> (data, list of file
- descriptors, msg_flags, address)
-
- Receive up to maxfds file descriptors returning the message
- data and a list containing the descriptors.
- """
- # Array of ints
- fds = array.array("i")
- msg, ancdata, flags, addr = sock.recvmsg(bufsize,
- _socket.CMSG_LEN(maxfds * fds.itemsize))
- for cmsg_level, cmsg_type, cmsg_data in ancdata:
- if (cmsg_level == _socket.SOL_SOCKET and cmsg_type == _socket.SCM_RIGHTS):
- fds.frombytes(cmsg_data[:
- len(cmsg_data) - (len(cmsg_data) % fds.itemsize)])
-
- return msg, list(fds), flags, addr
- __all__.append("recv_fds")
-
+if hasattr(_socket.socket, "sendmsg"):
+ import array
+
+ def send_fds(sock, buffers, fds, flags=0, address=None):
+ """ send_fds(sock, buffers, fds[, flags[, address]]) -> integer
+
+ Send the list of file descriptors fds over an AF_UNIX socket.
+ """
+ return sock.sendmsg(buffers, [(_socket.SOL_SOCKET,
+ _socket.SCM_RIGHTS, array.array("i", fds))])
+ __all__.append("send_fds")
+
+if hasattr(_socket.socket, "recvmsg"):
+ import array
+
+ def recv_fds(sock, bufsize, maxfds, flags=0):
+ """ recv_fds(sock, bufsize, maxfds[, flags]) -> (data, list of file
+ descriptors, msg_flags, address)
+
+        Receive up to maxfds file descriptors, returning the message
+ data and a list containing the descriptors.
+ """
+ # Array of ints
+ fds = array.array("i")
+ msg, ancdata, flags, addr = sock.recvmsg(bufsize,
+ _socket.CMSG_LEN(maxfds * fds.itemsize))
+ for cmsg_level, cmsg_type, cmsg_data in ancdata:
+ if (cmsg_level == _socket.SOL_SOCKET and cmsg_type == _socket.SCM_RIGHTS):
+ fds.frombytes(cmsg_data[:
+ len(cmsg_data) - (len(cmsg_data) % fds.itemsize)])
+
+ return msg, list(fds), flags, addr
+ __all__.append("recv_fds")
+
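
The two helpers restored above are thin wrappers over sendmsg()/recvmsg() with SCM_RIGHTS ancillary data, which is how Unix passes open file descriptors between processes. A minimal round trip over a socketpair might look like this (a sketch for platforms that provide sendmsg/recvmsg; the pipe and payload are illustrative):

    import os
    import socket

    # send_fds()/recv_fds() only make sense on AF_UNIX sockets.
    left, right = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM)

    r, w = os.pipe()                    # a descriptor worth passing
    socket.send_fds(left, [b"hello"], [r])

    msg, fds, flags, addr = socket.recv_fds(right, 1024, maxfds=1)
    os.write(w, b"ping")
    print(msg, os.read(fds[0], 4))      # b'hello' b'ping'

    for fd in (r, w, *fds):
        os.close(fd)
    left.close()
    right.close()

Note that recv_fds() truncates the ancillary payload to whole ints, so a short control message never yields a partial descriptor.
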
if hasattr(_socket.socket, "share"):
def fromshare(info):
""" fromshare(info) -> socket object
@@ -781,9 +781,9 @@ def getfqdn(name=''):
An empty argument is interpreted as meaning the local host.
First the hostname returned by gethostbyaddr() is checked, then
- possibly existing aliases. In case no FQDN is available and `name`
- was given, it is returned unchanged. If `name` was empty or '0.0.0.0',
- hostname from gethostname() is returned.
+    any existing aliases. If no FQDN is available and `name` was given,
+    it is returned unchanged. If `name` was empty or '0.0.0.0', the
+    hostname from gethostname() is returned.
"""
name = name.strip()
if not name or name == '0.0.0.0':
@@ -840,100 +840,100 @@ def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT,
sock.close()
if err is not None:
- try:
- raise err
- finally:
- # Break explicitly a reference cycle
- err = None
+ try:
+ raise err
+ finally:
+            # Explicitly break a reference cycle
+ err = None
else:
raise error("getaddrinfo returns an empty list")
-
-def has_dualstack_ipv6():
- """Return True if the platform supports creating a SOCK_STREAM socket
- which can handle both AF_INET and AF_INET6 (IPv4 / IPv6) connections.
- """
- if not has_ipv6 \
- or not hasattr(_socket, 'IPPROTO_IPV6') \
- or not hasattr(_socket, 'IPV6_V6ONLY'):
- return False
- try:
- with socket(AF_INET6, SOCK_STREAM) as sock:
- sock.setsockopt(IPPROTO_IPV6, IPV6_V6ONLY, 0)
- return True
- except error:
- return False
-
-
-def create_server(address, *, family=AF_INET, backlog=None, reuse_port=False,
- dualstack_ipv6=False):
- """Convenience function which creates a SOCK_STREAM type socket
- bound to *address* (a 2-tuple (host, port)) and return the socket
- object.
-
- *family* should be either AF_INET or AF_INET6.
- *backlog* is the queue size passed to socket.listen().
- *reuse_port* dictates whether to use the SO_REUSEPORT socket option.
- *dualstack_ipv6*: if true and the platform supports it, it will
- create an AF_INET6 socket able to accept both IPv4 or IPv6
- connections. When false it will explicitly disable this option on
- platforms that enable it by default (e.g. Linux).
-
- >>> with create_server(('', 8000)) as server:
- ... while True:
- ... conn, addr = server.accept()
- ... # handle new connection
- """
- if reuse_port and not hasattr(_socket, "SO_REUSEPORT"):
- raise ValueError("SO_REUSEPORT not supported on this platform")
- if dualstack_ipv6:
- if not has_dualstack_ipv6():
- raise ValueError("dualstack_ipv6 not supported on this platform")
- if family != AF_INET6:
- raise ValueError("dualstack_ipv6 requires AF_INET6 family")
- sock = socket(family, SOCK_STREAM)
- try:
- # Note about Windows. We don't set SO_REUSEADDR because:
- # 1) It's unnecessary: bind() will succeed even in case of a
- # previous closed socket on the same address and still in
- # TIME_WAIT state.
- # 2) If set, another socket is free to bind() on the same
- # address, effectively preventing this one from accepting
- # connections. Also, it may set the process in a state where
- # it'll no longer respond to any signals or graceful kills.
- # See: msdn2.microsoft.com/en-us/library/ms740621(VS.85).aspx
- if os.name not in ('nt', 'cygwin') and \
- hasattr(_socket, 'SO_REUSEADDR'):
- try:
- sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
- except error:
- # Fail later on bind(), for platforms which may not
- # support this option.
- pass
- if reuse_port:
- sock.setsockopt(SOL_SOCKET, SO_REUSEPORT, 1)
- if has_ipv6 and family == AF_INET6:
- if dualstack_ipv6:
- sock.setsockopt(IPPROTO_IPV6, IPV6_V6ONLY, 0)
- elif hasattr(_socket, "IPV6_V6ONLY") and \
- hasattr(_socket, "IPPROTO_IPV6"):
- sock.setsockopt(IPPROTO_IPV6, IPV6_V6ONLY, 1)
- try:
- sock.bind(address)
- except error as err:
- msg = '%s (while attempting to bind on address %r)' % \
- (err.strerror, address)
- raise error(err.errno, msg) from None
- if backlog is None:
- sock.listen()
- else:
- sock.listen(backlog)
- return sock
- except error:
- sock.close()
- raise
-
-
+
+def has_dualstack_ipv6():
+ """Return True if the platform supports creating a SOCK_STREAM socket
+ which can handle both AF_INET and AF_INET6 (IPv4 / IPv6) connections.
+ """
+ if not has_ipv6 \
+ or not hasattr(_socket, 'IPPROTO_IPV6') \
+ or not hasattr(_socket, 'IPV6_V6ONLY'):
+ return False
+ try:
+ with socket(AF_INET6, SOCK_STREAM) as sock:
+ sock.setsockopt(IPPROTO_IPV6, IPV6_V6ONLY, 0)
+ return True
+ except error:
+ return False
+
+
+def create_server(address, *, family=AF_INET, backlog=None, reuse_port=False,
+ dualstack_ipv6=False):
+ """Convenience function which creates a SOCK_STREAM type socket
+    bound to *address* (a 2-tuple (host, port)) and returns the socket
+ object.
+
+ *family* should be either AF_INET or AF_INET6.
+ *backlog* is the queue size passed to socket.listen().
+ *reuse_port* dictates whether to use the SO_REUSEPORT socket option.
+ *dualstack_ipv6*: if true and the platform supports it, it will
+    create an AF_INET6 socket able to accept both IPv4 and IPv6
+ connections. When false it will explicitly disable this option on
+ platforms that enable it by default (e.g. Linux).
+
+ >>> with create_server(('', 8000)) as server:
+ ... while True:
+ ... conn, addr = server.accept()
+ ... # handle new connection
+ """
+ if reuse_port and not hasattr(_socket, "SO_REUSEPORT"):
+ raise ValueError("SO_REUSEPORT not supported on this platform")
+ if dualstack_ipv6:
+ if not has_dualstack_ipv6():
+ raise ValueError("dualstack_ipv6 not supported on this platform")
+ if family != AF_INET6:
+ raise ValueError("dualstack_ipv6 requires AF_INET6 family")
+ sock = socket(family, SOCK_STREAM)
+ try:
+ # Note about Windows. We don't set SO_REUSEADDR because:
+ # 1) It's unnecessary: bind() will succeed even in case of a
+ # previous closed socket on the same address and still in
+ # TIME_WAIT state.
+ # 2) If set, another socket is free to bind() on the same
+ # address, effectively preventing this one from accepting
+ # connections. Also, it may set the process in a state where
+ # it'll no longer respond to any signals or graceful kills.
+ # See: msdn2.microsoft.com/en-us/library/ms740621(VS.85).aspx
+ if os.name not in ('nt', 'cygwin') and \
+ hasattr(_socket, 'SO_REUSEADDR'):
+ try:
+ sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
+ except error:
+ # Fail later on bind(), for platforms which may not
+ # support this option.
+ pass
+ if reuse_port:
+ sock.setsockopt(SOL_SOCKET, SO_REUSEPORT, 1)
+ if has_ipv6 and family == AF_INET6:
+ if dualstack_ipv6:
+ sock.setsockopt(IPPROTO_IPV6, IPV6_V6ONLY, 0)
+ elif hasattr(_socket, "IPV6_V6ONLY") and \
+ hasattr(_socket, "IPPROTO_IPV6"):
+ sock.setsockopt(IPPROTO_IPV6, IPV6_V6ONLY, 1)
+ try:
+ sock.bind(address)
+ except error as err:
+ msg = '%s (while attempting to bind on address %r)' % \
+ (err.strerror, address)
+ raise error(err.errno, msg) from None
+ if backlog is None:
+ sock.listen()
+ else:
+ sock.listen(backlog)
+ return sock
+ except error:
+ sock.close()
+ raise
+
+
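
has_dualstack_ipv6() restored above is the capability probe that create_server() consults; callers typically pair the two so a single listening socket serves both address families where the platform allows it. A sketch (the port and echo reply are illustrative):

    import socket

    dual = socket.has_dualstack_ipv6()
    family = socket.AF_INET6 if dual else socket.AF_INET

    # One listener that accepts IPv4-mapped and IPv6 clients when possible.
    with socket.create_server(("", 8000), family=family,
                              dualstack_ipv6=dual) as server:
        conn, addr = server.accept()
        with conn:
            conn.sendall(b"hello\n")
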
def getaddrinfo(host, port, family=0, type=0, proto=0, flags=0):
"""Resolve host and port into list of address info entries.
diff --git a/contrib/tools/python3/src/Lib/socketserver.py b/contrib/tools/python3/src/Lib/socketserver.py
index 0d9583d56a..6e6df48003 100644
--- a/contrib/tools/python3/src/Lib/socketserver.py
+++ b/contrib/tools/python3/src/Lib/socketserver.py
@@ -24,7 +24,7 @@ For request-based servers (including socket-based):
The classes in this module favor the server type that is simplest to
write: a synchronous TCP/IP server. This is bad class design, but
-saves some typing. (There's also the issue that a deep class hierarchy
+saves some typing. (There's also the issue that a deep class hierarchy
slows down method lookups.)
There are five classes in an inheritance diagram, four of which represent
@@ -374,7 +374,7 @@ class BaseServer:
"""
print('-'*40, file=sys.stderr)
- print('Exception occurred during processing of request from',
+ print('Exception occurred during processing of request from',
client_address, file=sys.stderr)
import traceback
traceback.print_exc()
@@ -628,39 +628,39 @@ if hasattr(os, "fork"):
self.collect_children(blocking=self.block_on_close)
-class _Threads(list):
- """
- Joinable list of all non-daemon threads.
- """
- def append(self, thread):
- self.reap()
- if thread.daemon:
- return
- super().append(thread)
-
- def pop_all(self):
- self[:], result = [], self[:]
- return result
-
- def join(self):
- for thread in self.pop_all():
- thread.join()
-
- def reap(self):
- self[:] = (thread for thread in self if thread.is_alive())
-
-
-class _NoThreads:
- """
- Degenerate version of _Threads.
- """
- def append(self, thread):
- pass
-
- def join(self):
- pass
-
-
+class _Threads(list):
+ """
+ Joinable list of all non-daemon threads.
+ """
+ def append(self, thread):
+ self.reap()
+ if thread.daemon:
+ return
+ super().append(thread)
+
+ def pop_all(self):
+ self[:], result = [], self[:]
+ return result
+
+ def join(self):
+ for thread in self.pop_all():
+ thread.join()
+
+ def reap(self):
+ self[:] = (thread for thread in self if thread.is_alive())
+
+
+class _NoThreads:
+ """
+ Degenerate version of _Threads.
+ """
+ def append(self, thread):
+ pass
+
+ def join(self):
+ pass
+
+
class ThreadingMixIn:
"""Mix-in class to handle each request in a new thread."""
@@ -669,9 +669,9 @@ class ThreadingMixIn:
daemon_threads = False
# If true, server_close() waits until all non-daemonic threads terminate.
block_on_close = True
- # Threads object
+ # Threads object
# used by server_close() to wait for all threads completion.
- _threads = _NoThreads()
+ _threads = _NoThreads()
def process_request_thread(self, request, client_address):
"""Same as in BaseServer but as a thread.
@@ -688,17 +688,17 @@ class ThreadingMixIn:
def process_request(self, request, client_address):
"""Start a new thread to process the request."""
- if self.block_on_close:
- vars(self).setdefault('_threads', _Threads())
+ if self.block_on_close:
+ vars(self).setdefault('_threads', _Threads())
t = threading.Thread(target = self.process_request_thread,
args = (request, client_address))
t.daemon = self.daemon_threads
- self._threads.append(t)
+ self._threads.append(t)
t.start()
def server_close(self):
super().server_close()
- self._threads.join()
+ self._threads.join()
if hasattr(os, "fork"):
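
The _Threads list restored above is what lets server_close() join in-flight handler threads when block_on_close is true; note the pop_all() idiom (self[:], result = [], self[:]), which takes a copy and clears the list in a single statement, so join() never walks a list that append() is still growing. A minimal demonstration (a sketch; the handler and address are illustrative):

    import socketserver

    class EchoHandler(socketserver.BaseRequestHandler):
        def handle(self):
            self.request.sendall(self.request.recv(1024))

    class Server(socketserver.ThreadingMixIn, socketserver.TCPServer):
        daemon_threads = False   # non-daemon threads get tracked in _Threads
        block_on_close = True    # so server_close() joins them

    srv = Server(("127.0.0.1", 0), EchoHandler)
    # ... run srv.serve_forever() in another thread, handle requests ...
    srv.shutdown()
    srv.server_close()           # returns only after handlers finish
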
diff --git a/contrib/tools/python3/src/Lib/sqlite3/__init__.py b/contrib/tools/python3/src/Lib/sqlite3/__init__.py
index 1e717450c2..e9b3dfb944 100644
--- a/contrib/tools/python3/src/Lib/sqlite3/__init__.py
+++ b/contrib/tools/python3/src/Lib/sqlite3/__init__.py
@@ -20,38 +20,38 @@
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
-"""
-The sqlite3 extension module provides a DB-API 2.0 (PEP 249) compilant
-interface to the SQLite library, and requires SQLite 3.7.15 or newer.
-
-To use the module, start by creating a database Connection object:
-
- import sqlite3
- cx = sqlite3.connect("test.db") # test.db will be created or opened
-
-The special path name ":memory:" can be provided to connect to a transient
-in-memory database:
-
- cx = sqlite3.connect(":memory:") # connect to a database in RAM
-
-Once a connection has been established, create a Cursor object and call
-its execute() method to perform SQL queries:
-
- cu = cx.cursor()
-
- # create a table
- cu.execute("create table lang(name, first_appeared)")
-
- # insert values into a table
- cu.execute("insert into lang values (?, ?)", ("C", 1972))
-
- # execute a query and iterate over the result
- for row in cu.execute("select * from lang"):
- print(row)
-
- cx.close()
-
-The sqlite3 module is written by Gerhard Häring <gh@ghaering.de>.
-"""
-
+"""
+The sqlite3 extension module provides a DB-API 2.0 (PEP 249) compliant
+interface to the SQLite library, and requires SQLite 3.7.15 or newer.
+
+To use the module, start by creating a database Connection object:
+
+ import sqlite3
+ cx = sqlite3.connect("test.db") # test.db will be created or opened
+
+The special path name ":memory:" can be provided to connect to a transient
+in-memory database:
+
+ cx = sqlite3.connect(":memory:") # connect to a database in RAM
+
+Once a connection has been established, create a Cursor object and call
+its execute() method to perform SQL queries:
+
+ cu = cx.cursor()
+
+ # create a table
+ cu.execute("create table lang(name, first_appeared)")
+
+ # insert values into a table
+ cu.execute("insert into lang values (?, ?)", ("C", 1972))
+
+ # execute a query and iterate over the result
+ for row in cu.execute("select * from lang"):
+ print(row)
+
+ cx.close()
+
+The sqlite3 module is written by Gerhard Häring <gh@ghaering.de>.
+"""
+
from sqlite3.dbapi2 import *
diff --git a/contrib/tools/python3/src/Lib/sre_compile.py b/contrib/tools/python3/src/Lib/sre_compile.py
index c6398bfb83..a048eea4ba 100644
--- a/contrib/tools/python3/src/Lib/sre_compile.py
+++ b/contrib/tools/python3/src/Lib/sre_compile.py
@@ -80,7 +80,7 @@ def _compile(code, pattern, flags):
tolower = None
fixes = None
if flags & SRE_FLAG_IGNORECASE and not flags & SRE_FLAG_LOCALE:
- if flags & SRE_FLAG_UNICODE:
+ if flags & SRE_FLAG_UNICODE:
iscased = _sre.unicode_iscased
tolower = _sre.unicode_tolower
fixes = _ignorecase_fixes
@@ -196,7 +196,7 @@ def _compile(code, pattern, flags):
av = AT_MULTILINE.get(av, av)
if flags & SRE_FLAG_LOCALE:
av = AT_LOCALE.get(av, av)
- elif flags & SRE_FLAG_UNICODE:
+ elif flags & SRE_FLAG_UNICODE:
av = AT_UNICODE.get(av, av)
emit(av)
elif op is BRANCH:
@@ -217,7 +217,7 @@ def _compile(code, pattern, flags):
emit(op)
if flags & SRE_FLAG_LOCALE:
av = CH_LOCALE[av]
- elif flags & SRE_FLAG_UNICODE:
+ elif flags & SRE_FLAG_UNICODE:
av = CH_UNICODE[av]
emit(av)
elif op is GROUPREF:
@@ -265,7 +265,7 @@ def _compile_charset(charset, flags, code):
elif op is CATEGORY:
if flags & SRE_FLAG_LOCALE:
emit(CH_LOCALE[av])
- elif flags & SRE_FLAG_UNICODE:
+ elif flags & SRE_FLAG_UNICODE:
emit(CH_UNICODE[av])
else:
emit(av)
@@ -453,7 +453,7 @@ def _generate_overlap_table(prefix):
def _get_iscased(flags):
if not flags & SRE_FLAG_IGNORECASE:
return None
- elif flags & SRE_FLAG_UNICODE:
+ elif flags & SRE_FLAG_UNICODE:
return _sre.unicode_iscased
else:
return _sre.ascii_iscased
@@ -597,7 +597,7 @@ def isstring(obj):
def _code(p, flags):
- flags = p.state.flags | flags
+ flags = p.state.flags | flags
code = []
# compile info block
@@ -772,13 +772,13 @@ def compile(p, flags=0):
dis(code)
# map in either direction
- groupindex = p.state.groupdict
- indexgroup = [None] * p.state.groups
+ groupindex = p.state.groupdict
+ indexgroup = [None] * p.state.groups
for k, i in groupindex.items():
indexgroup[i] = k
return _sre.compile(
- pattern, flags | p.state.flags, code,
- p.state.groups-1,
+ pattern, flags | p.state.flags, code,
+ p.state.groups-1,
groupindex, tuple(indexgroup)
)
diff --git a/contrib/tools/python3/src/Lib/sre_constants.py b/contrib/tools/python3/src/Lib/sre_constants.py
index 8e613cb3fa..e3401b203e 100644
--- a/contrib/tools/python3/src/Lib/sre_constants.py
+++ b/contrib/tools/python3/src/Lib/sre_constants.py
@@ -59,7 +59,7 @@ class _NamedIntConstant(int):
self.name = name
return self
- def __repr__(self):
+ def __repr__(self):
return self.name
MAXREPEAT = _NamedIntConstant(MAXREPEAT, 'MAXREPEAT')
diff --git a/contrib/tools/python3/src/Lib/sre_parse.py b/contrib/tools/python3/src/Lib/sre_parse.py
index 83119168e6..c98218e3d8 100644
--- a/contrib/tools/python3/src/Lib/sre_parse.py
+++ b/contrib/tools/python3/src/Lib/sre_parse.py
@@ -71,8 +71,8 @@ GLOBAL_FLAGS = SRE_FLAG_DEBUG | SRE_FLAG_TEMPLATE
class Verbose(Exception):
pass
-class State:
- # keeps track of state for parsing
+class State:
+ # keeps track of state for parsing
def __init__(self):
self.flags = 0
self.groupdict = {}
@@ -108,8 +108,8 @@ class State:
class SubPattern:
# a subpattern, in intermediate form
- def __init__(self, state, data=None):
- self.state = state
+ def __init__(self, state, data=None):
+ self.state = state
if data is None:
data = []
self.data = data
@@ -163,7 +163,7 @@ class SubPattern:
del self.data[index]
def __getitem__(self, index):
if isinstance(index, slice):
- return SubPattern(self.state, self.data[index])
+ return SubPattern(self.state, self.data[index])
return self.data[index]
def __setitem__(self, index, code):
self.data[index] = code
@@ -202,7 +202,7 @@ class SubPattern:
lo = lo + 1
hi = hi + 1
elif op is GROUPREF:
- i, j = self.state.groupwidths[av]
+ i, j = self.state.groupwidths[av]
lo = lo + i
hi = hi + j
elif op is GROUPREF_EXISTS:
@@ -264,19 +264,19 @@ class Tokenizer:
result += c
self.__next()
return result
- def getuntil(self, terminator, name):
+ def getuntil(self, terminator, name):
result = ''
while True:
c = self.next
self.__next()
if c is None:
if not result:
- raise self.error("missing " + name)
+ raise self.error("missing " + name)
raise self.error("missing %s, unterminated name" % terminator,
len(result))
if c == terminator:
if not result:
- raise self.error("missing " + name, 1)
+ raise self.error("missing " + name, 1)
break
result += c
return result
@@ -322,18 +322,18 @@ def _class_escape(source, escape):
c = int(escape[2:], 16)
chr(c) # raise ValueError for invalid code
return LITERAL, c
- elif c == "N" and source.istext:
- import unicodedata
- # named unicode escape e.g. \N{EM DASH}
- if not source.match('{'):
- raise source.error("missing {")
- charname = source.getuntil('}', 'character name')
- try:
- c = ord(unicodedata.lookup(charname))
- except KeyError:
- raise source.error("undefined character name %r" % charname,
- len(charname) + len(r'\N{}'))
- return LITERAL, c
+ elif c == "N" and source.istext:
+ import unicodedata
+ # named unicode escape e.g. \N{EM DASH}
+ if not source.match('{'):
+ raise source.error("missing {")
+ charname = source.getuntil('}', 'character name')
+ try:
+ c = ord(unicodedata.lookup(charname))
+ except KeyError:
+ raise source.error("undefined character name %r" % charname,
+ len(charname) + len(r'\N{}'))
+ return LITERAL, c
elif c in OCTDIGITS:
# octal escape (up to three digits)
escape += source.getwhile(2, OCTDIGITS)
@@ -382,18 +382,18 @@ def _escape(source, escape, state):
c = int(escape[2:], 16)
chr(c) # raise ValueError for invalid code
return LITERAL, c
- elif c == "N" and source.istext:
- import unicodedata
- # named unicode escape e.g. \N{EM DASH}
- if not source.match('{'):
- raise source.error("missing {")
- charname = source.getuntil('}', 'character name')
- try:
- c = ord(unicodedata.lookup(charname))
- except KeyError:
- raise source.error("undefined character name %r" % charname,
- len(charname) + len(r'\N{}'))
- return LITERAL, c
+ elif c == "N" and source.istext:
+ import unicodedata
+ # named unicode escape e.g. \N{EM DASH}
+ if not source.match('{'):
+ raise source.error("missing {")
+ charname = source.getuntil('}', 'character name')
+ try:
+ c = ord(unicodedata.lookup(charname))
+ except KeyError:
+ raise source.error("undefined character name %r" % charname,
+ len(charname) + len(r'\N{}'))
+ return LITERAL, c
elif c == "0":
# octal escape
escape += source.getwhile(2, OCTDIGITS)
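
Both escape paths above gain the same branch: for text patterns, \N{...} is resolved at parse time through unicodedata.lookup(), inside character classes as well as at top level. At the re API surface that enables patterns such as (a sketch):

    import re

    # \N{EM DASH} compiles to the literal '\u2014' (text patterns only).
    assert re.search(r"\N{EM DASH}", "before \u2014 after")

    # The class-escape path makes named characters usable in sets too.
    hits = re.findall(r"[\N{DEGREE SIGN}\N{MICRO SIGN}]", "25\u00b0C, 5\u00b5m")
    assert hits == ["\u00b0", "\u00b5"]
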
@@ -430,7 +430,7 @@ def _escape(source, escape, state):
raise source.error("bad escape %s" % escape, len(escape))
def _uniq(items):
- return list(dict.fromkeys(items))
+ return list(dict.fromkeys(items))
def _parse_sub(source, state, verbose, nested):
# parse an alternation: a|b|c
@@ -697,13 +697,13 @@ def _parse(source, state, verbose, nested, first=False):
# python extensions
if sourcematch("<"):
# named group: skip forward to end of name
- name = source.getuntil(">", "group name")
+ name = source.getuntil(">", "group name")
if not name.isidentifier():
msg = "bad character in group name %r" % name
raise source.error(msg, len(name) + 1)
elif sourcematch("="):
# named backreference
- name = source.getuntil(")", "group name")
+ name = source.getuntil(")", "group name")
if not name.isidentifier():
msg = "bad character in group name %r" % name
raise source.error(msg, len(name) + 1)
@@ -766,7 +766,7 @@ def _parse(source, state, verbose, nested, first=False):
elif char == "(":
# conditional backreference group
- condname = source.getuntil(")", "group name")
+ condname = source.getuntil(")", "group name")
if condname.isidentifier():
condgroup = state.groupdict.get(condname)
if condgroup is None:
@@ -934,28 +934,28 @@ def fix_flags(src, flags):
raise ValueError("ASCII and LOCALE flags are incompatible")
return flags
-def parse(str, flags=0, state=None):
+def parse(str, flags=0, state=None):
# parse 're' pattern into list of (opcode, argument) tuples
source = Tokenizer(str)
- if state is None:
- state = State()
- state.flags = flags
- state.str = str
+ if state is None:
+ state = State()
+ state.flags = flags
+ state.str = str
try:
- p = _parse_sub(source, state, flags & SRE_FLAG_VERBOSE, 0)
+ p = _parse_sub(source, state, flags & SRE_FLAG_VERBOSE, 0)
except Verbose:
# the VERBOSE flag was switched on inside the pattern. to be
# on the safe side, we'll parse the whole thing again...
- state = State()
- state.flags = flags | SRE_FLAG_VERBOSE
- state.str = str
+ state = State()
+ state.flags = flags | SRE_FLAG_VERBOSE
+ state.str = str
source.seek(0)
- p = _parse_sub(source, state, True, 0)
+ p = _parse_sub(source, state, True, 0)
- p.state.flags = fix_flags(str, p.state.flags)
+ p.state.flags = fix_flags(str, p.state.flags)
if source.next is not None:
assert source.next == ")"
@@ -966,7 +966,7 @@ def parse(str, flags=0, state=None):
return p
-def parse_template(source, state):
+def parse_template(source, state):
# parse 're' replacement string into list of literals and
# group references
s = Tokenizer(source)
@@ -976,14 +976,14 @@ def parse_template(source, state):
literal = []
lappend = literal.append
def addgroup(index, pos):
- if index > state.groups:
+ if index > state.groups:
raise s.error("invalid group reference %d" % index, pos)
if literal:
literals.append(''.join(literal))
del literal[:]
groups.append((len(literals), index))
literals.append(None)
- groupindex = state.groupindex
+ groupindex = state.groupindex
while True:
this = sget()
if this is None:
@@ -995,7 +995,7 @@ def parse_template(source, state):
name = ""
if not s.match("<"):
raise s.error("missing <")
- name = s.getuntil(">", "group name")
+ name = s.getuntil(">", "group name")
if name.isidentifier():
try:
index = groupindex[name]
diff --git a/contrib/tools/python3/src/Lib/ssl.py b/contrib/tools/python3/src/Lib/ssl.py
index e95e4cf5e9..e278e7e10d 100644
--- a/contrib/tools/python3/src/Lib/ssl.py
+++ b/contrib/tools/python3/src/Lib/ssl.py
@@ -119,32 +119,32 @@ from _ssl import (
from _ssl import _DEFAULT_CIPHERS, _OPENSSL_API_VERSION
-_IntEnum._convert_(
+_IntEnum._convert_(
'_SSLMethod', __name__,
lambda name: name.startswith('PROTOCOL_') and name != 'PROTOCOL_SSLv23',
source=_ssl)
-_IntFlag._convert_(
+_IntFlag._convert_(
'Options', __name__,
lambda name: name.startswith('OP_'),
source=_ssl)
-_IntEnum._convert_(
+_IntEnum._convert_(
'AlertDescription', __name__,
lambda name: name.startswith('ALERT_DESCRIPTION_'),
source=_ssl)
-_IntEnum._convert_(
+_IntEnum._convert_(
'SSLErrorNumber', __name__,
lambda name: name.startswith('SSL_ERROR_'),
source=_ssl)
-_IntFlag._convert_(
+_IntFlag._convert_(
'VerifyFlags', __name__,
lambda name: name.startswith('VERIFY_'),
source=_ssl)
-_IntEnum._convert_(
+_IntEnum._convert_(
'VerifyMode', __name__,
lambda name: name.startswith('CERT_'),
source=_ssl)
@@ -165,94 +165,94 @@ class TLSVersion(_IntEnum):
MAXIMUM_SUPPORTED = _ssl.PROTO_MAXIMUM_SUPPORTED
-class _TLSContentType(_IntEnum):
- """Content types (record layer)
-
- See RFC 8446, section B.1
- """
- CHANGE_CIPHER_SPEC = 20
- ALERT = 21
- HANDSHAKE = 22
- APPLICATION_DATA = 23
- # pseudo content types
- HEADER = 0x100
- INNER_CONTENT_TYPE = 0x101
-
-
-class _TLSAlertType(_IntEnum):
- """Alert types for TLSContentType.ALERT messages
-
- See RFC 8466, section B.2
- """
- CLOSE_NOTIFY = 0
- UNEXPECTED_MESSAGE = 10
- BAD_RECORD_MAC = 20
- DECRYPTION_FAILED = 21
- RECORD_OVERFLOW = 22
- DECOMPRESSION_FAILURE = 30
- HANDSHAKE_FAILURE = 40
- NO_CERTIFICATE = 41
- BAD_CERTIFICATE = 42
- UNSUPPORTED_CERTIFICATE = 43
- CERTIFICATE_REVOKED = 44
- CERTIFICATE_EXPIRED = 45
- CERTIFICATE_UNKNOWN = 46
- ILLEGAL_PARAMETER = 47
- UNKNOWN_CA = 48
- ACCESS_DENIED = 49
- DECODE_ERROR = 50
- DECRYPT_ERROR = 51
- EXPORT_RESTRICTION = 60
- PROTOCOL_VERSION = 70
- INSUFFICIENT_SECURITY = 71
- INTERNAL_ERROR = 80
- INAPPROPRIATE_FALLBACK = 86
- USER_CANCELED = 90
- NO_RENEGOTIATION = 100
- MISSING_EXTENSION = 109
- UNSUPPORTED_EXTENSION = 110
- CERTIFICATE_UNOBTAINABLE = 111
- UNRECOGNIZED_NAME = 112
- BAD_CERTIFICATE_STATUS_RESPONSE = 113
- BAD_CERTIFICATE_HASH_VALUE = 114
- UNKNOWN_PSK_IDENTITY = 115
- CERTIFICATE_REQUIRED = 116
- NO_APPLICATION_PROTOCOL = 120
-
-
-class _TLSMessageType(_IntEnum):
- """Message types (handshake protocol)
-
- See RFC 8446, section B.3
- """
- HELLO_REQUEST = 0
- CLIENT_HELLO = 1
- SERVER_HELLO = 2
- HELLO_VERIFY_REQUEST = 3
- NEWSESSION_TICKET = 4
- END_OF_EARLY_DATA = 5
- HELLO_RETRY_REQUEST = 6
- ENCRYPTED_EXTENSIONS = 8
- CERTIFICATE = 11
- SERVER_KEY_EXCHANGE = 12
- CERTIFICATE_REQUEST = 13
- SERVER_DONE = 14
- CERTIFICATE_VERIFY = 15
- CLIENT_KEY_EXCHANGE = 16
- FINISHED = 20
- CERTIFICATE_URL = 21
- CERTIFICATE_STATUS = 22
- SUPPLEMENTAL_DATA = 23
- KEY_UPDATE = 24
- NEXT_PROTO = 67
- MESSAGE_HASH = 254
- CHANGE_CIPHER_SPEC = 0x0101
-
-
+class _TLSContentType(_IntEnum):
+ """Content types (record layer)
+
+ See RFC 8446, section B.1
+ """
+ CHANGE_CIPHER_SPEC = 20
+ ALERT = 21
+ HANDSHAKE = 22
+ APPLICATION_DATA = 23
+ # pseudo content types
+ HEADER = 0x100
+ INNER_CONTENT_TYPE = 0x101
+
+
+class _TLSAlertType(_IntEnum):
+ """Alert types for TLSContentType.ALERT messages
+
+    See RFC 8446, section B.2
+ """
+ CLOSE_NOTIFY = 0
+ UNEXPECTED_MESSAGE = 10
+ BAD_RECORD_MAC = 20
+ DECRYPTION_FAILED = 21
+ RECORD_OVERFLOW = 22
+ DECOMPRESSION_FAILURE = 30
+ HANDSHAKE_FAILURE = 40
+ NO_CERTIFICATE = 41
+ BAD_CERTIFICATE = 42
+ UNSUPPORTED_CERTIFICATE = 43
+ CERTIFICATE_REVOKED = 44
+ CERTIFICATE_EXPIRED = 45
+ CERTIFICATE_UNKNOWN = 46
+ ILLEGAL_PARAMETER = 47
+ UNKNOWN_CA = 48
+ ACCESS_DENIED = 49
+ DECODE_ERROR = 50
+ DECRYPT_ERROR = 51
+ EXPORT_RESTRICTION = 60
+ PROTOCOL_VERSION = 70
+ INSUFFICIENT_SECURITY = 71
+ INTERNAL_ERROR = 80
+ INAPPROPRIATE_FALLBACK = 86
+ USER_CANCELED = 90
+ NO_RENEGOTIATION = 100
+ MISSING_EXTENSION = 109
+ UNSUPPORTED_EXTENSION = 110
+ CERTIFICATE_UNOBTAINABLE = 111
+ UNRECOGNIZED_NAME = 112
+ BAD_CERTIFICATE_STATUS_RESPONSE = 113
+ BAD_CERTIFICATE_HASH_VALUE = 114
+ UNKNOWN_PSK_IDENTITY = 115
+ CERTIFICATE_REQUIRED = 116
+ NO_APPLICATION_PROTOCOL = 120
+
+
+class _TLSMessageType(_IntEnum):
+ """Message types (handshake protocol)
+
+ See RFC 8446, section B.3
+ """
+ HELLO_REQUEST = 0
+ CLIENT_HELLO = 1
+ SERVER_HELLO = 2
+ HELLO_VERIFY_REQUEST = 3
+ NEWSESSION_TICKET = 4
+ END_OF_EARLY_DATA = 5
+ HELLO_RETRY_REQUEST = 6
+ ENCRYPTED_EXTENSIONS = 8
+ CERTIFICATE = 11
+ SERVER_KEY_EXCHANGE = 12
+ CERTIFICATE_REQUEST = 13
+ SERVER_DONE = 14
+ CERTIFICATE_VERIFY = 15
+ CLIENT_KEY_EXCHANGE = 16
+ FINISHED = 20
+ CERTIFICATE_URL = 21
+ CERTIFICATE_STATUS = 22
+ SUPPLEMENTAL_DATA = 23
+ KEY_UPDATE = 24
+ NEXT_PROTO = 67
+ MESSAGE_HASH = 254
+ CHANGE_CIPHER_SPEC = 0x0101
+
+
if sys.platform == "win32":
from _ssl import enum_certificates, enum_crls
-from socket import socket, SOCK_STREAM, create_connection
+from socket import socket, SOCK_STREAM, create_connection
from socket import SOL_SOCKET, SO_TYPE
import socket as _socket
import base64 # for DER-to-PEM translation
@@ -327,22 +327,22 @@ def _inet_paton(ipname):
Supports IPv4 addresses on all platforms and IPv6 on platforms with IPv6
support.
"""
- # inet_aton() also accepts strings like '1', '127.1', some also trailing
- # data like '127.0.0.1 whatever'.
- try:
- addr = _socket.inet_aton(ipname)
- except OSError:
- # not an IPv4 address
- pass
- else:
- if _socket.inet_ntoa(addr) == ipname:
- # only accept injective ipnames
- return addr
- else:
- # refuse for short IPv4 notation and additional trailing data
- raise ValueError(
- "{!r} is not a quad-dotted IPv4 address.".format(ipname)
- )
+    # inet_aton() also accepts strings like '1' and '127.1', and on some
+    # platforms also trailing data like '127.0.0.1 whatever'.
+ try:
+ addr = _socket.inet_aton(ipname)
+ except OSError:
+ # not an IPv4 address
+ pass
+ else:
+ if _socket.inet_ntoa(addr) == ipname:
+ # only accept injective ipnames
+ return addr
+ else:
+ # refuse for short IPv4 notation and additional trailing data
+ raise ValueError(
+ "{!r} is not a quad-dotted IPv4 address.".format(ipname)
+ )
try:
return _socket.inet_pton(_socket.AF_INET6, ipname)
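
The rewritten block above is deliberately stricter than inet_aton() alone: an IPv4 candidate is accepted only when inet_ntoa() round-trips it back to the exact input, which rejects shorthand such as '127.1' and trailing junk. A standalone illustration of the check (_inet_paton itself is private, so this mirrors rather than calls it):

    import socket as _socket

    def strict_ipv4(ipname):
        # Only quad-dotted names survive the round trip.
        addr = _socket.inet_aton(ipname)            # accepts '127.1' too
        return _socket.inet_ntoa(addr) == ipname    # ...this rejects it

    print(strict_ipv4("127.0.0.1"))   # True
    print(strict_ipv4("127.1"))       # False: renders as '127.0.0.1'
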
@@ -356,15 +356,15 @@ def _inet_paton(ipname):
raise ValueError("{!r} is not an IPv4 address.".format(ipname))
-def _ipaddress_match(cert_ipaddress, host_ip):
+def _ipaddress_match(cert_ipaddress, host_ip):
"""Exact matching of IP addresses.
RFC 6125 explicitly doesn't define an algorithm for this
(section 1.7.2 - "Out of Scope").
"""
- # OpenSSL may add a trailing newline to a subjectAltName's IP address,
- # commonly woth IPv6 addresses. Strip off trailing \n.
- ip = _inet_paton(cert_ipaddress.rstrip())
+ # OpenSSL may add a trailing newline to a subjectAltName's IP address,
+    # commonly with IPv6 addresses. Strip off trailing \n.
+ ip = _inet_paton(cert_ipaddress.rstrip())
return ip == host_ip
@@ -637,83 +637,83 @@ class SSLContext(_SSLContext):
return True
@property
- def _msg_callback(self):
- """TLS message callback
-
- The message callback provides a debugging hook to analyze TLS
- connections. The callback is called for any TLS protocol message
- (header, handshake, alert, and more), but not for application data.
- Due to technical limitations, the callback can't be used to filter
- traffic or to abort a connection. Any exception raised in the
- callback is delayed until the handshake, read, or write operation
- has been performed.
-
- def msg_cb(conn, direction, version, content_type, msg_type, data):
- pass
-
- conn
- :class:`SSLSocket` or :class:`SSLObject` instance
- direction
- ``read`` or ``write``
- version
- :class:`TLSVersion` enum member or int for unknown version. For a
- frame header, it's the header version.
- content_type
- :class:`_TLSContentType` enum member or int for unsupported
- content type.
- msg_type
- Either a :class:`_TLSContentType` enum number for a header
- message, a :class:`_TLSAlertType` enum member for an alert
- message, a :class:`_TLSMessageType` enum member for other
- messages, or int for unsupported message types.
- data
- Raw, decrypted message content as bytes
- """
- inner = super()._msg_callback
- if inner is not None:
- return inner.user_function
- else:
- return None
-
- @_msg_callback.setter
- def _msg_callback(self, callback):
- if callback is None:
- super(SSLContext, SSLContext)._msg_callback.__set__(self, None)
- return
-
- if not hasattr(callback, '__call__'):
- raise TypeError(f"{callback} is not callable.")
-
- def inner(conn, direction, version, content_type, msg_type, data):
- try:
- version = TLSVersion(version)
- except ValueError:
- pass
-
- try:
- content_type = _TLSContentType(content_type)
- except ValueError:
- pass
-
- if content_type == _TLSContentType.HEADER:
- msg_enum = _TLSContentType
- elif content_type == _TLSContentType.ALERT:
- msg_enum = _TLSAlertType
- else:
- msg_enum = _TLSMessageType
- try:
- msg_type = msg_enum(msg_type)
- except ValueError:
- pass
-
- return callback(conn, direction, version,
- content_type, msg_type, data)
-
- inner.user_function = callback
-
- super(SSLContext, SSLContext)._msg_callback.__set__(self, inner)
-
- @property
+ def _msg_callback(self):
+ """TLS message callback
+
+ The message callback provides a debugging hook to analyze TLS
+ connections. The callback is called for any TLS protocol message
+ (header, handshake, alert, and more), but not for application data.
+ Due to technical limitations, the callback can't be used to filter
+ traffic or to abort a connection. Any exception raised in the
+ callback is delayed until the handshake, read, or write operation
+ has been performed.
+
+ def msg_cb(conn, direction, version, content_type, msg_type, data):
+ pass
+
+ conn
+ :class:`SSLSocket` or :class:`SSLObject` instance
+ direction
+ ``read`` or ``write``
+ version
+ :class:`TLSVersion` enum member or int for unknown version. For a
+ frame header, it's the header version.
+ content_type
+ :class:`_TLSContentType` enum member or int for unsupported
+ content type.
+ msg_type
+ Either a :class:`_TLSContentType` enum number for a header
+ message, a :class:`_TLSAlertType` enum member for an alert
+ message, a :class:`_TLSMessageType` enum member for other
+ messages, or int for unsupported message types.
+ data
+ Raw, decrypted message content as bytes
+ """
+ inner = super()._msg_callback
+ if inner is not None:
+ return inner.user_function
+ else:
+ return None
+
+ @_msg_callback.setter
+ def _msg_callback(self, callback):
+ if callback is None:
+ super(SSLContext, SSLContext)._msg_callback.__set__(self, None)
+ return
+
+ if not hasattr(callback, '__call__'):
+ raise TypeError(f"{callback} is not callable.")
+
+ def inner(conn, direction, version, content_type, msg_type, data):
+ try:
+ version = TLSVersion(version)
+ except ValueError:
+ pass
+
+ try:
+ content_type = _TLSContentType(content_type)
+ except ValueError:
+ pass
+
+ if content_type == _TLSContentType.HEADER:
+ msg_enum = _TLSContentType
+ elif content_type == _TLSContentType.ALERT:
+ msg_enum = _TLSAlertType
+ else:
+ msg_enum = _TLSMessageType
+ try:
+ msg_type = msg_enum(msg_type)
+ except ValueError:
+ pass
+
+ return callback(conn, direction, version,
+ content_type, msg_type, data)
+
+ inner.user_function = callback
+
+ super(SSLContext, SSLContext)._msg_callback.__set__(self, inner)
+
+ @property
def protocol(self):
return _SSLMethod(super().protocol)
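
The property pair restored above exposes OpenSSL's message callback with enum translation applied on the way through; a tracing hook can be attached as below (a sketch: _msg_callback is a private, underscored API, and the callback signature is the one documented in the docstring above):

    import ssl

    def trace(conn, direction, version, content_type, msg_type, data):
        # Invoked for every TLS record and handshake message, never app data.
        print(direction, version, content_type, msg_type, len(data))

    ctx = ssl.create_default_context()
    ctx._msg_callback = trace   # private hook; subject to change
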
@@ -766,11 +766,11 @@ def create_default_context(purpose=Purpose.SERVER_AUTH, *, cafile=None,
# CERT_OPTIONAL or CERT_REQUIRED. Let's try to load default system
# root CA certificates for the given purpose. This may fail silently.
context.load_default_certs(purpose)
- # OpenSSL 1.1.1 keylog file
- if hasattr(context, 'keylog_filename'):
- keylogfile = os.environ.get('SSLKEYLOGFILE')
- if keylogfile and not sys.flags.ignore_environment:
- context.keylog_filename = keylogfile
+ # OpenSSL 1.1.1 keylog file
+ if hasattr(context, 'keylog_filename'):
+ keylogfile = os.environ.get('SSLKEYLOGFILE')
+ if keylogfile and not sys.flags.ignore_environment:
+ context.keylog_filename = keylogfile
return context
def _create_unverified_context(protocol=PROTOCOL_TLS, *, cert_reqs=CERT_NONE,
@@ -812,11 +812,11 @@ def _create_unverified_context(protocol=PROTOCOL_TLS, *, cert_reqs=CERT_NONE,
# CERT_OPTIONAL or CERT_REQUIRED. Let's try to load default system
# root CA certificates for the given purpose. This may fail silently.
context.load_default_certs(purpose)
- # OpenSSL 1.1.1 keylog file
- if hasattr(context, 'keylog_filename'):
- keylogfile = os.environ.get('SSLKEYLOGFILE')
- if keylogfile and not sys.flags.ignore_environment:
- context.keylog_filename = keylogfile
+ # OpenSSL 1.1.1 keylog file
+ if hasattr(context, 'keylog_filename'):
+ keylogfile = os.environ.get('SSLKEYLOGFILE')
+ if keylogfile and not sys.flags.ignore_environment:
+ context.keylog_filename = keylogfile
return context
# Used by http.client if no context is explicitly passed.
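
With the keylog block restored in both factories, exporting SSLKEYLOGFILE is enough to get an NSS-format key log from any default context (unless Python runs with ignore_environment). Setting the attribute directly is equivalent (a sketch; the path is illustrative):

    import os
    import ssl

    ctx = ssl.create_default_context()
    if hasattr(ctx, "keylog_filename"):      # requires OpenSSL 1.1.1+
        ctx.keylog_filename = os.path.expanduser("~/tls-keys.log")
    # Wireshark can decrypt captures using that key log file.
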
@@ -891,7 +891,7 @@ class SSLObject:
@property
def server_hostname(self):
"""The currently set server hostname (for SNI), or ``None`` if no
- server hostname is set."""
+ server hostname is set."""
return self._sslobj.server_hostname
def read(self, len=1024, buffer=None):
@@ -980,12 +980,12 @@ class SSLObject:
return self._sslobj.verify_client_post_handshake()
-def _sslcopydoc(func):
- """Copy docstring from SSLObject to SSLSocket"""
- func.__doc__ = getattr(SSLObject, func.__name__).__doc__
- return func
-
-
+def _sslcopydoc(func):
+ """Copy docstring from SSLObject to SSLSocket"""
+ func.__doc__ = getattr(SSLObject, func.__name__).__doc__
+ return func
+
+
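
_sslcopydoc above saves SSLSocket from restating documentation: each decorated wrapper inherits the docstring of the SSLObject method with the same name. The mechanism in isolation (a sketch with illustrative names):

    class Source:
        def ping(self):
            """Return a liveness probe result."""

    def copydoc(func):
        # Same idea as _sslcopydoc: look the name up on the reference class.
        func.__doc__ = getattr(Source, func.__name__).__doc__
        return func

    class Mirror:
        @copydoc
        def ping(self):
            pass

    print(Mirror.ping.__doc__)   # Return a liveness probe result.
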
class SSLSocket(socket):
"""This class implements a subtype of socket.socket that wraps
the underlying OS socket in an SSL context when necessary, and
@@ -1062,7 +1062,7 @@ class SSLSocket(socket):
return self
@property
- @_sslcopydoc
+ @_sslcopydoc
def context(self):
return self._context
@@ -1072,7 +1072,7 @@ class SSLSocket(socket):
self._sslobj.context = ctx
@property
- @_sslcopydoc
+ @_sslcopydoc
def session(self):
if self._sslobj is not None:
return self._sslobj.session
@@ -1084,7 +1084,7 @@ class SSLSocket(socket):
self._sslobj.session = session
@property
- @_sslcopydoc
+ @_sslcopydoc
def session_reused(self):
if self._sslobj is not None:
return self._sslobj.session_reused
@@ -1135,13 +1135,13 @@ class SSLSocket(socket):
raise ValueError("Write on closed or unwrapped SSL socket.")
return self._sslobj.write(data)
- @_sslcopydoc
+ @_sslcopydoc
def getpeercert(self, binary_form=False):
self._checkClosed()
self._check_connected()
return self._sslobj.getpeercert(binary_form)
- @_sslcopydoc
+ @_sslcopydoc
def selected_npn_protocol(self):
self._checkClosed()
if self._sslobj is None or not _ssl.HAS_NPN:
@@ -1149,7 +1149,7 @@ class SSLSocket(socket):
else:
return self._sslobj.selected_npn_protocol()
- @_sslcopydoc
+ @_sslcopydoc
def selected_alpn_protocol(self):
self._checkClosed()
if self._sslobj is None or not _ssl.HAS_ALPN:
@@ -1157,7 +1157,7 @@ class SSLSocket(socket):
else:
return self._sslobj.selected_alpn_protocol()
- @_sslcopydoc
+ @_sslcopydoc
def cipher(self):
self._checkClosed()
if self._sslobj is None:
@@ -1165,7 +1165,7 @@ class SSLSocket(socket):
else:
return self._sslobj.cipher()
- @_sslcopydoc
+ @_sslcopydoc
def shared_ciphers(self):
self._checkClosed()
if self._sslobj is None:
@@ -1173,7 +1173,7 @@ class SSLSocket(socket):
else:
return self._sslobj.shared_ciphers()
- @_sslcopydoc
+ @_sslcopydoc
def compression(self):
self._checkClosed()
if self._sslobj is None:
@@ -1284,7 +1284,7 @@ class SSLSocket(socket):
raise NotImplementedError("recvmsg_into not allowed on instances of "
"%s" % self.__class__)
- @_sslcopydoc
+ @_sslcopydoc
def pending(self):
self._checkClosed()
if self._sslobj is not None:
@@ -1297,7 +1297,7 @@ class SSLSocket(socket):
self._sslobj = None
super().shutdown(how)
- @_sslcopydoc
+ @_sslcopydoc
def unwrap(self):
if self._sslobj:
s = self._sslobj.shutdown()
@@ -1306,7 +1306,7 @@ class SSLSocket(socket):
else:
raise ValueError("No SSL wrapper around " + str(self))
- @_sslcopydoc
+ @_sslcopydoc
def verify_client_post_handshake(self):
if self._sslobj:
return self._sslobj.verify_client_post_handshake()
@@ -1317,7 +1317,7 @@ class SSLSocket(socket):
self._sslobj = None
super()._real_close()
- @_sslcopydoc
+ @_sslcopydoc
def do_handshake(self, block=False):
self._check_connected()
timeout = self.gettimeout()
@@ -1376,7 +1376,7 @@ class SSLSocket(socket):
server_side=True)
return newsock, addr
- @_sslcopydoc
+ @_sslcopydoc
def get_channel_binding(self, cb_type="tls-unique"):
if self._sslobj is not None:
return self._sslobj.get_channel_binding(cb_type)
@@ -1387,7 +1387,7 @@ class SSLSocket(socket):
)
return None
- @_sslcopydoc
+ @_sslcopydoc
def version(self):
if self._sslobj is not None:
return self._sslobj.version()
diff --git a/contrib/tools/python3/src/Lib/stat.py b/contrib/tools/python3/src/Lib/stat.py
index fc024db3f4..91ef0f7999 100644
--- a/contrib/tools/python3/src/Lib/stat.py
+++ b/contrib/tools/python3/src/Lib/stat.py
@@ -40,10 +40,10 @@ S_IFREG = 0o100000 # regular file
S_IFIFO = 0o010000 # fifo (named pipe)
S_IFLNK = 0o120000 # symbolic link
S_IFSOCK = 0o140000 # socket file
-# Fallbacks for uncommon platform-specific constants
-S_IFDOOR = 0
-S_IFPORT = 0
-S_IFWHT = 0
+# Fallbacks for uncommon platform-specific constants
+S_IFDOOR = 0
+S_IFPORT = 0
+S_IFWHT = 0
# Functions to test for each file type
@@ -75,18 +75,18 @@ def S_ISSOCK(mode):
"""Return True if mode is from a socket."""
return S_IFMT(mode) == S_IFSOCK
-def S_ISDOOR(mode):
- """Return True if mode is from a door."""
- return False
-
-def S_ISPORT(mode):
- """Return True if mode is from an event port."""
- return False
-
-def S_ISWHT(mode):
- """Return True if mode is from a whiteout."""
- return False
-
+def S_ISDOOR(mode):
+ """Return True if mode is from a door."""
+ return False
+
+def S_ISPORT(mode):
+ """Return True if mode is from an event port."""
+ return False
+
+def S_ISWHT(mode):
+ """Return True if mode is from a whiteout."""
+ return False
+
# Names for permission bits
S_ISUID = 0o4000 # set UID bit
@@ -127,7 +127,7 @@ SF_SNAPSHOT = 0x00200000 # file is a snapshot file
_filemode_table = (
((S_IFLNK, "l"),
- (S_IFSOCK, "s"), # Must appear before IFREG and IFDIR as IFSOCK == IFREG | IFDIR
+ (S_IFSOCK, "s"), # Must appear before IFREG and IFDIR as IFSOCK == IFREG | IFDIR
(S_IFREG, "-"),
(S_IFBLK, "b"),
(S_IFDIR, "d"),
diff --git a/contrib/tools/python3/src/Lib/statistics.py b/contrib/tools/python3/src/Lib/statistics.py
index 463ac9e92c..c82ef879db 100644
--- a/contrib/tools/python3/src/Lib/statistics.py
+++ b/contrib/tools/python3/src/Lib/statistics.py
@@ -7,21 +7,21 @@ averages, variance, and standard deviation.
Calculating averages
--------------------
-================== ==================================================
+================== ==================================================
Function Description
-================== ==================================================
+================== ==================================================
mean Arithmetic mean (average) of data.
-fmean Fast, floating point arithmetic mean.
-geometric_mean Geometric mean of data.
+fmean Fast, floating point arithmetic mean.
+geometric_mean Geometric mean of data.
harmonic_mean Harmonic mean of data.
median Median (middle value) of data.
median_low Low median of data.
median_high High median of data.
median_grouped Median, or 50th percentile, of grouped data.
mode Mode (most common value) of data.
-multimode List of modes (most common values of data).
-quantiles Divide data into intervals with equal probability.
-================== ==================================================
+multimode List of modes (most common values of data).
+quantiles Divide data into intervals with equal probability.
+================== ==================================================
Calculate the arithmetic mean ("the average") of data:
@@ -80,37 +80,37 @@ A single exception is defined: StatisticsError is a subclass of ValueError.
"""
-__all__ = [
- 'NormalDist',
- 'StatisticsError',
- 'fmean',
- 'geometric_mean',
- 'harmonic_mean',
- 'mean',
- 'median',
- 'median_grouped',
- 'median_high',
- 'median_low',
- 'mode',
- 'multimode',
- 'pstdev',
- 'pvariance',
- 'quantiles',
- 'stdev',
- 'variance',
-]
+__all__ = [
+ 'NormalDist',
+ 'StatisticsError',
+ 'fmean',
+ 'geometric_mean',
+ 'harmonic_mean',
+ 'mean',
+ 'median',
+ 'median_grouped',
+ 'median_high',
+ 'median_low',
+ 'mode',
+ 'multimode',
+ 'pstdev',
+ 'pvariance',
+ 'quantiles',
+ 'stdev',
+ 'variance',
+]
import math
import numbers
-import random
+import random
from fractions import Fraction
from decimal import Decimal
from itertools import groupby
from bisect import bisect_left, bisect_right
-from math import hypot, sqrt, fabs, exp, erf, tau, log, fsum
-from operator import itemgetter
-from collections import Counter
+from math import hypot, sqrt, fabs, exp, erf, tau, log, fsum
+from operator import itemgetter
+from collections import Counter
# === Exceptions ===
@@ -163,7 +163,7 @@ def _sum(data, start=0):
T = _coerce(int, type(start))
for typ, values in groupby(data, type):
T = _coerce(T, typ) # or raise TypeError
- for n, d in map(_exact_ratio, values):
+ for n, d in map(_exact_ratio, values):
count += 1
partials[d] = partials_get(d, 0) + n
if None in partials:
@@ -261,7 +261,7 @@ def _convert(value, T):
return T(value)
except TypeError:
if issubclass(T, Decimal):
- return T(value.numerator) / T(value.denominator)
+ return T(value.numerator) / T(value.denominator)
else:
raise
@@ -277,8 +277,8 @@ def _find_lteq(a, x):
def _find_rteq(a, l, x):
'Locate the rightmost value exactly equal to x'
i = bisect_right(a, x, lo=l)
- if i != (len(a) + 1) and a[i - 1] == x:
- return i - 1
+ if i != (len(a) + 1) and a[i - 1] == x:
+ return i - 1
raise ValueError
@@ -315,55 +315,55 @@ def mean(data):
raise StatisticsError('mean requires at least one data point')
T, total, count = _sum(data)
assert count == n
- return _convert(total / n, T)
-
-
-def fmean(data):
- """Convert data to floats and compute the arithmetic mean.
-
- This runs faster than the mean() function and it always returns a float.
- If the input dataset is empty, it raises a StatisticsError.
-
- >>> fmean([3.5, 4.0, 5.25])
- 4.25
- """
- try:
- n = len(data)
- except TypeError:
- # Handle iterators that do not define __len__().
- n = 0
- def count(iterable):
- nonlocal n
- for n, x in enumerate(iterable, start=1):
- yield x
- total = fsum(count(data))
- else:
- total = fsum(data)
- try:
- return total / n
- except ZeroDivisionError:
- raise StatisticsError('fmean requires at least one data point') from None
-
-
-def geometric_mean(data):
- """Convert data to floats and compute the geometric mean.
-
- Raises a StatisticsError if the input dataset is empty,
- if it contains a zero, or if it contains a negative value.
-
- No special efforts are made to achieve exact results.
- (However, this may change in the future.)
-
- >>> round(geometric_mean([54, 24, 36]), 9)
- 36.0
- """
- try:
- return exp(fmean(map(log, data)))
- except ValueError:
- raise StatisticsError('geometric mean requires a non-empty dataset '
- 'containing positive numbers') from None
-
-
+ return _convert(total / n, T)
+
+
+def fmean(data):
+ """Convert data to floats and compute the arithmetic mean.
+
+ This runs faster than the mean() function and it always returns a float.
+ If the input dataset is empty, it raises a StatisticsError.
+
+ >>> fmean([3.5, 4.0, 5.25])
+ 4.25
+ """
+ try:
+ n = len(data)
+ except TypeError:
+ # Handle iterators that do not define __len__().
+ n = 0
+ def count(iterable):
+ nonlocal n
+ for n, x in enumerate(iterable, start=1):
+ yield x
+ total = fsum(count(data))
+ else:
+ total = fsum(data)
+ try:
+ return total / n
+ except ZeroDivisionError:
+ raise StatisticsError('fmean requires at least one data point') from None
+
+
+def geometric_mean(data):
+ """Convert data to floats and compute the geometric mean.
+
+ Raises a StatisticsError if the input dataset is empty,
+ if it contains a zero, or if it contains a negative value.
+
+ No special efforts are made to achieve exact results.
+ (However, this may change in the future.)
+
+ >>> round(geometric_mean([54, 24, 36]), 9)
+ 36.0
+ """
+ try:
+ return exp(fmean(map(log, data)))
+ except ValueError:
+ raise StatisticsError('geometric mean requires a non-empty dataset '
+ 'containing positive numbers') from None
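The function is literally the exponential of the mean of the logs, so the identity below holds bit-for-bit; a small check with illustrative data:

    from math import exp, log
    from statistics import fmean, geometric_mean

    data = [54, 24, 36]
    geometric_mean(data) == exp(fmean(map(log, data)))   # True, same computation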
+
+
def harmonic_mean(data):
"""Return the harmonic mean of data.
@@ -403,11 +403,11 @@ def harmonic_mean(data):
else:
raise TypeError('unsupported type')
try:
- T, total, count = _sum(1 / x for x in _fail_neg(data, errmsg))
+ T, total, count = _sum(1 / x for x in _fail_neg(data, errmsg))
except ZeroDivisionError:
return 0
assert count == n
- return _convert(n / total, T)
+ return _convert(n / total, T)
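A classic use of the harmonic mean is averaging rates over equal intervals; for example:

    from statistics import harmonic_mean

    # A round trip driven at 40 km/h one way and 60 km/h back averages
    # 48 km/h, not 50, because more time is spent at the slower speed.
    harmonic_mean([40, 60])   # 48.0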
# FIXME: investigate ways to calculate medians without sorting? Quickselect?
@@ -428,11 +428,11 @@ def median(data):
n = len(data)
if n == 0:
raise StatisticsError("no median for empty data")
- if n % 2 == 1:
- return data[n // 2]
+ if n % 2 == 1:
+ return data[n // 2]
else:
- i = n // 2
- return (data[i - 1] + data[i]) / 2
+ i = n // 2
+ return (data[i - 1] + data[i]) / 2
def median_low(data):
@@ -451,10 +451,10 @@ def median_low(data):
n = len(data)
if n == 0:
raise StatisticsError("no median for empty data")
- if n % 2 == 1:
- return data[n // 2]
+ if n % 2 == 1:
+ return data[n // 2]
else:
- return data[n // 2 - 1]
+ return data[n // 2 - 1]
def median_high(data):
@@ -473,7 +473,7 @@ def median_high(data):
n = len(data)
if n == 0:
raise StatisticsError("no median for empty data")
- return data[n // 2]
+ return data[n // 2]
def median_grouped(data, interval=1):
@@ -510,15 +510,15 @@ def median_grouped(data, interval=1):
return data[0]
# Find the value at the midpoint. Remember this corresponds to the
# centre of the class interval.
- x = data[n // 2]
+ x = data[n // 2]
for obj in (x, interval):
if isinstance(obj, (str, bytes)):
raise TypeError('expected number but got %r' % obj)
try:
- L = x - interval / 2 # The lower limit of the median interval.
+ L = x - interval / 2 # The lower limit of the median interval.
except TypeError:
# Mixed type. For now we just coerce to float.
- L = float(x) - float(interval) / 2
+ L = float(x) - float(interval) / 2
# Uses bisection search to search for x in data with log(n) time complexity
# Find the position of leftmost occurrence of x in data
@@ -528,7 +528,7 @@ def median_grouped(data, interval=1):
l2 = _find_rteq(data, l1, x)
cf = l1
f = l2 - l1 + 1
- return L + interval * (n / 2 - cf) / f
+ return L + interval * (n / 2 - cf) / f
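Worked examples of the interpolation above (values agree with the stdlib documentation):

    from statistics import median_grouped

    median_grouped([52, 52, 53, 54])               # 52.5
    median_grouped([1, 3, 3, 5, 7], interval=2)    # 3.5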
def mode(data):
@@ -537,128 +537,128 @@ def mode(data):
``mode`` assumes discrete data, and returns a single value. This is the
standard treatment of the mode as commonly taught in schools:
- >>> mode([1, 1, 2, 3, 3, 3, 3, 4])
- 3
+ >>> mode([1, 1, 2, 3, 3, 3, 3, 4])
+ 3
This also works with nominal (non-numeric) data:
- >>> mode(["red", "blue", "blue", "red", "green", "red", "red"])
- 'red'
-
- If there are multiple modes with the same frequency, return the first one
- encountered:
-
- >>> mode(['red', 'red', 'green', 'blue', 'blue'])
- 'red'
-
- If *data* is empty, ``mode`` raises StatisticsError.
-
- """
- pairs = Counter(iter(data)).most_common(1)
- try:
- return pairs[0][0]
- except IndexError:
- raise StatisticsError('no mode for empty data') from None
-
-
-def multimode(data):
- """Return a list of the most frequently occurring values.
-
- Will return more than one result if there are multiple modes
- or an empty list if *data* is empty.
-
- >>> multimode('aabbbbbbbbcc')
- ['b']
- >>> multimode('aabbbbccddddeeffffgg')
- ['b', 'd', 'f']
- >>> multimode('')
- []
- """
- counts = Counter(iter(data)).most_common()
- maxcount, mode_items = next(groupby(counts, key=itemgetter(1)), (0, []))
- return list(map(itemgetter(0), mode_items))
-
-
-# Notes on methods for computing quantiles
-# ----------------------------------------
-#
-# There is no one perfect way to compute quantiles. Here we offer
-# two methods that serve common needs. Most other packages
-# surveyed offered at least one or both of these two, making them
-# "standard" in the sense of "widely-adopted and reproducible".
-# They are also easy to explain, easy to compute manually, and have
- # straightforward interpretations that aren't surprising.
-
-# The default method is known as "R6", "PERCENTILE.EXC", or "expected
-# value of rank order statistics". The alternative method is known as
-# "R7", "PERCENTILE.INC", or "mode of rank order statistics".
-
-# For sample data where there is a positive probability for values
-# beyond the range of the data, the R6 exclusive method is a
-# reasonable choice. Consider a random sample of nine values from a
-# population with a uniform distribution from 0.0 to 1.0. The
-# distribution of the third ranked sample point is described by
-# betavariate(alpha=3, beta=7) which has mode=0.250, median=0.286, and
-# mean=0.300. Only the latter (which corresponds with R6) gives the
-# desired cut point with 30% of the population falling below that
-# value, making it comparable to a result from an inv_cdf() function.
-# The R6 exclusive method is also idempotent.
-
-# For describing population data where the end points are known to
-# be included in the data, the R7 inclusive method is a reasonable
-# choice. Instead of the mean, it uses the mode of the beta
-# distribution for the interior points. Per Hyndman & Fan, "One nice
-# property is that the vertices of Q7(p) divide the range into n - 1
-# intervals, and exactly 100p% of the intervals lie to the left of
-# Q7(p) and 100(1 - p)% of the intervals lie to the right of Q7(p)."
-
-# If needed, other methods could be added. However, for now, the
-# position is that fewer options make for easier choices and that
-# external packages can be used for anything more advanced.
-
-def quantiles(data, *, n=4, method='exclusive'):
- """Divide *data* into *n* continuous intervals with equal probability.
-
- Returns a list of (n - 1) cut points separating the intervals.
-
- Set *n* to 4 for quartiles (the default). Set *n* to 10 for deciles.
- Set *n* to 100 for percentiles, which gives the 99 cut points that
- separate *data* into 100 equal-sized groups.
-
- The *data* can be any iterable containing sample data.
- The cut points are linearly interpolated between data points.
-
- If *method* is set to *inclusive*, *data* is treated as population
- data. The minimum value is treated as the 0th percentile and the
- maximum value is treated as the 100th percentile.
+ >>> mode(["red", "blue", "blue", "red", "green", "red", "red"])
+ 'red'
+
+ If there are multiple modes with the same frequency, return the first one
+ encountered:
+
+ >>> mode(['red', 'red', 'green', 'blue', 'blue'])
+ 'red'
+
+ If *data* is empty, ``mode`` raises StatisticsError.
+
"""
- if n < 1:
- raise StatisticsError('n must be at least 1')
- data = sorted(data)
- ld = len(data)
- if ld < 2:
- raise StatisticsError('must have at least two data points')
- if method == 'inclusive':
- m = ld - 1
- result = []
- for i in range(1, n):
- j, delta = divmod(i * m, n)
- interpolated = (data[j] * (n - delta) + data[j + 1] * delta) / n
- result.append(interpolated)
- return result
- if method == 'exclusive':
- m = ld + 1
- result = []
- for i in range(1, n):
- j = i * m // n # rescale i to m/n
- j = 1 if j < 1 else ld-1 if j > ld-1 else j # clamp to 1 .. ld-1
- delta = i*m - j*n # exact integer math
- interpolated = (data[j - 1] * (n - delta) + data[j] * delta) / n
- result.append(interpolated)
- return result
- raise ValueError(f'Unknown method: {method!r}')
-
-
+ pairs = Counter(iter(data)).most_common(1)
+ try:
+ return pairs[0][0]
+ except IndexError:
+ raise StatisticsError('no mode for empty data') from None
+
+
+def multimode(data):
+ """Return a list of the most frequently occurring values.
+
+ Will return more than one result if there are multiple modes
+ or an empty list if *data* is empty.
+
+ >>> multimode('aabbbbbbbbcc')
+ ['b']
+ >>> multimode('aabbbbccddddeeffffgg')
+ ['b', 'd', 'f']
+ >>> multimode('')
+ []
+ """
+ counts = Counter(iter(data)).most_common()
+ maxcount, mode_items = next(groupby(counts, key=itemgetter(1)), (0, []))
+ return list(map(itemgetter(0), mode_items))
+
+
+# Notes on methods for computing quantiles
+# ----------------------------------------
+#
+# There is no one perfect way to compute quantiles. Here we offer
+# two methods that serve common needs. Most other packages
+# surveyed offered at least one or both of these two, making them
+# "standard" in the sense of "widely-adopted and reproducible".
+# They are also easy to explain, easy to compute manually, and have
+# straight-forward interpretations that aren't surprising.
+
+# The default method is known as "R6", "PERCENTILE.EXC", or "expected
+# value of rank order statistics". The alternative method is known as
+# "R7", "PERCENTILE.INC", or "mode of rank order statistics".
+
+# For sample data where there is a positive probability for values
+# beyond the range of the data, the R6 exclusive method is a
+# reasonable choice. Consider a random sample of nine values from a
+# population with a uniform distribution from 0.0 to 1.0. The
+# distribution of the third ranked sample point is described by
+# betavariate(alpha=3, beta=7) which has mode=0.250, median=0.286, and
+# mean=0.300. Only the latter (which corresponds with R6) gives the
+# desired cut point with 30% of the population falling below that
+# value, making it comparable to a result from an inv_cdf() function.
+# The R6 exclusive method is also idempotent.
+
+# For describing population data where the end points are known to
+# be included in the data, the R7 inclusive method is a reasonable
+# choice. Instead of the mean, it uses the mode of the beta
+# distribution for the interior points. Per Hyndman & Fan, "One nice
+# property is that the vertices of Q7(p) divide the range into n - 1
+# intervals, and exactly 100p% of the intervals lie to the left of
+# Q7(p) and 100(1 - p)% of the intervals lie to the right of Q7(p)."
+
+# If needed, other methods could be added. However, for now, the
+# position is that fewer options make for easier choices and that
+# external packages can be used for anything more advanced.
+
+def quantiles(data, *, n=4, method='exclusive'):
+ """Divide *data* into *n* continuous intervals with equal probability.
+
+ Returns a list of (n - 1) cut points separating the intervals.
+
+ Set *n* to 4 for quartiles (the default). Set *n* to 10 for deciles.
+ Set *n* to 100 for percentiles, which gives the 99 cut points that
+ separate *data* into 100 equal-sized groups.
+
+ The *data* can be any iterable containing sample data.
+ The cut points are linearly interpolated between data points.
+
+ If *method* is set to *inclusive*, *data* is treated as population
+ data. The minimum value is treated as the 0th percentile and the
+ maximum value is treated as the 100th percentile.
+ """
+ if n < 1:
+ raise StatisticsError('n must be at least 1')
+ data = sorted(data)
+ ld = len(data)
+ if ld < 2:
+ raise StatisticsError('must have at least two data points')
+ if method == 'inclusive':
+ m = ld - 1
+ result = []
+ for i in range(1, n):
+ j, delta = divmod(i * m, n)
+ interpolated = (data[j] * (n - delta) + data[j + 1] * delta) / n
+ result.append(interpolated)
+ return result
+ if method == 'exclusive':
+ m = ld + 1
+ result = []
+ for i in range(1, n):
+ j = i * m // n # rescale i to m/n
+ j = 1 if j < 1 else ld-1 if j > ld-1 else j # clamp to 1 .. ld-1
+ delta = i*m - j*n # exact integer math
+ interpolated = (data[j - 1] * (n - delta) + data[j] * delta) / n
+ result.append(interpolated)
+ return result
+ raise ValueError(f'Unknown method: {method!r}')
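A small illustration of how the two methods above differ on the same (illustrative) data:

    from statistics import quantiles

    data = [1, 2, 3, 4, 5]
    quantiles(data)                       # [1.5, 3.0, 4.5]  R6 / exclusive (default)
    quantiles(data, method='inclusive')   # [2.0, 3.0, 4.0]  R7 / inclusive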
+
+
# === Measures of spread ===
# See http://mathworld.wolfram.com/Variance.html
@@ -680,16 +680,16 @@ def _ss(data, c=None):
calculated from ``c`` as given. Use the second case with care, as it can
lead to garbage results.
"""
- if c is not None:
- T, total, count = _sum((x-c)**2 for x in data)
- return (T, total)
- c = mean(data)
+ if c is not None:
+ T, total, count = _sum((x-c)**2 for x in data)
+ return (T, total)
+ c = mean(data)
T, total, count = _sum((x-c)**2 for x in data)
# The following sum should mathematically equal zero, but due to rounding
# error may not.
- U, total2, count2 = _sum((x - c) for x in data)
+ U, total2, count2 = _sum((x - c) for x in data)
assert T == U and count == count2
- total -= total2 ** 2 / len(data)
+ total -= total2 ** 2 / len(data)
assert not total < 0, 'negative sum of square deviations: %f' % total
return (T, total)
@@ -738,13 +738,13 @@ def variance(data, xbar=None):
if n < 2:
raise StatisticsError('variance requires at least two data points')
T, ss = _ss(data, xbar)
- return _convert(ss / (n - 1), T)
+ return _convert(ss / (n - 1), T)
def pvariance(data, mu=None):
"""Return the population variance of ``data``.
- data should be a sequence or iterable of Real-valued numbers, with at least one
+ data should be a sequence or iterable of Real-valued numbers, with at least one
value. The optional argument mu, if given, should be the mean of
the data. If it is missing or None, the mean is automatically calculated.
@@ -782,7 +782,7 @@ def pvariance(data, mu=None):
if n < 1:
raise StatisticsError('pvariance requires at least one data point')
T, ss = _ss(data, mu)
- return _convert(ss / n, T)
+ return _convert(ss / n, T)
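The only difference between the two estimators is the denominator; with illustrative data:

    from statistics import pvariance, variance

    data = [2, 4, 4, 4, 5, 5, 7, 9]   # mean 5, sum of squared deviations 32
    pvariance(data)   # 4.0                (32 / n, with n = 8)
    variance(data)    # 4.571428571428571  (32 / (n - 1), with n - 1 = 7)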
def stdev(data, xbar=None):
@@ -815,306 +815,306 @@ def pstdev(data, mu=None):
return var.sqrt()
except AttributeError:
return math.sqrt(var)
-
-
-## Normal Distribution #####################################################
-
-
-def _normal_dist_inv_cdf(p, mu, sigma):
- # There is no closed-form solution to the inverse CDF for the normal
- # distribution, so we use a rational approximation instead:
- # Wichura, M.J. (1988). "Algorithm AS241: The Percentage Points of the
- # Normal Distribution". Applied Statistics. Blackwell Publishing. 37
- # (3): 477–484. doi:10.2307/2347330. JSTOR 2347330.
- q = p - 0.5
- if fabs(q) <= 0.425:
- r = 0.180625 - q * q
- # Hash sum: 55.88319_28806_14901_4439
- num = (((((((2.50908_09287_30122_6727e+3 * r +
- 3.34305_75583_58812_8105e+4) * r +
- 6.72657_70927_00870_0853e+4) * r +
- 4.59219_53931_54987_1457e+4) * r +
- 1.37316_93765_50946_1125e+4) * r +
- 1.97159_09503_06551_4427e+3) * r +
- 1.33141_66789_17843_7745e+2) * r +
- 3.38713_28727_96366_6080e+0) * q
- den = (((((((5.22649_52788_52854_5610e+3 * r +
- 2.87290_85735_72194_2674e+4) * r +
- 3.93078_95800_09271_0610e+4) * r +
- 2.12137_94301_58659_5867e+4) * r +
- 5.39419_60214_24751_1077e+3) * r +
- 6.87187_00749_20579_0830e+2) * r +
- 4.23133_30701_60091_1252e+1) * r +
- 1.0)
- x = num / den
- return mu + (x * sigma)
- r = p if q <= 0.0 else 1.0 - p
- r = sqrt(-log(r))
- if r <= 5.0:
- r = r - 1.6
- # Hash sum: 49.33206_50330_16102_89036
- num = (((((((7.74545_01427_83414_07640e-4 * r +
- 2.27238_44989_26918_45833e-2) * r +
- 2.41780_72517_74506_11770e-1) * r +
- 1.27045_82524_52368_38258e+0) * r +
- 3.64784_83247_63204_60504e+0) * r +
- 5.76949_72214_60691_40550e+0) * r +
- 4.63033_78461_56545_29590e+0) * r +
- 1.42343_71107_49683_57734e+0)
- den = (((((((1.05075_00716_44416_84324e-9 * r +
- 5.47593_80849_95344_94600e-4) * r +
- 1.51986_66563_61645_71966e-2) * r +
- 1.48103_97642_74800_74590e-1) * r +
- 6.89767_33498_51000_04550e-1) * r +
- 1.67638_48301_83803_84940e+0) * r +
- 2.05319_16266_37758_82187e+0) * r +
- 1.0)
- else:
- r = r - 5.0
- # Hash sum: 47.52583_31754_92896_71629
- num = (((((((2.01033_43992_92288_13265e-7 * r +
- 2.71155_55687_43487_57815e-5) * r +
- 1.24266_09473_88078_43860e-3) * r +
- 2.65321_89526_57612_30930e-2) * r +
- 2.96560_57182_85048_91230e-1) * r +
- 1.78482_65399_17291_33580e+0) * r +
- 5.46378_49111_64114_36990e+0) * r +
- 6.65790_46435_01103_77720e+0)
- den = (((((((2.04426_31033_89939_78564e-15 * r +
- 1.42151_17583_16445_88870e-7) * r +
- 1.84631_83175_10054_68180e-5) * r +
- 7.86869_13114_56132_59100e-4) * r +
- 1.48753_61290_85061_48525e-2) * r +
- 1.36929_88092_27358_05310e-1) * r +
- 5.99832_20655_58879_37690e-1) * r +
- 1.0)
- x = num / den
- if q < 0.0:
- x = -x
- return mu + (x * sigma)
-
-
-# If available, use C implementation
-try:
- from _statistics import _normal_dist_inv_cdf
-except ImportError:
- pass
-
-
-class NormalDist:
- "Normal distribution of a random variable"
- # https://en.wikipedia.org/wiki/Normal_distribution
- # https://en.wikipedia.org/wiki/Variance#Properties
-
- __slots__ = {
- '_mu': 'Arithmetic mean of a normal distribution',
- '_sigma': 'Standard deviation of a normal distribution',
- }
-
- def __init__(self, mu=0.0, sigma=1.0):
- "NormalDist where mu is the mean and sigma is the standard deviation."
- if sigma < 0.0:
- raise StatisticsError('sigma must be non-negative')
- self._mu = float(mu)
- self._sigma = float(sigma)
-
- @classmethod
- def from_samples(cls, data):
- "Make a normal distribution instance from sample data."
- if not isinstance(data, (list, tuple)):
- data = list(data)
- xbar = fmean(data)
- return cls(xbar, stdev(data, xbar))
-
- def samples(self, n, *, seed=None):
- "Generate *n* samples for a given mean and standard deviation."
- gauss = random.gauss if seed is None else random.Random(seed).gauss
- mu, sigma = self._mu, self._sigma
- return [gauss(mu, sigma) for i in range(n)]
-
- def pdf(self, x):
- "Probability density function. P(x <= X < x+dx) / dx"
- variance = self._sigma ** 2.0
- if not variance:
- raise StatisticsError('pdf() not defined when sigma is zero')
- return exp((x - self._mu)**2.0 / (-2.0*variance)) / sqrt(tau*variance)
-
- def cdf(self, x):
- "Cumulative distribution function. P(X <= x)"
- if not self._sigma:
- raise StatisticsError('cdf() not defined when sigma is zero')
- return 0.5 * (1.0 + erf((x - self._mu) / (self._sigma * sqrt(2.0))))
-
- def inv_cdf(self, p):
- """Inverse cumulative distribution function. x : P(X <= x) = p
-
- Finds the value of the random variable such that the probability of
- the variable being less than or equal to that value equals the given
- probability.
-
- This function is also called the percent point function or quantile
- function.
- """
- if p <= 0.0 or p >= 1.0:
- raise StatisticsError('p must be in the range 0.0 < p < 1.0')
- if self._sigma <= 0.0:
- raise StatisticsError('inv_cdf() not defined when sigma is at or below zero')
- return _normal_dist_inv_cdf(p, self._mu, self._sigma)
-
- def quantiles(self, n=4):
- """Divide into *n* continuous intervals with equal probability.
-
- Returns a list of (n - 1) cut points separating the intervals.
-
- Set *n* to 4 for quartiles (the default). Set *n* to 10 for deciles.
- Set *n* to 100 for percentiles, which gives the 99 cut points that
- separate the normal distribution into 100 equal-sized groups.
- """
- return [self.inv_cdf(i / n) for i in range(1, n)]
-
- def overlap(self, other):
- """Compute the overlapping coefficient (OVL) between two normal distributions.
-
- Measures the agreement between two normal probability distributions.
- Returns a value between 0.0 and 1.0 giving the overlapping area in
- the two underlying probability density functions.
-
- >>> N1 = NormalDist(2.4, 1.6)
- >>> N2 = NormalDist(3.2, 2.0)
- >>> N1.overlap(N2)
- 0.8035050657330205
- """
- # See: "The overlapping coefficient as a measure of agreement between
- # probability distributions and point estimation of the overlap of two
- # normal densities" -- Henry F. Inman and Edwin L. Bradley Jr
- # http://dx.doi.org/10.1080/03610928908830127
- if not isinstance(other, NormalDist):
- raise TypeError('Expected another NormalDist instance')
- X, Y = self, other
- if (Y._sigma, Y._mu) < (X._sigma, X._mu): # sort to assure commutativity
- X, Y = Y, X
- X_var, Y_var = X.variance, Y.variance
- if not X_var or not Y_var:
- raise StatisticsError('overlap() not defined when sigma is zero')
- dv = Y_var - X_var
- dm = fabs(Y._mu - X._mu)
- if not dv:
- return 1.0 - erf(dm / (2.0 * X._sigma * sqrt(2.0)))
- a = X._mu * Y_var - Y._mu * X_var
- b = X._sigma * Y._sigma * sqrt(dm**2.0 + dv * log(Y_var / X_var))
- x1 = (a + b) / dv
- x2 = (a - b) / dv
- return 1.0 - (fabs(Y.cdf(x1) - X.cdf(x1)) + fabs(Y.cdf(x2) - X.cdf(x2)))
-
- def zscore(self, x):
- """Compute the Standard Score. (x - mean) / stdev
-
- Describes *x* in terms of the number of standard deviations
- above or below the mean of the normal distribution.
- """
- # https://www.statisticshowto.com/probability-and-statistics/z-score/
- if not self._sigma:
- raise StatisticsError('zscore() not defined when sigma is zero')
- return (x - self._mu) / self._sigma
-
- @property
- def mean(self):
- "Arithmetic mean of the normal distribution."
- return self._mu
-
- @property
- def median(self):
- "Return the median of the normal distribution"
- return self._mu
-
- @property
- def mode(self):
- """Return the mode of the normal distribution
-
- The mode is the value x at which the probability density
- function (pdf) takes its maximum value.
- """
- return self._mu
-
- @property
- def stdev(self):
- "Standard deviation of the normal distribution."
- return self._sigma
-
- @property
- def variance(self):
- "Square of the standard deviation."
- return self._sigma ** 2.0
-
- def __add__(x1, x2):
- """Add a constant or another NormalDist instance.
-
- If *other* is a constant, translate mu by the constant,
- leaving sigma unchanged.
-
- If *other* is a NormalDist, add both the means and the variances.
- Mathematically, this works only if the two distributions are
- independent or if they are jointly normally distributed.
- """
- if isinstance(x2, NormalDist):
- return NormalDist(x1._mu + x2._mu, hypot(x1._sigma, x2._sigma))
- return NormalDist(x1._mu + x2, x1._sigma)
-
- def __sub__(x1, x2):
- """Subtract a constant or another NormalDist instance.
-
- If *other* is a constant, translate by the constant mu,
- leaving sigma unchanged.
-
- If *other* is a NormalDist, subtract the means and add the variances.
- Mathematically, this works only if the two distributions are
- independent or if they are jointly normally distributed.
- """
- if isinstance(x2, NormalDist):
- return NormalDist(x1._mu - x2._mu, hypot(x1._sigma, x2._sigma))
- return NormalDist(x1._mu - x2, x1._sigma)
-
- def __mul__(x1, x2):
- """Multiply both mu and sigma by a constant.
-
- Used for rescaling, perhaps to change measurement units.
- Sigma is scaled with the absolute value of the constant.
- """
- return NormalDist(x1._mu * x2, x1._sigma * fabs(x2))
-
- def __truediv__(x1, x2):
- """Divide both mu and sigma by a constant.
-
- Used for rescaling, perhaps to change measurement units.
- Sigma is scaled with the absolute value of the constant.
- """
- return NormalDist(x1._mu / x2, x1._sigma / fabs(x2))
-
- def __pos__(x1):
- "Return a copy of the instance."
- return NormalDist(x1._mu, x1._sigma)
-
- def __neg__(x1):
- "Negates mu while keeping sigma the same."
- return NormalDist(-x1._mu, x1._sigma)
-
- __radd__ = __add__
-
- def __rsub__(x1, x2):
- "Subtract a NormalDist from a constant or another NormalDist."
- return -(x1 - x2)
-
- __rmul__ = __mul__
-
- def __eq__(x1, x2):
- "Two NormalDist objects are equal if their mu and sigma are both equal."
- if not isinstance(x2, NormalDist):
- return NotImplemented
- return x1._mu == x2._mu and x1._sigma == x2._sigma
-
- def __hash__(self):
- "NormalDist objects hash equal if their mu and sigma are both equal."
- return hash((self._mu, self._sigma))
-
- def __repr__(self):
- return f'{type(self).__name__}(mu={self._mu!r}, sigma={self._sigma!r})'
+
+
+## Normal Distribution #####################################################
+
+
+def _normal_dist_inv_cdf(p, mu, sigma):
+ # There is no closed-form solution to the inverse CDF for the normal
+ # distribution, so we use a rational approximation instead:
+ # Wichura, M.J. (1988). "Algorithm AS241: The Percentage Points of the
+ # Normal Distribution". Applied Statistics. Blackwell Publishing. 37
+ # (3): 477–484. doi:10.2307/2347330. JSTOR 2347330.
+ q = p - 0.5
+ if fabs(q) <= 0.425:
+ r = 0.180625 - q * q
+ # Hash sum: 55.88319_28806_14901_4439
+ num = (((((((2.50908_09287_30122_6727e+3 * r +
+ 3.34305_75583_58812_8105e+4) * r +
+ 6.72657_70927_00870_0853e+4) * r +
+ 4.59219_53931_54987_1457e+4) * r +
+ 1.37316_93765_50946_1125e+4) * r +
+ 1.97159_09503_06551_4427e+3) * r +
+ 1.33141_66789_17843_7745e+2) * r +
+ 3.38713_28727_96366_6080e+0) * q
+ den = (((((((5.22649_52788_52854_5610e+3 * r +
+ 2.87290_85735_72194_2674e+4) * r +
+ 3.93078_95800_09271_0610e+4) * r +
+ 2.12137_94301_58659_5867e+4) * r +
+ 5.39419_60214_24751_1077e+3) * r +
+ 6.87187_00749_20579_0830e+2) * r +
+ 4.23133_30701_60091_1252e+1) * r +
+ 1.0)
+ x = num / den
+ return mu + (x * sigma)
+ r = p if q <= 0.0 else 1.0 - p
+ r = sqrt(-log(r))
+ if r <= 5.0:
+ r = r - 1.6
+ # Hash sum: 49.33206_50330_16102_89036
+ num = (((((((7.74545_01427_83414_07640e-4 * r +
+ 2.27238_44989_26918_45833e-2) * r +
+ 2.41780_72517_74506_11770e-1) * r +
+ 1.27045_82524_52368_38258e+0) * r +
+ 3.64784_83247_63204_60504e+0) * r +
+ 5.76949_72214_60691_40550e+0) * r +
+ 4.63033_78461_56545_29590e+0) * r +
+ 1.42343_71107_49683_57734e+0)
+ den = (((((((1.05075_00716_44416_84324e-9 * r +
+ 5.47593_80849_95344_94600e-4) * r +
+ 1.51986_66563_61645_71966e-2) * r +
+ 1.48103_97642_74800_74590e-1) * r +
+ 6.89767_33498_51000_04550e-1) * r +
+ 1.67638_48301_83803_84940e+0) * r +
+ 2.05319_16266_37758_82187e+0) * r +
+ 1.0)
+ else:
+ r = r - 5.0
+ # Hash sum: 47.52583_31754_92896_71629
+ num = (((((((2.01033_43992_92288_13265e-7 * r +
+ 2.71155_55687_43487_57815e-5) * r +
+ 1.24266_09473_88078_43860e-3) * r +
+ 2.65321_89526_57612_30930e-2) * r +
+ 2.96560_57182_85048_91230e-1) * r +
+ 1.78482_65399_17291_33580e+0) * r +
+ 5.46378_49111_64114_36990e+0) * r +
+ 6.65790_46435_01103_77720e+0)
+ den = (((((((2.04426_31033_89939_78564e-15 * r +
+ 1.42151_17583_16445_88870e-7) * r +
+ 1.84631_83175_10054_68180e-5) * r +
+ 7.86869_13114_56132_59100e-4) * r +
+ 1.48753_61290_85061_48525e-2) * r +
+ 1.36929_88092_27358_05310e-1) * r +
+ 5.99832_20655_58879_37690e-1) * r +
+ 1.0)
+ x = num / den
+ if q < 0.0:
+ x = -x
+ return mu + (x * sigma)
+
+
+# If available, use C implementation
+try:
+ from _statistics import _normal_dist_inv_cdf
+except ImportError:
+ pass
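Whichever implementation ends up bound, a quick round-trip sanity check through the public API:

    from statistics import NormalDist

    Z = NormalDist()        # standard normal, mu=0.0, sigma=1.0
    x = Z.inv_cdf(0.975)    # ~1.959963984540054, the familiar 97.5% point
    Z.cdf(x)                # ~0.975, round-trips through the CDF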
+
+
+class NormalDist:
+ "Normal distribution of a random variable"
+ # https://en.wikipedia.org/wiki/Normal_distribution
+ # https://en.wikipedia.org/wiki/Variance#Properties
+
+ __slots__ = {
+ '_mu': 'Arithmetic mean of a normal distribution',
+ '_sigma': 'Standard deviation of a normal distribution',
+ }
+
+ def __init__(self, mu=0.0, sigma=1.0):
+ "NormalDist where mu is the mean and sigma is the standard deviation."
+ if sigma < 0.0:
+ raise StatisticsError('sigma must be non-negative')
+ self._mu = float(mu)
+ self._sigma = float(sigma)
+
+ @classmethod
+ def from_samples(cls, data):
+ "Make a normal distribution instance from sample data."
+ if not isinstance(data, (list, tuple)):
+ data = list(data)
+ xbar = fmean(data)
+ return cls(xbar, stdev(data, xbar))
+
+ def samples(self, n, *, seed=None):
+ "Generate *n* samples for a given mean and standard deviation."
+ gauss = random.gauss if seed is None else random.Random(seed).gauss
+ mu, sigma = self._mu, self._sigma
+ return [gauss(mu, sigma) for i in range(n)]
+
+ def pdf(self, x):
+ "Probability density function. P(x <= X < x+dx) / dx"
+ variance = self._sigma ** 2.0
+ if not variance:
+ raise StatisticsError('pdf() not defined when sigma is zero')
+ return exp((x - self._mu)**2.0 / (-2.0*variance)) / sqrt(tau*variance)
+
+ def cdf(self, x):
+ "Cumulative distribution function. P(X <= x)"
+ if not self._sigma:
+ raise StatisticsError('cdf() not defined when sigma is zero')
+ return 0.5 * (1.0 + erf((x - self._mu) / (self._sigma * sqrt(2.0))))
+
+ def inv_cdf(self, p):
+ """Inverse cumulative distribution function. x : P(X <= x) = p
+
+ Finds the value of the random variable such that the probability of
+ the variable being less than or equal to that value equals the given
+ probability.
+
+ This function is also called the percent point function or quantile
+ function.
+ """
+ if p <= 0.0 or p >= 1.0:
+ raise StatisticsError('p must be in the range 0.0 < p < 1.0')
+ if self._sigma <= 0.0:
+ raise StatisticsError('inv_cdf() not defined when sigma is at or below zero')
+ return _normal_dist_inv_cdf(p, self._mu, self._sigma)
+
+ def quantiles(self, n=4):
+ """Divide into *n* continuous intervals with equal probability.
+
+ Returns a list of (n - 1) cut points separating the intervals.
+
+ Set *n* to 4 for quartiles (the default). Set *n* to 10 for deciles.
+ Set *n* to 100 for percentiles, which gives the 99 cut points that
+ separate the normal distribution into 100 equal-sized groups.
+ """
+ return [self.inv_cdf(i / n) for i in range(1, n)]
+
+ def overlap(self, other):
+ """Compute the overlapping coefficient (OVL) between two normal distributions.
+
+ Measures the agreement between two normal probability distributions.
+ Returns a value between 0.0 and 1.0 giving the overlapping area in
+ the two underlying probability density functions.
+
+ >>> N1 = NormalDist(2.4, 1.6)
+ >>> N2 = NormalDist(3.2, 2.0)
+ >>> N1.overlap(N2)
+ 0.8035050657330205
+ """
+ # See: "The overlapping coefficient as a measure of agreement between
+ # probability distributions and point estimation of the overlap of two
+ # normal densities" -- Henry F. Inman and Edwin L. Bradley Jr
+ # http://dx.doi.org/10.1080/03610928908830127
+ if not isinstance(other, NormalDist):
+ raise TypeError('Expected another NormalDist instance')
+ X, Y = self, other
+ if (Y._sigma, Y._mu) < (X._sigma, X._mu): # sort to assure commutativity
+ X, Y = Y, X
+ X_var, Y_var = X.variance, Y.variance
+ if not X_var or not Y_var:
+ raise StatisticsError('overlap() not defined when sigma is zero')
+ dv = Y_var - X_var
+ dm = fabs(Y._mu - X._mu)
+ if not dv:
+ return 1.0 - erf(dm / (2.0 * X._sigma * sqrt(2.0)))
+ a = X._mu * Y_var - Y._mu * X_var
+ b = X._sigma * Y._sigma * sqrt(dm**2.0 + dv * log(Y_var / X_var))
+ x1 = (a + b) / dv
+ x2 = (a - b) / dv
+ return 1.0 - (fabs(Y.cdf(x1) - X.cdf(x1)) + fabs(Y.cdf(x2) - X.cdf(x2)))
+
+ def zscore(self, x):
+ """Compute the Standard Score. (x - mean) / stdev
+
+ Describes *x* in terms of the number of standard deviations
+ above or below the mean of the normal distribution.
+ """
+ # https://www.statisticshowto.com/probability-and-statistics/z-score/
+ if not self._sigma:
+ raise StatisticsError('zscore() not defined when sigma is zero')
+ return (x - self._mu) / self._sigma
+
+ @property
+ def mean(self):
+ "Arithmetic mean of the normal distribution."
+ return self._mu
+
+ @property
+ def median(self):
+ "Return the median of the normal distribution"
+ return self._mu
+
+ @property
+ def mode(self):
+ """Return the mode of the normal distribution
+
+ The mode is the value x at which the probability density
+ function (pdf) takes its maximum value.
+ """
+ return self._mu
+
+ @property
+ def stdev(self):
+ "Standard deviation of the normal distribution."
+ return self._sigma
+
+ @property
+ def variance(self):
+ "Square of the standard deviation."
+ return self._sigma ** 2.0
+
+ def __add__(x1, x2):
+ """Add a constant or another NormalDist instance.
+
+ If *other* is a constant, translate mu by the constant,
+ leaving sigma unchanged.
+
+ If *other* is a NormalDist, add both the means and the variances.
+ Mathematically, this works only if the two distributions are
+ independent or if they are jointly normally distributed.
+ """
+ if isinstance(x2, NormalDist):
+ return NormalDist(x1._mu + x2._mu, hypot(x1._sigma, x2._sigma))
+ return NormalDist(x1._mu + x2, x1._sigma)
+
+ def __sub__(x1, x2):
+ """Subtract a constant or another NormalDist instance.
+
+ If *other* is a constant, translate by the constant mu,
+ leaving sigma unchanged.
+
+ If *other* is a NormalDist, subtract the means and add the variances.
+ Mathematically, this works only if the two distributions are
+ independent or if they are jointly normally distributed.
+ """
+ if isinstance(x2, NormalDist):
+ return NormalDist(x1._mu - x2._mu, hypot(x1._sigma, x2._sigma))
+ return NormalDist(x1._mu - x2, x1._sigma)
+
+ def __mul__(x1, x2):
+ """Multiply both mu and sigma by a constant.
+
+ Used for rescaling, perhaps to change measurement units.
+ Sigma is scaled with the absolute value of the constant.
+ """
+ return NormalDist(x1._mu * x2, x1._sigma * fabs(x2))
+
+ def __truediv__(x1, x2):
+ """Divide both mu and sigma by a constant.
+
+ Used for rescaling, perhaps to change measurement units.
+ Sigma is scaled with the absolute value of the constant.
+ """
+ return NormalDist(x1._mu / x2, x1._sigma / fabs(x2))
+
+ def __pos__(x1):
+ "Return a copy of the instance."
+ return NormalDist(x1._mu, x1._sigma)
+
+ def __neg__(x1):
+ "Negates mu while keeping sigma the same."
+ return NormalDist(-x1._mu, x1._sigma)
+
+ __radd__ = __add__
+
+ def __rsub__(x1, x2):
+ "Subtract a NormalDist from a constant or another NormalDist."
+ return -(x1 - x2)
+
+ __rmul__ = __mul__
+
+ def __eq__(x1, x2):
+ "Two NormalDist objects are equal if their mu and sigma are both equal."
+ if not isinstance(x2, NormalDist):
+ return NotImplemented
+ return x1._mu == x2._mu and x1._sigma == x2._sigma
+
+ def __hash__(self):
+ "NormalDist objects hash equal if their mu and sigma are both equal."
+ return hash((self._mu, self._sigma))
+
+ def __repr__(self):
+ return f'{type(self).__name__}(mu={self._mu!r}, sigma={self._sigma!r})'
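The operator overloads above make independent normal distributions composable: sigmas combine via hypot(), and constants rescale both parameters. A brief illustration:

    from statistics import NormalDist

    X = NormalDist(3.0, 4.0)
    Y = NormalDist(1.0, 3.0)
    X + Y    # NormalDist(mu=4.0, sigma=5.0), since hypot(4, 3) == 5
    X - Y    # NormalDist(mu=2.0, sigma=5.0), variances still add
    2 * X    # NormalDist(mu=6.0, sigma=8.0), both parameters rescaled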
diff --git a/contrib/tools/python3/src/Lib/string.py b/contrib/tools/python3/src/Lib/string.py
index 489777b10c..861f158628 100644
--- a/contrib/tools/python3/src/Lib/string.py
+++ b/contrib/tools/python3/src/Lib/string.py
@@ -52,9 +52,9 @@ def capwords(s, sep=None):
import re as _re
from collections import ChainMap as _ChainMap
-_sentinel_dict = {}
-
-class Template:
+_sentinel_dict = {}
+
+class Template:
"""A string class for supporting $-substitutions."""
delimiter = '$'
@@ -66,24 +66,24 @@ class Template:
braceidpattern = None
flags = _re.IGNORECASE
- def __init_subclass__(cls):
- super().__init_subclass__()
- if 'pattern' in cls.__dict__:
- pattern = cls.pattern
- else:
- delim = _re.escape(cls.delimiter)
- id = cls.idpattern
- bid = cls.braceidpattern or cls.idpattern
- pattern = fr"""
- {delim}(?:
- (?P<escaped>{delim}) | # Escape sequence of two delimiters
- (?P<named>{id}) | # delimiter and a Python identifier
- {{(?P<braced>{bid})}} | # delimiter and a braced identifier
- (?P<invalid>) # Other ill-formed delimiter exprs
- )
- """
- cls.pattern = _re.compile(pattern, cls.flags | _re.VERBOSE)
-
+ def __init_subclass__(cls):
+ super().__init_subclass__()
+ if 'pattern' in cls.__dict__:
+ pattern = cls.pattern
+ else:
+ delim = _re.escape(cls.delimiter)
+ id = cls.idpattern
+ bid = cls.braceidpattern or cls.idpattern
+ pattern = fr"""
+ {delim}(?:
+ (?P<escaped>{delim}) | # Escape sequence of two delimiters
+ (?P<named>{id}) | # delimiter and a Python identifier
+ {{(?P<braced>{bid})}} | # delimiter and a braced identifier
+ (?P<invalid>) # Other ill-formed delimiter exprs
+ )
+ """
+ cls.pattern = _re.compile(pattern, cls.flags | _re.VERBOSE)
+
def __init__(self, template):
self.template = template
@@ -101,11 +101,11 @@ class Template:
raise ValueError('Invalid placeholder in string: line %d, col %d' %
(lineno, colno))
- def substitute(self, mapping=_sentinel_dict, /, **kws):
- if mapping is _sentinel_dict:
+ def substitute(self, mapping=_sentinel_dict, /, **kws):
+ if mapping is _sentinel_dict:
mapping = kws
elif kws:
- mapping = _ChainMap(kws, mapping)
+ mapping = _ChainMap(kws, mapping)
# Helper function for .sub()
def convert(mo):
# Check the most common path first.
@@ -120,11 +120,11 @@ class Template:
self.pattern)
return self.pattern.sub(convert, self.template)
- def safe_substitute(self, mapping=_sentinel_dict, /, **kws):
- if mapping is _sentinel_dict:
+ def safe_substitute(self, mapping=_sentinel_dict, /, **kws):
+ if mapping is _sentinel_dict:
mapping = kws
elif kws:
- mapping = _ChainMap(kws, mapping)
+ mapping = _ChainMap(kws, mapping)
# Helper function for .sub()
def convert(mo):
named = mo.group('named') or mo.group('braced')
@@ -141,9 +141,9 @@ class Template:
self.pattern)
return self.pattern.sub(convert, self.template)
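Because _ChainMap(kws, mapping) puts the keyword arguments first, they shadow entries in the positional mapping; for example:

    from string import Template

    t = Template('$who likes $what')
    t.substitute({'who': 'tim', 'what': 'ham'}, what='kung pao')
    # 'tim likes kung pao', the keyword argument wins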
-# Initialize Template.pattern. __init_subclass__() is automatically called
-# only for subclasses, not for the Template class itself.
-Template.__init_subclass__()
+# Initialize Template.pattern. __init_subclass__() is automatically called
+# only for subclasses, not for the Template class itself.
+Template.__init_subclass__()
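Since __init_subclass__() recompiles the pattern from the class attributes, a subclass only needs to override those attributes (PercentTemplate below is a hypothetical name used for illustration):

    import string

    class PercentTemplate(string.Template):
        delimiter = '%'   # the regex is rebuilt automatically for this subclass

    PercentTemplate('%greeting, %%name stays literal').substitute(greeting='hi')
    # 'hi, %name stays literal'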
########################################################################
@@ -157,7 +157,7 @@ Template.__init_subclass__()
# The field name parser is implemented in _string.formatter_field_name_split
class Formatter:
- def format(self, format_string, /, *args, **kwargs):
+ def format(self, format_string, /, *args, **kwargs):
return self.vformat(format_string, args, kwargs)
def vformat(self, format_string, args, kwargs):
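Formatter exposes the str.format() mini-language through overridable hooks; direct use looks like:

    from string import Formatter

    fmt = Formatter()
    fmt.format('{0} + {1} = {total}', 2, 3, total=5)   # '2 + 3 = 5'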
diff --git a/contrib/tools/python3/src/Lib/subprocess.py b/contrib/tools/python3/src/Lib/subprocess.py
index 4effc1d8b3..bcae14a411 100644
--- a/contrib/tools/python3/src/Lib/subprocess.py
+++ b/contrib/tools/python3/src/Lib/subprocess.py
@@ -41,66 +41,66 @@ getstatusoutput(...): Runs a command in the shell, waits for it to complete,
then returns a (exitcode, output) tuple
"""
-import builtins
-import errno
+import builtins
+import errno
import io
import os
import time
import signal
-import sys
-import threading
+import sys
+import threading
import warnings
-import contextlib
+import contextlib
from time import monotonic as _time
-import types
-
-try:
- import pwd
-except ImportError:
- pwd = None
-try:
- import grp
-except ImportError:
- grp = None
-
-__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "getstatusoutput",
- "getoutput", "check_output", "run", "CalledProcessError", "DEVNULL",
- "SubprocessError", "TimeoutExpired", "CompletedProcess"]
- # NOTE: We intentionally exclude list2cmdline as it is
- # considered an internal implementation detail. issue10838.
-
-try:
- import msvcrt
- import _winapi
- _mswindows = True
-except ModuleNotFoundError:
- _mswindows = False
- import _posixsubprocess
- import select
- import selectors
-else:
- from _winapi import (CREATE_NEW_CONSOLE, CREATE_NEW_PROCESS_GROUP,
- STD_INPUT_HANDLE, STD_OUTPUT_HANDLE,
- STD_ERROR_HANDLE, SW_HIDE,
- STARTF_USESTDHANDLES, STARTF_USESHOWWINDOW,
- ABOVE_NORMAL_PRIORITY_CLASS, BELOW_NORMAL_PRIORITY_CLASS,
- HIGH_PRIORITY_CLASS, IDLE_PRIORITY_CLASS,
- NORMAL_PRIORITY_CLASS, REALTIME_PRIORITY_CLASS,
- CREATE_NO_WINDOW, DETACHED_PROCESS,
- CREATE_DEFAULT_ERROR_MODE, CREATE_BREAKAWAY_FROM_JOB)
-
- __all__.extend(["CREATE_NEW_CONSOLE", "CREATE_NEW_PROCESS_GROUP",
- "STD_INPUT_HANDLE", "STD_OUTPUT_HANDLE",
- "STD_ERROR_HANDLE", "SW_HIDE",
- "STARTF_USESTDHANDLES", "STARTF_USESHOWWINDOW",
- "STARTUPINFO",
- "ABOVE_NORMAL_PRIORITY_CLASS", "BELOW_NORMAL_PRIORITY_CLASS",
- "HIGH_PRIORITY_CLASS", "IDLE_PRIORITY_CLASS",
- "NORMAL_PRIORITY_CLASS", "REALTIME_PRIORITY_CLASS",
- "CREATE_NO_WINDOW", "DETACHED_PROCESS",
- "CREATE_DEFAULT_ERROR_MODE", "CREATE_BREAKAWAY_FROM_JOB"])
-
-
+import types
+
+try:
+ import pwd
+except ImportError:
+ pwd = None
+try:
+ import grp
+except ImportError:
+ grp = None
+
+__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "getstatusoutput",
+ "getoutput", "check_output", "run", "CalledProcessError", "DEVNULL",
+ "SubprocessError", "TimeoutExpired", "CompletedProcess"]
+ # NOTE: We intentionally exclude list2cmdline as it is
+ # considered an internal implementation detail. issue10838.
+
+try:
+ import msvcrt
+ import _winapi
+ _mswindows = True
+except ModuleNotFoundError:
+ _mswindows = False
+ import _posixsubprocess
+ import select
+ import selectors
+else:
+ from _winapi import (CREATE_NEW_CONSOLE, CREATE_NEW_PROCESS_GROUP,
+ STD_INPUT_HANDLE, STD_OUTPUT_HANDLE,
+ STD_ERROR_HANDLE, SW_HIDE,
+ STARTF_USESTDHANDLES, STARTF_USESHOWWINDOW,
+ ABOVE_NORMAL_PRIORITY_CLASS, BELOW_NORMAL_PRIORITY_CLASS,
+ HIGH_PRIORITY_CLASS, IDLE_PRIORITY_CLASS,
+ NORMAL_PRIORITY_CLASS, REALTIME_PRIORITY_CLASS,
+ CREATE_NO_WINDOW, DETACHED_PROCESS,
+ CREATE_DEFAULT_ERROR_MODE, CREATE_BREAKAWAY_FROM_JOB)
+
+ __all__.extend(["CREATE_NEW_CONSOLE", "CREATE_NEW_PROCESS_GROUP",
+ "STD_INPUT_HANDLE", "STD_OUTPUT_HANDLE",
+ "STD_ERROR_HANDLE", "SW_HIDE",
+ "STARTF_USESTDHANDLES", "STARTF_USESHOWWINDOW",
+ "STARTUPINFO",
+ "ABOVE_NORMAL_PRIORITY_CLASS", "BELOW_NORMAL_PRIORITY_CLASS",
+ "HIGH_PRIORITY_CLASS", "IDLE_PRIORITY_CLASS",
+ "NORMAL_PRIORITY_CLASS", "REALTIME_PRIORITY_CLASS",
+ "CREATE_NO_WINDOW", "DETACHED_PROCESS",
+ "CREATE_DEFAULT_ERROR_MODE", "CREATE_BREAKAWAY_FROM_JOB"])
+
+
# Exception classes used by this module.
class SubprocessError(Exception): pass
@@ -181,7 +181,7 @@ if _mswindows:
self.wShowWindow = wShowWindow
self.lpAttributeList = lpAttributeList or {"handle_list": []}
- def copy(self):
+ def copy(self):
attr_list = self.lpAttributeList.copy()
if 'handle_list' in attr_list:
attr_list['handle_list'] = list(attr_list['handle_list'])
@@ -212,54 +212,54 @@ if _mswindows:
return "%s(%d)" % (self.__class__.__name__, int(self))
__del__ = Close
-else:
- # When select or poll has indicated that the file is writable,
- # we can write up to _PIPE_BUF bytes without risk of blocking.
- # POSIX defines PIPE_BUF as >= 512.
- _PIPE_BUF = getattr(select, 'PIPE_BUF', 512)
-
- # poll/select have the advantage of not requiring any extra file
- # descriptor, unlike epoll/kqueue (also, they require a single
- # syscall).
- if hasattr(selectors, 'PollSelector'):
- _PopenSelector = selectors.PollSelector
- else:
- _PopenSelector = selectors.SelectSelector
-
-
-if _mswindows:
- # On Windows we just need to close `Popen._handle` when we no longer need
- # it, so that the kernel can free it. `Popen._handle` gets closed
- # implicitly when the `Popen` instance is finalized (see `Handle.__del__`,
- # which is calling `CloseHandle` as requested in [1]), so there is nothing
- # for `_cleanup` to do.
- #
- # [1] https://docs.microsoft.com/en-us/windows/desktop/ProcThread/
- # creating-processes
- _active = None
-
- def _cleanup():
- pass
-else:
- # This list holds Popen instances for which the underlying process had not
- # exited at the time its __del__ method got called: those processes are
- # wait()ed for synchronously from _cleanup() when a new Popen object is
- # created, to avoid zombie processes.
- _active = []
-
- def _cleanup():
- if _active is None:
- return
- for inst in _active[:]:
- res = inst._internal_poll(_deadstate=sys.maxsize)
- if res is not None:
- try:
- _active.remove(inst)
- except ValueError:
- # This can happen if two threads create a new Popen instance.
- # It's harmless that it was already removed, so ignore.
- pass
-
+else:
+ # When select or poll has indicated that the file is writable,
+ # we can write up to _PIPE_BUF bytes without risk of blocking.
+ # POSIX defines PIPE_BUF as >= 512.
+ _PIPE_BUF = getattr(select, 'PIPE_BUF', 512)
+
+ # poll/select have the advantage of not requiring any extra file
+ # descriptor, unlike epoll/kqueue (also, they require a single
+ # syscall).
+ if hasattr(selectors, 'PollSelector'):
+ _PopenSelector = selectors.PollSelector
+ else:
+ _PopenSelector = selectors.SelectSelector
+
+
+if _mswindows:
+ # On Windows we just need to close `Popen._handle` when we no longer need
+ # it, so that the kernel can free it. `Popen._handle` gets closed
+ # implicitly when the `Popen` instance is finalized (see `Handle.__del__`,
+ # which is calling `CloseHandle` as requested in [1]), so there is nothing
+ # for `_cleanup` to do.
+ #
+ # [1] https://docs.microsoft.com/en-us/windows/desktop/ProcThread/
+ # creating-processes
+ _active = None
+
+ def _cleanup():
+ pass
+else:
+ # This list holds Popen instances for which the underlying process had not
+ # exited at the time its __del__ method got called: those processes are
+ # wait()ed for synchronously from _cleanup() when a new Popen object is
+ # created, to avoid zombie processes.
+ _active = []
+
+ def _cleanup():
+ if _active is None:
+ return
+ for inst in _active[:]:
+ res = inst._internal_poll(_deadstate=sys.maxsize)
+ if res is not None:
+ try:
+ _active.remove(inst)
+ except ValueError:
+ # This can happen if two threads create a new Popen instance.
+ # It's harmless that it was already removed, so ignore.
+ pass
+
PIPE = -1
STDOUT = -2
DEVNULL = -3
@@ -326,7 +326,7 @@ def _args_from_interpreter_flags():
if dev_mode:
args.extend(('-X', 'dev'))
for opt in ('faulthandler', 'tracemalloc', 'importtime',
- 'showrefcount', 'utf8', 'oldparser'):
+ 'showrefcount', 'utf8', 'oldparser'):
if opt in xoptions:
value = xoptions[opt]
if value is True:
@@ -404,7 +404,7 @@ def check_output(*popenargs, timeout=None, **kwargs):
b'when in the course of barman events\n'
By default, all communication is in bytes, and therefore any "input"
- should be bytes, and the return value will be bytes. If in text mode,
+ should be bytes, and the return value will be bytes. If in text mode,
any "input" should be a string, and the return value will be a string
decoded according to locale encoding, or by "encoding" if set. Text mode
is triggered by setting any of text, encoding, errors or universal_newlines.
@@ -415,11 +415,11 @@ def check_output(*popenargs, timeout=None, **kwargs):
if 'input' in kwargs and kwargs['input'] is None:
# Explicitly passing input=None was previously equivalent to passing an
# empty string. That is maintained here for backwards compatibility.
- if kwargs.get('universal_newlines') or kwargs.get('text'):
- empty = ''
- else:
- empty = b''
- kwargs['input'] = empty
+ if kwargs.get('universal_newlines') or kwargs.get('text'):
+ empty = ''
+ else:
+ empty = b''
+ kwargs['input'] = empty
return run(*popenargs, stdout=PIPE, timeout=timeout, check=True,
**kwargs).stdout
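In other words, input=None still means "feed the child an empty stream" rather than "inherit the parent's stdin"; for example (assuming a POSIX cat binary):

    import subprocess

    subprocess.check_output(['cat'], input=None)       # b'', same as input=b''
    subprocess.check_output(['cat'], input=b'hello')   # b'hello'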
@@ -451,9 +451,9 @@ class CompletedProcess(object):
args.append('stderr={!r}'.format(self.stderr))
return "{}({})".format(type(self).__name__, ', '.join(args))
- __class_getitem__ = classmethod(types.GenericAlias)
-
-
+ __class_getitem__ = classmethod(types.GenericAlias)
+
+
def check_returncode(self):
"""Raise CalledProcessError if the exit code is non-zero."""
if self.returncode:
@@ -491,12 +491,12 @@ def run(*popenargs,
The other arguments are the same as for the Popen constructor.
"""
if input is not None:
- if kwargs.get('stdin') is not None:
+ if kwargs.get('stdin') is not None:
raise ValueError('stdin and input arguments may not both be used.')
kwargs['stdin'] = PIPE
if capture_output:
- if kwargs.get('stdout') is not None or kwargs.get('stderr') is not None:
+ if kwargs.get('stdout') is not None or kwargs.get('stderr') is not None:
raise ValueError('stdout and stderr arguments may not be used '
'with capture_output.')
kwargs['stdout'] = PIPE
@@ -505,20 +505,20 @@ def run(*popenargs,
with Popen(*popenargs, **kwargs) as process:
try:
stdout, stderr = process.communicate(input, timeout=timeout)
- except TimeoutExpired as exc:
+ except TimeoutExpired as exc:
process.kill()
- if _mswindows:
- # Windows accumulates the output in a single blocking
- # read() call run on child threads, with the timeout
- # being done in a join() on those threads. communicate()
- # _after_ kill() is required to collect that and add it
- # to the exception.
- exc.stdout, exc.stderr = process.communicate()
- else:
- # POSIX _communicate already populated the output so
- # far into the TimeoutExpired exception.
- process.wait()
- raise
+ if _mswindows:
+ # Windows accumulates the output in a single blocking
+ # read() call run on child threads, with the timeout
+ # being done in a join() on those threads. communicate()
+ # _after_ kill() is required to collect that and add it
+ # to the exception.
+ exc.stdout, exc.stderr = process.communicate()
+ else:
+ # POSIX _communicate already populated the output so
+ # far into the TimeoutExpired exception.
+ process.wait()
+ raise
except: # Including KeyboardInterrupt, communicate handled that.
process.kill()
# We don't call process.wait() as .__exit__ does that for us.
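The kill-then-collect sequence above is what lets TimeoutExpired carry any output produced before the deadline; from the caller's side (assuming a POSIX sleep binary):

    import subprocess

    try:
        subprocess.run(['sleep', '10'], timeout=1)
    except subprocess.TimeoutExpired as exc:
        print(f'{exc.cmd!r} timed out after {exc.timeout} seconds')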
@@ -562,7 +562,7 @@ def list2cmdline(seq):
# "Parsing C++ Command-Line Arguments"
result = []
needquote = False
- for arg in map(os.fsdecode, seq):
+ for arg in map(os.fsdecode, seq):
bs_buf = []
# Add a space to separate this argument from the others
@@ -647,56 +647,56 @@ def getoutput(cmd):
return getstatusoutput(cmd)[1]
-def _use_posix_spawn():
- """Check if posix_spawn() can be used for subprocess.
-
- subprocess requires a posix_spawn() implementation that properly reports
- errors to the parent process, and sets errno on the following failures:
-
- * Process attribute actions failed.
- * File actions failed.
- * exec() failed.
-
- Prefer an implementation which can use vfork() in some cases for best
- performance.
- """
- if _mswindows or not hasattr(os, 'posix_spawn'):
- # os.posix_spawn() is not available
- return False
-
- if sys.platform == 'darwin':
- # posix_spawn() is a syscall on macOS and properly reports errors
- return True
-
- # Check libc name and runtime libc version
- try:
- ver = os.confstr('CS_GNU_LIBC_VERSION')
- # parse 'glibc 2.28' as ('glibc', (2, 28))
- parts = ver.split(maxsplit=1)
- if len(parts) != 2:
- # reject unknown format
- raise ValueError
- libc = parts[0]
- version = tuple(map(int, parts[1].split('.')))
-
- if sys.platform == 'linux' and libc == 'glibc' and version >= (2, 24):
- # glibc 2.24 has a new Linux posix_spawn implementation using vfork
- # which properly reports errors to the parent process.
- return True
- # Note: Don't use the implementation in earlier glibc because it doesn't
- # use vfork (even if glibc 2.26 added a pipe to properly report errors
- # to the parent process).
- except (AttributeError, ValueError, OSError):
- # os.confstr() or CS_GNU_LIBC_VERSION value not available
- pass
-
- # By default, assume that posix_spawn() does not properly report errors.
- return False
-
-
-_USE_POSIX_SPAWN = _use_posix_spawn()
-
-
+def _use_posix_spawn():
+ """Check if posix_spawn() can be used for subprocess.
+
+ subprocess requires a posix_spawn() implementation that properly reports
+ errors to the parent process, and sets errno on the following failures:
+
+ * Process attribute actions failed.
+ * File actions failed.
+ * exec() failed.
+
+ Prefer an implementation which can use vfork() in some cases for best
+ performance.
+ """
+ if _mswindows or not hasattr(os, 'posix_spawn'):
+ # os.posix_spawn() is not available
+ return False
+
+ if sys.platform == 'darwin':
+ # posix_spawn() is a syscall on macOS and properly reports errors
+ return True
+
+ # Check libc name and runtime libc version
+ try:
+ ver = os.confstr('CS_GNU_LIBC_VERSION')
+ # parse 'glibc 2.28' as ('glibc', (2, 28))
+ parts = ver.split(maxsplit=1)
+ if len(parts) != 2:
+ # reject unknown format
+ raise ValueError
+ libc = parts[0]
+ version = tuple(map(int, parts[1].split('.')))
+
+ if sys.platform == 'linux' and libc == 'glibc' and version >= (2, 24):
+ # glibc 2.24 has a new Linux posix_spawn implementation using vfork
+ # which properly reports errors to the parent process.
+ return True
+ # Note: Don't use the implementation in earlier glibc because it doesn't
+ # use vfork (even if glibc 2.26 added a pipe to properly report errors
+ # to the parent process).
+ except (AttributeError, ValueError, OSError):
+ # os.confstr() or CS_GNU_LIBC_VERSION value not available
+ pass
+
+ # By default, assume that posix_spawn() does not properly report errors.
+ return False
+
+
+_USE_POSIX_SPAWN = _use_posix_spawn()
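The probe keys off os.confstr(); on a glibc-based Linux it reports a string such as the one below, and the except clause covers platforms where the call or the key is missing:

    import os

    os.confstr('CS_GNU_LIBC_VERSION')   # e.g. 'glibc 2.31' on glibc-based Linux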
+
+
class Popen(object):
""" Execute a child program in a new process.
@@ -735,14 +735,14 @@ class Popen(object):
start_new_session (POSIX only)
- group (POSIX only)
-
- extra_groups (POSIX only)
-
- user (POSIX only)
-
- umask (POSIX only)
-
+ group (POSIX only)
+
+ extra_groups (POSIX only)
+
+ user (POSIX only)
+
+ umask (POSIX only)
+
pass_fds (POSIX only)
encoding and errors: Text mode encoding and error handling to use for
@@ -759,8 +759,8 @@ class Popen(object):
shell=False, cwd=None, env=None, universal_newlines=None,
startupinfo=None, creationflags=0,
restore_signals=True, start_new_session=False,
- pass_fds=(), *, user=None, group=None, extra_groups=None,
- encoding=None, errors=None, text=None, umask=-1):
+ pass_fds=(), *, user=None, group=None, extra_groups=None,
+ encoding=None, errors=None, text=None, umask=-1):
"""Create new Popen instance."""
_cleanup()
# Held while anything is calling waitpid before returncode has been
@@ -849,93 +849,93 @@ class Popen(object):
self._closed_child_pipe_fds = False
- if self.text_mode:
- if bufsize == 1:
- line_buffering = True
- # Use the default buffer size for the underlying binary streams
- # since they don't support line buffering.
- bufsize = -1
- else:
- line_buffering = False
-
- gid = None
- if group is not None:
- if not hasattr(os, 'setregid'):
- raise ValueError("The 'group' parameter is not supported on the "
- "current platform")
-
- elif isinstance(group, str):
- if grp is None:
- raise ValueError("The group parameter cannot be a string "
- "on systems without the grp module")
-
- gid = grp.getgrnam(group).gr_gid
- elif isinstance(group, int):
- gid = group
- else:
- raise TypeError("Group must be a string or an integer, not {}"
- .format(type(group)))
-
- if gid < 0:
- raise ValueError(f"Group ID cannot be negative, got {gid}")
-
- gids = None
- if extra_groups is not None:
- if not hasattr(os, 'setgroups'):
- raise ValueError("The 'extra_groups' parameter is not "
- "supported on the current platform")
-
- elif isinstance(extra_groups, str):
- raise ValueError("Groups must be a list, not a string")
-
- gids = []
- for extra_group in extra_groups:
- if isinstance(extra_group, str):
- if grp is None:
- raise ValueError("Items in extra_groups cannot be "
- "strings on systems without the "
- "grp module")
-
- gids.append(grp.getgrnam(extra_group).gr_gid)
- elif isinstance(extra_group, int):
- gids.append(extra_group)
- else:
- raise TypeError("Items in extra_groups must be a string "
- "or integer, not {}"
- .format(type(extra_group)))
-
- # make sure that the gids are all positive here so we can do less
- # checking in the C code
- for gid_check in gids:
- if gid_check < 0:
- raise ValueError(f"Group ID cannot be negative, got {gid_check}")
-
- uid = None
- if user is not None:
- if not hasattr(os, 'setreuid'):
- raise ValueError("The 'user' parameter is not supported on "
- "the current platform")
-
- elif isinstance(user, str):
- if pwd is None:
- raise ValueError("The user parameter cannot be a string "
- "on systems without the pwd module")
-
- uid = pwd.getpwnam(user).pw_uid
- elif isinstance(user, int):
- uid = user
- else:
- raise TypeError("User must be a string or an integer")
-
- if uid < 0:
- raise ValueError(f"User ID cannot be negative, got {uid}")
-
+ if self.text_mode:
+ if bufsize == 1:
+ line_buffering = True
+ # Use the default buffer size for the underlying binary streams
+ # since they don't support line buffering.
+ bufsize = -1
+ else:
+ line_buffering = False
+
+ gid = None
+ if group is not None:
+ if not hasattr(os, 'setregid'):
+ raise ValueError("The 'group' parameter is not supported on the "
+ "current platform")
+
+ elif isinstance(group, str):
+ if grp is None:
+ raise ValueError("The group parameter cannot be a string "
+ "on systems without the grp module")
+
+ gid = grp.getgrnam(group).gr_gid
+ elif isinstance(group, int):
+ gid = group
+ else:
+ raise TypeError("Group must be a string or an integer, not {}"
+ .format(type(group)))
+
+ if gid < 0:
+ raise ValueError(f"Group ID cannot be negative, got {gid}")
+
+ gids = None
+ if extra_groups is not None:
+ if not hasattr(os, 'setgroups'):
+ raise ValueError("The 'extra_groups' parameter is not "
+ "supported on the current platform")
+
+ elif isinstance(extra_groups, str):
+ raise ValueError("Groups must be a list, not a string")
+
+ gids = []
+ for extra_group in extra_groups:
+ if isinstance(extra_group, str):
+ if grp is None:
+ raise ValueError("Items in extra_groups cannot be "
+ "strings on systems without the "
+ "grp module")
+
+ gids.append(grp.getgrnam(extra_group).gr_gid)
+ elif isinstance(extra_group, int):
+ gids.append(extra_group)
+ else:
+ raise TypeError("Items in extra_groups must be a string "
+ "or integer, not {}"
+ .format(type(extra_group)))
+
+ # make sure that the gids are all positive here so we can do less
+ # checking in the C code
+ for gid_check in gids:
+ if gid_check < 0:
+ raise ValueError(f"Group ID cannot be negative, got {gid_check}")
+
+ uid = None
+ if user is not None:
+ if not hasattr(os, 'setreuid'):
+ raise ValueError("The 'user' parameter is not supported on "
+ "the current platform")
+
+ elif isinstance(user, str):
+ if pwd is None:
+ raise ValueError("The user parameter cannot be a string "
+ "on systems without the pwd module")
+
+ uid = pwd.getpwnam(user).pw_uid
+ elif isinstance(user, int):
+ uid = user
+ else:
+ raise TypeError("User must be a string or an integer")
+
+ if uid < 0:
+ raise ValueError(f"User ID cannot be negative, got {uid}")
+
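The name-to-id resolution above leans on the grp and pwd modules directly; a minimal sketch of the same lookups (POSIX-only, and "nobody" is just an assumed account name for illustration):

    import grp, pwd

    uid = pwd.getpwnam("nobody").pw_uid   # name -> numeric uid, as Popen does
    gid = grp.getgrnam("nobody").gr_gid   # name -> numeric gid
    print(uid, gid)                       # getpwnam/getgrnam raise KeyError if absent
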
try:
if p2cwrite != -1:
self.stdin = io.open(p2cwrite, 'wb', bufsize)
if self.text_mode:
self.stdin = io.TextIOWrapper(self.stdin, write_through=True,
- line_buffering=line_buffering,
+ line_buffering=line_buffering,
encoding=encoding, errors=errors)
if c2pread != -1:
self.stdout = io.open(c2pread, 'rb', bufsize)
@@ -954,9 +954,9 @@ class Popen(object):
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite,
- restore_signals,
- gid, gids, uid, umask,
- start_new_session)
+ restore_signals,
+ gid, gids, uid, umask,
+ start_new_session)
except:
# Cleanup if the child failed starting.
for f in filter(None, (self.stdin, self.stdout, self.stderr)):
@@ -986,17 +986,17 @@ class Popen(object):
raise
- def __repr__(self):
- obj_repr = (
- f"<{self.__class__.__name__}: "
- f"returncode: {self.returncode} args: {self.args!r}>"
- )
- if len(obj_repr) > 80:
- obj_repr = obj_repr[:76] + "...>"
- return obj_repr
-
- __class_getitem__ = classmethod(types.GenericAlias)
-
+ def __repr__(self):
+ obj_repr = (
+ f"<{self.__class__.__name__}: "
+ f"returncode: {self.returncode} args: {self.args!r}>"
+ )
+ if len(obj_repr) > 80:
+ obj_repr = obj_repr[:76] + "...>"
+ return obj_repr
+
+ __class_getitem__ = classmethod(types.GenericAlias)
+
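A quick illustration of the two additions above: the repr is capped at 80 characters, and the GenericAlias hook makes Popen subscriptable (Python 3.9+); "sleep" is just an assumed POSIX command:

    import subprocess

    p = subprocess.Popen(["sleep", "0"])
    print(repr(p))                  # <Popen: returncode: None args: ['sleep', '0']>
    p.wait()
    print(subprocess.Popen[bytes])  # subprocess.Popen[bytes], via __class_getitem__
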
@property
def universal_newlines(self):
        # universal_newlines was retained as an alias of text_mode for API
@@ -1169,16 +1169,16 @@ class Popen(object):
return endtime - _time()
- def _check_timeout(self, endtime, orig_timeout, stdout_seq, stderr_seq,
- skip_check_and_raise=False):
+ def _check_timeout(self, endtime, orig_timeout, stdout_seq, stderr_seq,
+ skip_check_and_raise=False):
"""Convenience for checking if a timeout has expired."""
if endtime is None:
return
- if skip_check_and_raise or _time() > endtime:
- raise TimeoutExpired(
- self.args, orig_timeout,
- output=b''.join(stdout_seq) if stdout_seq else None,
- stderr=b''.join(stderr_seq) if stderr_seq else None)
+ if skip_check_and_raise or _time() > endtime:
+ raise TimeoutExpired(
+ self.args, orig_timeout,
+ output=b''.join(stdout_seq) if stdout_seq else None,
+ stderr=b''.join(stderr_seq) if stderr_seq else None)
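From the caller's side, _check_timeout() surfaces as TimeoutExpired carrying whatever partial output was collected; a short sketch (again assuming a POSIX "sleep" binary):

    import subprocess

    try:
        subprocess.run(["sleep", "5"], capture_output=True, timeout=0.1)
    except subprocess.TimeoutExpired as exc:
        print(exc.timeout, exc.output)   # 0.1 and any partial stdout (possibly None)
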
def wait(self, timeout=None):
@@ -1204,35 +1204,35 @@ class Popen(object):
pass
raise # resume the KeyboardInterrupt
- def _close_pipe_fds(self,
- p2cread, p2cwrite,
- c2pread, c2pwrite,
- errread, errwrite):
- # self._devnull is not always defined.
- devnull_fd = getattr(self, '_devnull', None)
-
- with contextlib.ExitStack() as stack:
- if _mswindows:
- if p2cread != -1:
- stack.callback(p2cread.Close)
- if c2pwrite != -1:
- stack.callback(c2pwrite.Close)
- if errwrite != -1:
- stack.callback(errwrite.Close)
- else:
- if p2cread != -1 and p2cwrite != -1 and p2cread != devnull_fd:
- stack.callback(os.close, p2cread)
- if c2pwrite != -1 and c2pread != -1 and c2pwrite != devnull_fd:
- stack.callback(os.close, c2pwrite)
- if errwrite != -1 and errread != -1 and errwrite != devnull_fd:
- stack.callback(os.close, errwrite)
-
- if devnull_fd is not None:
- stack.callback(os.close, devnull_fd)
-
- # Prevent a double close of these handles/fds from __init__ on error.
- self._closed_child_pipe_fds = True
-
+ def _close_pipe_fds(self,
+ p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite):
+ # self._devnull is not always defined.
+ devnull_fd = getattr(self, '_devnull', None)
+
+ with contextlib.ExitStack() as stack:
+ if _mswindows:
+ if p2cread != -1:
+ stack.callback(p2cread.Close)
+ if c2pwrite != -1:
+ stack.callback(c2pwrite.Close)
+ if errwrite != -1:
+ stack.callback(errwrite.Close)
+ else:
+ if p2cread != -1 and p2cwrite != -1 and p2cread != devnull_fd:
+ stack.callback(os.close, p2cread)
+ if c2pwrite != -1 and c2pread != -1 and c2pwrite != devnull_fd:
+ stack.callback(os.close, c2pwrite)
+ if errwrite != -1 and errread != -1 and errwrite != devnull_fd:
+ stack.callback(os.close, errwrite)
+
+ if devnull_fd is not None:
+ stack.callback(os.close, devnull_fd)
+
+ # Prevent a double close of these handles/fds from __init__ on error.
+ self._closed_child_pipe_fds = True
+
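The ExitStack pattern above guarantees every registered close runs even if an earlier one raises; the same idea in isolation:

    import contextlib, os

    r, w = os.pipe()
    with contextlib.ExitStack() as stack:
        stack.callback(os.close, r)   # callbacks run LIFO on exit
        stack.callback(os.close, w)   # ...even if the block body raises
    # both descriptors are closed here
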
if _mswindows:
#
# Windows methods
@@ -1337,38 +1337,38 @@ class Popen(object):
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite,
- unused_restore_signals,
- unused_gid, unused_gids, unused_uid,
- unused_umask,
- unused_start_new_session):
+ unused_restore_signals,
+ unused_gid, unused_gids, unused_uid,
+ unused_umask,
+ unused_start_new_session):
"""Execute program (MS Windows version)"""
assert not pass_fds, "pass_fds not supported on Windows."
- if isinstance(args, str):
- pass
- elif isinstance(args, bytes):
- if shell:
- raise TypeError('bytes args is not allowed on Windows')
- args = list2cmdline([args])
- elif isinstance(args, os.PathLike):
- if shell:
- raise TypeError('path-like args is not allowed when '
- 'shell is true')
- args = list2cmdline([args])
- else:
+ if isinstance(args, str):
+ pass
+ elif isinstance(args, bytes):
+ if shell:
+ raise TypeError('bytes args is not allowed on Windows')
+ args = list2cmdline([args])
+ elif isinstance(args, os.PathLike):
+ if shell:
+ raise TypeError('path-like args is not allowed when '
+ 'shell is true')
+ args = list2cmdline([args])
+ else:
args = list2cmdline(args)
- if executable is not None:
- executable = os.fsdecode(executable)
-
+ if executable is not None:
+ executable = os.fsdecode(executable)
+
# Process startup details
if startupinfo is None:
startupinfo = STARTUPINFO()
else:
# bpo-34044: Copy STARTUPINFO since it is modified above,
# so the caller can reuse it multiple times.
- startupinfo = startupinfo.copy()
+ startupinfo = startupinfo.copy()
use_std_handles = -1 not in (p2cread, c2pwrite, errwrite)
if use_std_handles:
@@ -1410,11 +1410,11 @@ class Popen(object):
comspec = os.environ.get("COMSPEC", "cmd.exe")
            args = '{} /c "{}"'.format(comspec, args)
- if cwd is not None:
- cwd = os.fsdecode(cwd)
-
- sys.audit("subprocess.Popen", executable, args, cwd, env)
-
+ if cwd is not None:
+ cwd = os.fsdecode(cwd)
+
+ sys.audit("subprocess.Popen", executable, args, cwd, env)
+
# Start the process
try:
hp, ht, pid, tid = _winapi.CreateProcess(executable, args,
@@ -1423,7 +1423,7 @@ class Popen(object):
int(not close_fds),
creationflags,
env,
- cwd,
+ cwd,
startupinfo)
finally:
# Child is launched. Close the parent's copy of those pipe
@@ -1432,9 +1432,9 @@ class Popen(object):
# output pipe are maintained in this process or else the
# pipe will not close when the child process exits and the
# ReadFile will hang.
- self._close_pipe_fds(p2cread, p2cwrite,
- c2pread, c2pwrite,
- errread, errwrite)
+ self._close_pipe_fds(p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite)
# Retain the process handle, but close the thread handle
self._child_created = True
@@ -1525,8 +1525,8 @@ class Popen(object):
self.stderr.close()
# All data exchanged. Translate lists into strings.
- stdout = stdout[0] if stdout else None
- stderr = stderr[0] if stderr else None
+ stdout = stdout[0] if stdout else None
+ stderr = stderr[0] if stderr else None
return (stdout, stderr)
@@ -1619,63 +1619,63 @@ class Popen(object):
errread, errwrite)
- def _posix_spawn(self, args, executable, env, restore_signals,
- p2cread, p2cwrite,
- c2pread, c2pwrite,
- errread, errwrite):
- """Execute program using os.posix_spawn()."""
- if env is None:
- env = os.environ
-
- kwargs = {}
- if restore_signals:
- # See _Py_RestoreSignals() in Python/pylifecycle.c
- sigset = []
- for signame in ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ'):
- signum = getattr(signal, signame, None)
- if signum is not None:
- sigset.append(signum)
- kwargs['setsigdef'] = sigset
-
- file_actions = []
- for fd in (p2cwrite, c2pread, errread):
- if fd != -1:
- file_actions.append((os.POSIX_SPAWN_CLOSE, fd))
- for fd, fd2 in (
- (p2cread, 0),
- (c2pwrite, 1),
- (errwrite, 2),
- ):
- if fd != -1:
- file_actions.append((os.POSIX_SPAWN_DUP2, fd, fd2))
- if file_actions:
- kwargs['file_actions'] = file_actions
-
- self.pid = os.posix_spawn(executable, args, env, **kwargs)
- self._child_created = True
-
- self._close_pipe_fds(p2cread, p2cwrite,
- c2pread, c2pwrite,
- errread, errwrite)
-
+ def _posix_spawn(self, args, executable, env, restore_signals,
+ p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite):
+ """Execute program using os.posix_spawn()."""
+ if env is None:
+ env = os.environ
+
+ kwargs = {}
+ if restore_signals:
+ # See _Py_RestoreSignals() in Python/pylifecycle.c
+ sigset = []
+ for signame in ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ'):
+ signum = getattr(signal, signame, None)
+ if signum is not None:
+ sigset.append(signum)
+ kwargs['setsigdef'] = sigset
+
+ file_actions = []
+ for fd in (p2cwrite, c2pread, errread):
+ if fd != -1:
+ file_actions.append((os.POSIX_SPAWN_CLOSE, fd))
+ for fd, fd2 in (
+ (p2cread, 0),
+ (c2pwrite, 1),
+ (errwrite, 2),
+ ):
+ if fd != -1:
+ file_actions.append((os.POSIX_SPAWN_DUP2, fd, fd2))
+ if file_actions:
+ kwargs['file_actions'] = file_actions
+
+ self.pid = os.posix_spawn(executable, args, env, **kwargs)
+ self._child_created = True
+
+ self._close_pipe_fds(p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite)
+
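A standalone sketch of the os.posix_spawn() call built above (POSIX-only; /bin/echo is an assumed path), wiring the child's stdout to a pipe with the same kind of file_actions list:

    import os

    r, w = os.pipe()
    pid = os.posix_spawn("/bin/echo", ["echo", "hello"], os.environ,
                         file_actions=[(os.POSIX_SPAWN_DUP2, w, 1),
                                       (os.POSIX_SPAWN_CLOSE, r)])
    os.close(w)              # parent drops its write end
    print(os.read(r, 100))   # b'hello\n'
    os.close(r)
    os.waitpid(pid, 0)
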
def _execute_child(self, args, executable, preexec_fn, close_fds,
pass_fds, cwd, env,
startupinfo, creationflags, shell,
p2cread, p2cwrite,
c2pread, c2pwrite,
errread, errwrite,
- restore_signals,
- gid, gids, uid, umask,
- start_new_session):
+ restore_signals,
+ gid, gids, uid, umask,
+ start_new_session):
"""Execute program (POSIX version)"""
if isinstance(args, (str, bytes)):
args = [args]
- elif isinstance(args, os.PathLike):
- if shell:
- raise TypeError('path-like args is not allowed when '
- 'shell is true')
- args = [args]
+ elif isinstance(args, os.PathLike):
+ if shell:
+ raise TypeError('path-like args is not allowed when '
+ 'shell is true')
+ args = [args]
else:
args = list(args)
@@ -1689,29 +1689,29 @@ class Popen(object):
if executable is None:
executable = args[0]
-
- sys.audit("subprocess.Popen", executable, args, cwd, env)
-
- if (_USE_POSIX_SPAWN
- and os.path.dirname(executable)
- and preexec_fn is None
- and not close_fds
- and not pass_fds
- and cwd is None
- and (p2cread == -1 or p2cread > 2)
- and (c2pwrite == -1 or c2pwrite > 2)
- and (errwrite == -1 or errwrite > 2)
- and not start_new_session
- and gid is None
- and gids is None
- and uid is None
- and umask < 0):
- self._posix_spawn(args, executable, env, restore_signals,
- p2cread, p2cwrite,
- c2pread, c2pwrite,
- errread, errwrite)
- return
-
+
+ sys.audit("subprocess.Popen", executable, args, cwd, env)
+
+ if (_USE_POSIX_SPAWN
+ and os.path.dirname(executable)
+ and preexec_fn is None
+ and not close_fds
+ and not pass_fds
+ and cwd is None
+ and (p2cread == -1 or p2cread > 2)
+ and (c2pwrite == -1 or c2pwrite > 2)
+ and (errwrite == -1 or errwrite > 2)
+ and not start_new_session
+ and gid is None
+ and gids is None
+ and uid is None
+ and umask < 0):
+ self._posix_spawn(args, executable, env, restore_signals,
+ p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite)
+ return
+
orig_executable = executable
# For transferring possible exec failure from child to parent.
@@ -1758,17 +1758,17 @@ class Popen(object):
p2cread, p2cwrite, c2pread, c2pwrite,
errread, errwrite,
errpipe_read, errpipe_write,
- restore_signals, start_new_session,
- gid, gids, uid, umask,
- preexec_fn)
+ restore_signals, start_new_session,
+ gid, gids, uid, umask,
+ preexec_fn)
self._child_created = True
finally:
# be sure the FD is closed no matter what
os.close(errpipe_write)
- self._close_pipe_fds(p2cread, p2cwrite,
- c2pread, c2pwrite,
- errread, errwrite)
+ self._close_pipe_fds(p2cread, p2cwrite,
+ c2pread, c2pwrite,
+ errread, errwrite)
# Wait for exec to fail or succeed; possibly raising an
# exception (limited in size)
@@ -1822,17 +1822,17 @@ class Popen(object):
raise child_exception_type(err_msg)
- def _handle_exitstatus(self, sts,
- waitstatus_to_exitcode=os.waitstatus_to_exitcode,
- _WIFSTOPPED=os.WIFSTOPPED,
- _WSTOPSIG=os.WSTOPSIG):
+ def _handle_exitstatus(self, sts,
+ waitstatus_to_exitcode=os.waitstatus_to_exitcode,
+ _WIFSTOPPED=os.WIFSTOPPED,
+ _WSTOPSIG=os.WSTOPSIG):
"""All callers to this function MUST hold self._waitpid_lock."""
# This method is called (indirectly) by __del__, so it cannot
# refer to anything outside of its local scope.
- if _WIFSTOPPED(sts):
+ if _WIFSTOPPED(sts):
self.returncode = -_WSTOPSIG(sts)
else:
- self.returncode = waitstatus_to_exitcode(sts)
+ self.returncode = waitstatus_to_exitcode(sts)
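os.waitstatus_to_exitcode() (3.9+) collapses the raw waitpid() status exactly as used above: exit(N) maps to N, death by a signal to -signum. A minimal POSIX-only sketch:

    import os

    pid = os.fork()
    if pid == 0:
        os._exit(7)                         # child exits with status 7
    _, sts = os.waitpid(pid, 0)
    print(os.waitstatus_to_exitcode(sts))   # 7
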
def _internal_poll(self, _deadstate=None, _waitpid=os.waitpid,
_WNOHANG=os.WNOHANG, _ECHILD=errno.ECHILD):
@@ -1961,23 +1961,23 @@ class Popen(object):
with _PopenSelector() as selector:
if self.stdin and input:
selector.register(self.stdin, selectors.EVENT_WRITE)
- if self.stdout and not self.stdout.closed:
+ if self.stdout and not self.stdout.closed:
selector.register(self.stdout, selectors.EVENT_READ)
- if self.stderr and not self.stderr.closed:
+ if self.stderr and not self.stderr.closed:
selector.register(self.stderr, selectors.EVENT_READ)
while selector.get_map():
timeout = self._remaining_time(endtime)
if timeout is not None and timeout < 0:
- self._check_timeout(endtime, orig_timeout,
- stdout, stderr,
- skip_check_and_raise=True)
- raise RuntimeError( # Impossible :)
- '_check_timeout(..., skip_check_and_raise=True) '
- 'failed to raise TimeoutExpired.')
+ self._check_timeout(endtime, orig_timeout,
+ stdout, stderr,
+ skip_check_and_raise=True)
+ raise RuntimeError( # Impossible :)
+ '_check_timeout(..., skip_check_and_raise=True) '
+ 'failed to raise TimeoutExpired.')
ready = selector.select(timeout)
- self._check_timeout(endtime, orig_timeout, stdout, stderr)
+ self._check_timeout(endtime, orig_timeout, stdout, stderr)
# XXX Rewrite these to use non-blocking I/O on the file
# objects; they are no longer using C stdio!
@@ -2039,35 +2039,35 @@ class Popen(object):
def send_signal(self, sig):
"""Send a signal to the process."""
- # bpo-38630: Polling reduces the risk of sending a signal to the
- # wrong process if the process completed, the Popen.returncode
- # attribute is still None, and the pid has been reassigned
-        # (recycled) to a different process. This race condition can
-        # happen in two cases.
- #
- # Case 1. Thread A calls Popen.poll(), thread B calls
-        # Popen.send_signal(). In thread A, waitpid() succeeds and returns
- # the exit status. Thread B calls kill() because poll() in thread A
- # did not set returncode yet. Calling poll() in thread B prevents
- # the race condition thanks to Popen._waitpid_lock.
- #
- # Case 2. waitpid(pid, 0) has been called directly, without
-        # using Popen methods: returncode is still None in this case.
- # Calling Popen.poll() will set returncode to a default value,
- # since waitpid() fails with ProcessLookupError.
- self.poll()
- if self.returncode is not None:
- # Skip signalling a process that we know has already died.
- return
-
- # The race condition can still happen if the race condition
- # described above happens between the returncode test
- # and the kill() call.
- try:
+ # bpo-38630: Polling reduces the risk of sending a signal to the
+ # wrong process if the process completed, the Popen.returncode
+ # attribute is still None, and the pid has been reassigned
+        # (recycled) to a different process. This race condition can
+        # happen in two cases.
+ #
+ # Case 1. Thread A calls Popen.poll(), thread B calls
+        # Popen.send_signal(). In thread A, waitpid() succeeds and returns
+ # the exit status. Thread B calls kill() because poll() in thread A
+ # did not set returncode yet. Calling poll() in thread B prevents
+ # the race condition thanks to Popen._waitpid_lock.
+ #
+ # Case 2. waitpid(pid, 0) has been called directly, without
+        # using Popen methods: returncode is still None in this case.
+ # Calling Popen.poll() will set returncode to a default value,
+ # since waitpid() fails with ProcessLookupError.
+ self.poll()
+ if self.returncode is not None:
+ # Skip signalling a process that we know has already died.
+ return
+
+ # The race condition can still happen if the race condition
+ # described above happens between the returncode test
+ # and the kill() call.
+ try:
os.kill(self.pid, sig)
- except ProcessLookupError:
-            # Suppress the race condition error; bpo-40550.
- pass
+ except ProcessLookupError:
+            # Suppress the race condition error; bpo-40550.
+ pass
def terminate(self):
"""Terminate the process with SIGTERM
diff --git a/contrib/tools/python3/src/Lib/sunau.py b/contrib/tools/python3/src/Lib/sunau.py
index 79750a9d23..4cd420c315 100644
--- a/contrib/tools/python3/src/Lib/sunau.py
+++ b/contrib/tools/python3/src/Lib/sunau.py
@@ -105,7 +105,7 @@ is destroyed.
from collections import namedtuple
-
+
_sunau_params = namedtuple('_sunau_params',
'nchannels sampwidth framerate nframes comptype compname')
diff --git a/contrib/tools/python3/src/Lib/symbol.py b/contrib/tools/python3/src/Lib/symbol.py
index aaac8c9144..fee16642a0 100644
--- a/contrib/tools/python3/src/Lib/symbol.py
+++ b/contrib/tools/python3/src/Lib/symbol.py
@@ -5,21 +5,21 @@
# To update the symbols in this file, 'cd' to the top directory of
# the python source tree after building the interpreter and run:
#
-# python3 Tools/scripts/generate_symbol_py.py Include/graminit.h Lib/symbol.py
-#
-# or just
-#
-# make regen-symbol
-
-import warnings
-
-warnings.warn(
- "The symbol module is deprecated and will be removed "
- "in future versions of Python",
- DeprecationWarning,
- stacklevel=2,
-)
+# python3 Tools/scripts/generate_symbol_py.py Include/graminit.h Lib/symbol.py
+#
+# or just
+#
+# make regen-symbol
+import warnings
+
+warnings.warn(
+ "The symbol module is deprecated and will be removed "
+ "in future versions of Python",
+ DeprecationWarning,
+ stacklevel=2,
+)
+
#--start constants--
single_input = 256
file_input = 257
@@ -70,53 +70,53 @@ with_stmt = 301
with_item = 302
except_clause = 303
suite = 304
-namedexpr_test = 305
-test = 306
-test_nocond = 307
-lambdef = 308
-lambdef_nocond = 309
-or_test = 310
-and_test = 311
-not_test = 312
-comparison = 313
-comp_op = 314
-star_expr = 315
-expr = 316
-xor_expr = 317
-and_expr = 318
-shift_expr = 319
-arith_expr = 320
-term = 321
-factor = 322
-power = 323
-atom_expr = 324
-atom = 325
-testlist_comp = 326
-trailer = 327
-subscriptlist = 328
-subscript = 329
-sliceop = 330
-exprlist = 331
-testlist = 332
-dictorsetmaker = 333
-classdef = 334
-arglist = 335
-argument = 336
-comp_iter = 337
-sync_comp_for = 338
-comp_for = 339
-comp_if = 340
-encoding_decl = 341
-yield_expr = 342
-yield_arg = 343
-func_body_suite = 344
-func_type_input = 345
-func_type = 346
-typelist = 347
+namedexpr_test = 305
+test = 306
+test_nocond = 307
+lambdef = 308
+lambdef_nocond = 309
+or_test = 310
+and_test = 311
+not_test = 312
+comparison = 313
+comp_op = 314
+star_expr = 315
+expr = 316
+xor_expr = 317
+and_expr = 318
+shift_expr = 319
+arith_expr = 320
+term = 321
+factor = 322
+power = 323
+atom_expr = 324
+atom = 325
+testlist_comp = 326
+trailer = 327
+subscriptlist = 328
+subscript = 329
+sliceop = 330
+exprlist = 331
+testlist = 332
+dictorsetmaker = 333
+classdef = 334
+arglist = 335
+argument = 336
+comp_iter = 337
+sync_comp_for = 338
+comp_for = 339
+comp_if = 340
+encoding_decl = 341
+yield_expr = 342
+yield_arg = 343
+func_body_suite = 344
+func_type_input = 345
+func_type = 346
+typelist = 347
#--end constants--
sym_name = {}
for _name, _value in list(globals().items()):
if type(_value) is type(0):
sym_name[_value] = _name
-del _name, _value
+del _name, _value
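The constants and the sym_name reverse mapping can be checked directly (the import itself now emits the DeprecationWarning added above):

    import symbol

    print(symbol.namedexpr_test)                    # 305
    print(symbol.sym_name[symbol.namedexpr_test])   # 'namedexpr_test'
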
diff --git a/contrib/tools/python3/src/Lib/symtable.py b/contrib/tools/python3/src/Lib/symtable.py
index 521540fe9e..8fc60f9812 100644
--- a/contrib/tools/python3/src/Lib/symtable.py
+++ b/contrib/tools/python3/src/Lib/symtable.py
@@ -1,7 +1,7 @@
"""Interface to the compiler's internal symbol tables"""
import _symtable
-from _symtable import (USE, DEF_GLOBAL, DEF_NONLOCAL, DEF_LOCAL, DEF_PARAM,
+from _symtable import (USE, DEF_GLOBAL, DEF_NONLOCAL, DEF_LOCAL, DEF_PARAM,
DEF_IMPORT, DEF_BOUND, DEF_ANNOT, SCOPE_OFF, SCOPE_MASK, FREE,
LOCAL, GLOBAL_IMPLICIT, GLOBAL_EXPLICIT, CELL)
@@ -34,7 +34,7 @@ class SymbolTableFactory:
_newSymbolTable = SymbolTableFactory()
-class SymbolTable:
+class SymbolTable:
def __init__(self, raw_table, filename):
self._table = raw_table
@@ -47,7 +47,7 @@ class SymbolTable:
else:
kind = "%s " % self.__class__.__name__
- if self._table.name == "top":
+ if self._table.name == "top":
return "<{0}SymbolTable for module {1}>".format(kind, self._filename)
else:
return "<{0}SymbolTable for {1} in {2}>".format(kind,
@@ -90,9 +90,9 @@ class SymbolTable:
if sym is None:
flags = self._table.symbols[name]
namespaces = self.__check_children(name)
- module_scope = (self._table.name == "top")
- sym = self._symbols[name] = Symbol(name, flags, namespaces,
- module_scope=module_scope)
+ module_scope = (self._table.name == "top")
+ sym = self._symbols[name] = Symbol(name, flags, namespaces,
+ module_scope=module_scope)
return sym
def get_symbols(self):
@@ -115,7 +115,7 @@ class Function(SymbolTable):
__locals = None
__frees = None
__globals = None
- __nonlocals = None
+ __nonlocals = None
def __idents_matching(self, test_func):
return tuple(ident for ident in self.get_identifiers()
@@ -140,11 +140,11 @@ class Function(SymbolTable):
self.__globals = self.__idents_matching(test)
return self.__globals
- def get_nonlocals(self):
- if self.__nonlocals is None:
- self.__nonlocals = self.__idents_matching(lambda x:x & DEF_NONLOCAL)
- return self.__nonlocals
-
+ def get_nonlocals(self):
+ if self.__nonlocals is None:
+ self.__nonlocals = self.__idents_matching(lambda x:x & DEF_NONLOCAL)
+ return self.__nonlocals
+
def get_frees(self):
if self.__frees is None:
is_free = lambda x:((x >> SCOPE_OFF) & SCOPE_MASK) == FREE
@@ -165,14 +165,14 @@ class Class(SymbolTable):
return self.__methods
-class Symbol:
+class Symbol:
- def __init__(self, name, flags, namespaces=None, *, module_scope=False):
+ def __init__(self, name, flags, namespaces=None, *, module_scope=False):
self.__name = name
self.__flags = flags
self.__scope = (flags >> SCOPE_OFF) & SCOPE_MASK # like PyST_GetScope()
self.__namespaces = namespaces or ()
- self.__module_scope = module_scope
+ self.__module_scope = module_scope
def __repr__(self):
return "<symbol {0!r}>".format(self.__name)
@@ -187,22 +187,22 @@ class Symbol:
return bool(self.__flags & DEF_PARAM)
def is_global(self):
-        """Return *True* if the symbol is global.
- """
- return bool(self.__scope in (GLOBAL_IMPLICIT, GLOBAL_EXPLICIT)
- or (self.__module_scope and self.__flags & DEF_BOUND))
-
- def is_nonlocal(self):
- return bool(self.__flags & DEF_NONLOCAL)
-
+        """Return *True* if the symbol is global.
+ """
+ return bool(self.__scope in (GLOBAL_IMPLICIT, GLOBAL_EXPLICIT)
+ or (self.__module_scope and self.__flags & DEF_BOUND))
+
+ def is_nonlocal(self):
+ return bool(self.__flags & DEF_NONLOCAL)
+
def is_declared_global(self):
return bool(self.__scope == GLOBAL_EXPLICIT)
def is_local(self):
- """Return *True* if the symbol is local.
- """
- return bool(self.__scope in (LOCAL, CELL)
- or (self.__module_scope and self.__flags & DEF_BOUND))
+ """Return *True* if the symbol is local.
+ """
+ return bool(self.__scope in (LOCAL, CELL)
+ or (self.__module_scope and self.__flags & DEF_BOUND))
def is_annotated(self):
return bool(self.__flags & DEF_ANNOT)
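A short sketch of the patched behavior: with module_scope, a name bound at the top level now reports is_global() True, while ordinary function locals behave as before:

    import symtable

    code = "def f(x):\n    y = x + 1\n    return y\n"
    mod = symtable.symtable(code, "<example>", "exec")
    fn = mod.lookup("f").get_namespace()   # the Function table for f
    print(fn.lookup("x").is_parameter())   # True
    print(fn.lookup("y").is_local())       # True
    print(mod.lookup("f").is_global())     # True: module-level binding
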
diff --git a/contrib/tools/python3/src/Lib/sysconfig.py b/contrib/tools/python3/src/Lib/sysconfig.py
index 55bd06f3a0..7936188fba 100644
--- a/contrib/tools/python3/src/Lib/sysconfig.py
+++ b/contrib/tools/python3/src/Lib/sysconfig.py
@@ -18,17 +18,17 @@ __all__ = [
'parse_config_h',
]
-# Keys for get_config_var() that are never converted to Python integers.
-_ALWAYS_STR = {
- 'MACOSX_DEPLOYMENT_TARGET',
-}
-
+# Keys for get_config_var() that are never converted to Python integers.
+_ALWAYS_STR = {
+ 'MACOSX_DEPLOYMENT_TARGET',
+}
+
_INSTALL_SCHEMES = {
'posix_prefix': {
- 'stdlib': '{installed_base}/{platlibdir}/python{py_version_short}',
- 'platstdlib': '{platbase}/{platlibdir}/python{py_version_short}',
+ 'stdlib': '{installed_base}/{platlibdir}/python{py_version_short}',
+ 'platstdlib': '{platbase}/{platlibdir}/python{py_version_short}',
'purelib': '{base}/lib/python{py_version_short}/site-packages',
- 'platlib': '{platbase}/{platlibdir}/python{py_version_short}/site-packages',
+ 'platlib': '{platbase}/{platlibdir}/python{py_version_short}/site-packages',
'include':
'{installed_base}/include/python{py_version_short}{abiflags}',
'platinclude':
@@ -67,10 +67,10 @@ _INSTALL_SCHEMES = {
'data': '{userbase}',
},
'posix_user': {
- 'stdlib': '{userbase}/{platlibdir}/python{py_version_short}',
- 'platstdlib': '{userbase}/{platlibdir}/python{py_version_short}',
+ 'stdlib': '{userbase}/{platlibdir}/python{py_version_short}',
+ 'platstdlib': '{userbase}/{platlibdir}/python{py_version_short}',
'purelib': '{userbase}/lib/python{py_version_short}/site-packages',
- 'platlib': '{userbase}/{platlibdir}/python{py_version_short}/site-packages',
+ 'platlib': '{userbase}/{platlibdir}/python{py_version_short}/site-packages',
'include': '{userbase}/include/python{py_version_short}',
'scripts': '{userbase}/bin',
'data': '{userbase}',
@@ -122,7 +122,7 @@ if "_PYTHON_PROJECT_BASE" in os.environ:
_PROJECT_BASE = _safe_realpath(os.environ["_PYTHON_PROJECT_BASE"])
def _is_python_source_dir(d):
- for fn in ("Setup", "Setup.local"):
+ for fn in ("Setup", "Setup.local"):
if os.path.isfile(os.path.join(d, "Modules", fn)):
return True
return False
@@ -245,9 +245,9 @@ def _parse_makefile(filename, vars=None):
notdone[n] = v
else:
try:
- if n in _ALWAYS_STR:
- raise ValueError
-
+ if n in _ALWAYS_STR:
+ raise ValueError
+
v = int(v)
except ValueError:
# insert literal `$'
@@ -306,8 +306,8 @@ def _parse_makefile(filename, vars=None):
notdone[name] = value
else:
try:
- if name in _ALWAYS_STR:
- raise ValueError
+ if name in _ALWAYS_STR:
+ raise ValueError
value = int(value)
except ValueError:
done[name] = value.strip()
@@ -420,7 +420,7 @@ def _generate_posix_vars():
pprint.pprint(vars, stream=f)
# Create file used for sys.path fixup -- see Modules/getpath.c
- with open('pybuilddir.txt', 'w', encoding='utf8') as f:
+ with open('pybuilddir.txt', 'w', encoding='utf8') as f:
f.write(pybuilddir)
def _init_posix(vars):
@@ -435,11 +435,11 @@ def _init_posix(vars):
def _init_non_posix(vars):
"""Initialize the module as appropriate for NT"""
# set basic install directories
- import _imp
+ import _imp
vars['LIBDEST'] = get_path('stdlib')
vars['BINLIBDEST'] = get_path('platstdlib')
vars['INCLUDEPY'] = get_path('include')
- vars['EXT_SUFFIX'] = _imp.extension_suffixes()[0]
+ vars['EXT_SUFFIX'] = _imp.extension_suffixes()[0]
vars['EXE'] = '.exe'
vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT
vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable))
@@ -470,8 +470,8 @@ def parse_config_h(fp, vars=None):
if m:
n, v = m.group(1, 2)
try:
- if n in _ALWAYS_STR:
- raise ValueError
+ if n in _ALWAYS_STR:
+ raise ValueError
v = int(v)
except ValueError:
pass
@@ -551,7 +551,7 @@ def get_config_vars(*args):
_CONFIG_VARS['installed_platbase'] = _BASE_EXEC_PREFIX
_CONFIG_VARS['platbase'] = _EXEC_PREFIX
_CONFIG_VARS['projectbase'] = _PROJECT_BASE
- _CONFIG_VARS['platlibdir'] = sys.platlibdir
+ _CONFIG_VARS['platlibdir'] = sys.platlibdir
try:
_CONFIG_VARS['abiflags'] = sys.abiflags
except AttributeError:
@@ -560,7 +560,7 @@ def get_config_vars(*args):
if os.name == 'nt':
_init_non_posix(_CONFIG_VARS)
- _CONFIG_VARS['TZPATH'] = ''
+ _CONFIG_VARS['TZPATH'] = ''
if os.name == 'posix':
_init_posix(_CONFIG_VARS)
# For backward compatibility, see issue19555
@@ -640,10 +640,10 @@ def get_platform():
if os.name == 'nt':
if 'amd64' in sys.version.lower():
return 'win-amd64'
- if '(arm)' in sys.version.lower():
- return 'win-arm32'
- if '(arm64)' in sys.version.lower():
- return 'win-arm64'
+ if '(arm)' in sys.version.lower():
+ return 'win-arm32'
+ if '(arm64)' in sys.version.lower():
+ return 'win-arm64'
return sys.platform
if os.name != "posix" or not hasattr(os, 'uname'):
@@ -679,8 +679,8 @@ def get_platform():
machine += ".%s" % bitness[sys.maxsize]
# fall through to standard osname-release-machine representation
elif osname[:3] == "aix":
- from _aix_support import aix_platform
- return aix_platform()
+ from _aix_support import aix_platform
+ return aix_platform()
elif osname[:6] == "cygwin":
osname = "cygwin"
import re
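The net effect of the hunk above is visible through the public entry point; the exact string is platform-dependent:

    import sysconfig

    print(sysconfig.get_platform())   # e.g. 'linux-x86_64', 'win-amd64', 'win-arm64'
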
diff --git a/contrib/tools/python3/src/Lib/tarfile.py b/contrib/tools/python3/src/Lib/tarfile.py
index 043a4ab5a5..d832e1c5fc 100644
--- a/contrib/tools/python3/src/Lib/tarfile.py
+++ b/contrib/tools/python3/src/Lib/tarfile.py
@@ -105,7 +105,7 @@ SOLARIS_XHDTYPE = b"X" # Solaris extended header
USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format
GNU_FORMAT = 1 # GNU tar format
PAX_FORMAT = 2 # POSIX.1-2001 (pax) format
-DEFAULT_FORMAT = PAX_FORMAT
+DEFAULT_FORMAT = PAX_FORMAT
#---------------------------------------------------------
# tarfile constants
@@ -420,8 +420,8 @@ class _Stream:
self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
if self.name.endswith(".gz"):
self.name = self.name[:-3]
- # Honor "directory components removed" from RFC1952
- self.name = os.path.basename(self.name)
+ # Honor "directory components removed" from RFC1952
+ self.name = os.path.basename(self.name)
# RFC1952 says we must use ISO-8859-1 for the FNAME field.
self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
@@ -515,10 +515,10 @@ class _Stream:
raise StreamError("seeking backwards is not allowed")
return self.pos
- def read(self, size):
- """Return the next size number of bytes from the stream."""
- assert size is not None
- buf = self._read(size)
+ def read(self, size):
+ """Return the next size number of bytes from the stream."""
+ assert size is not None
+ buf = self._read(size)
self.pos += len(buf)
return buf
@@ -531,14 +531,14 @@ class _Stream:
c = len(self.dbuf)
t = [self.dbuf]
while c < size:
- # Skip underlying buffer to avoid unaligned double buffering.
- if self.buf:
- buf = self.buf
- self.buf = b""
- else:
- buf = self.fileobj.read(self.bufsize)
- if not buf:
- break
+ # Skip underlying buffer to avoid unaligned double buffering.
+ if self.buf:
+ buf = self.buf
+ self.buf = b""
+ else:
+ buf = self.fileobj.read(self.bufsize)
+ if not buf:
+ break
try:
buf = self.cmp.decompress(buf)
except self.exception:
@@ -719,32 +719,32 @@ class TarInfo(object):
usually created internally.
"""
- __slots__ = dict(
- name = 'Name of the archive member.',
- mode = 'Permission bits.',
- uid = 'User ID of the user who originally stored this member.',
- gid = 'Group ID of the user who originally stored this member.',
- size = 'Size in bytes.',
- mtime = 'Time of last modification.',
- chksum = 'Header checksum.',
- type = ('File type. type is usually one of these constants: '
- 'REGTYPE, AREGTYPE, LNKTYPE, SYMTYPE, DIRTYPE, FIFOTYPE, '
- 'CONTTYPE, CHRTYPE, BLKTYPE, GNUTYPE_SPARSE.'),
- linkname = ('Name of the target file name, which is only present '
- 'in TarInfo objects of type LNKTYPE and SYMTYPE.'),
- uname = 'User name.',
- gname = 'Group name.',
- devmajor = 'Device major number.',
- devminor = 'Device minor number.',
- offset = 'The tar header starts here.',
- offset_data = "The file's data starts here.",
- pax_headers = ('A dictionary containing key-value pairs of an '
- 'associated pax extended header.'),
- sparse = 'Sparse member information.',
- tarfile = None,
- _sparse_structs = None,
- _link_target = None,
- )
+ __slots__ = dict(
+ name = 'Name of the archive member.',
+ mode = 'Permission bits.',
+ uid = 'User ID of the user who originally stored this member.',
+ gid = 'Group ID of the user who originally stored this member.',
+ size = 'Size in bytes.',
+ mtime = 'Time of last modification.',
+ chksum = 'Header checksum.',
+ type = ('File type. type is usually one of these constants: '
+ 'REGTYPE, AREGTYPE, LNKTYPE, SYMTYPE, DIRTYPE, FIFOTYPE, '
+ 'CONTTYPE, CHRTYPE, BLKTYPE, GNUTYPE_SPARSE.'),
+ linkname = ('Name of the target file name, which is only present '
+ 'in TarInfo objects of type LNKTYPE and SYMTYPE.'),
+ uname = 'User name.',
+ gname = 'Group name.',
+ devmajor = 'Device major number.',
+ devminor = 'Device minor number.',
+ offset = 'The tar header starts here.',
+ offset_data = "The file's data starts here.",
+ pax_headers = ('A dictionary containing key-value pairs of an '
+ 'associated pax extended header.'),
+ sparse = 'Sparse member information.',
+ tarfile = None,
+ _sparse_structs = None,
+ _link_target = None,
+ )
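Because __slots__ is now a dict, each slot descriptor picks up its value as a docstring (a 3.8+ behavior); for instance:

    import tarfile

    print(tarfile.TarInfo.name.__doc__)   # 'Name of the archive member.'
    print(tarfile.TarInfo.size.__doc__)   # 'Size in bytes.'
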
def __init__(self, name=""):
"""Construct a TarInfo object. name is the optional name
@@ -772,7 +772,7 @@ class TarInfo(object):
@property
def path(self):
- 'In pax headers, "name" is called "path".'
+ 'In pax headers, "name" is called "path".'
return self.name
@path.setter
@@ -781,7 +781,7 @@ class TarInfo(object):
@property
def linkpath(self):
- 'In pax headers, "linkname" is called "linkpath".'
+ 'In pax headers, "linkname" is called "linkpath".'
return self.linkname
@linkpath.setter
@@ -932,14 +932,14 @@ class TarInfo(object):
"""Return a header block. info is a dictionary with file
information, format must be one of the *_FORMAT constants.
"""
- has_device_fields = info.get("type") in (CHRTYPE, BLKTYPE)
- if has_device_fields:
- devmajor = itn(info.get("devmajor", 0), 8, format)
- devminor = itn(info.get("devminor", 0), 8, format)
- else:
- devmajor = stn("", 8, encoding, errors)
- devminor = stn("", 8, encoding, errors)
-
+ has_device_fields = info.get("type") in (CHRTYPE, BLKTYPE)
+ if has_device_fields:
+ devmajor = itn(info.get("devmajor", 0), 8, format)
+ devminor = itn(info.get("devminor", 0), 8, format)
+ else:
+ devmajor = stn("", 8, encoding, errors)
+ devminor = stn("", 8, encoding, errors)
+
parts = [
stn(info.get("name", ""), 100, encoding, errors),
itn(info.get("mode", 0) & 0o7777, 8, format),
@@ -953,8 +953,8 @@ class TarInfo(object):
info.get("magic", POSIX_MAGIC),
stn(info.get("uname", ""), 32, encoding, errors),
stn(info.get("gname", ""), 32, encoding, errors),
- devmajor,
- devminor,
+ devmajor,
+ devminor,
stn(info.get("prefix", ""), 155, encoding, errors)
]
@@ -1251,8 +1251,8 @@ class TarInfo(object):
length, keyword = match.groups()
length = int(length)
- if length == 0:
- raise InvalidHeaderError("invalid header")
+ if length == 0:
+ raise InvalidHeaderError("invalid header")
value = buf[match.end(2) + 1:match.start(1) + length - 1]
# Normally, we could just use "utf-8" as the encoding and "strict"
@@ -1383,42 +1383,42 @@ class TarInfo(object):
return blocks * BLOCKSIZE
def isreg(self):
-        'Return True if the TarInfo object is a regular file.'
+        'Return True if the TarInfo object is a regular file.'
return self.type in REGULAR_TYPES
-
+
def isfile(self):
-        'Return True if the TarInfo object is a regular file.'
+        'Return True if the TarInfo object is a regular file.'
return self.isreg()
-
+
def isdir(self):
- 'Return True if it is a directory.'
+ 'Return True if it is a directory.'
return self.type == DIRTYPE
-
+
def issym(self):
- 'Return True if it is a symbolic link.'
+ 'Return True if it is a symbolic link.'
return self.type == SYMTYPE
-
+
def islnk(self):
- 'Return True if it is a hard link.'
+ 'Return True if it is a hard link.'
return self.type == LNKTYPE
-
+
def ischr(self):
- 'Return True if it is a character device.'
+ 'Return True if it is a character device.'
return self.type == CHRTYPE
-
+
def isblk(self):
- 'Return True if it is a block device.'
+ 'Return True if it is a block device.'
return self.type == BLKTYPE
-
+
def isfifo(self):
- 'Return True if it is a FIFO.'
+ 'Return True if it is a FIFO.'
return self.type == FIFOTYPE
-
+
def issparse(self):
return self.sparse is not None
-
+
def isdev(self):
- 'Return True if it is one of character device, block device or FIFO.'
+ 'Return True if it is one of character device, block device or FIFO.'
return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
# class TarInfo
@@ -1667,12 +1667,12 @@ class TarFile(object):
raise ValueError("mode must be 'r', 'w' or 'x'")
try:
- from gzip import GzipFile
- except ImportError:
+ from gzip import GzipFile
+ except ImportError:
raise CompressionError("gzip module is not available")
try:
- fileobj = GzipFile(name, mode + "b", compresslevel, fileobj)
+ fileobj = GzipFile(name, mode + "b", compresslevel, fileobj)
except OSError:
if fileobj is not None and mode == 'r':
raise ReadError("not a gzip file")
@@ -1700,11 +1700,11 @@ class TarFile(object):
raise ValueError("mode must be 'r', 'w' or 'x'")
try:
- from bz2 import BZ2File
+ from bz2 import BZ2File
except ImportError:
raise CompressionError("bz2 module is not available")
- fileobj = BZ2File(fileobj or name, mode, compresslevel=compresslevel)
+ fileobj = BZ2File(fileobj or name, mode, compresslevel=compresslevel)
try:
t = cls.taropen(name, mode, fileobj, **kwargs)
@@ -1728,15 +1728,15 @@ class TarFile(object):
raise ValueError("mode must be 'r', 'w' or 'x'")
try:
- from lzma import LZMAFile, LZMAError
+ from lzma import LZMAFile, LZMAError
except ImportError:
raise CompressionError("lzma module is not available")
- fileobj = LZMAFile(fileobj or name, mode, preset=preset)
+ fileobj = LZMAFile(fileobj or name, mode, preset=preset)
try:
t = cls.taropen(name, mode, fileobj, **kwargs)
- except (LZMAError, EOFError):
+ except (LZMAError, EOFError):
fileobj.close()
if mode == 'r':
raise ReadError("not an lzma file")
@@ -1836,9 +1836,9 @@ class TarFile(object):
tarinfo = self.tarinfo()
tarinfo.tarfile = self # Not needed
- # Use os.stat or os.lstat, depending on if symlinks shall be resolved.
+ # Use os.stat or os.lstat, depending on if symlinks shall be resolved.
if fileobj is None:
- if not self.dereference:
+ if not self.dereference:
statres = os.lstat(name)
else:
statres = os.stat(name)
@@ -2093,10 +2093,10 @@ class TarFile(object):
def extractfile(self, member):
"""Extract a member from the archive as a file object. `member' may be
- a filename or a TarInfo object. If `member' is a regular file or
- a link, an io.BufferedReader object is returned. For all other
- existing members, None is returned. If `member' does not appear
- in the archive, KeyError is raised.
+ a filename or a TarInfo object. If `member' is a regular file or
+ a link, an io.BufferedReader object is returned. For all other
+ existing members, None is returned. If `member' does not appear
+ in the archive, KeyError is raised.
"""
self._check("r")
@@ -2237,9 +2237,9 @@ class TarFile(object):
try:
# For systems that support symbolic and hard links.
if tarinfo.issym():
- if os.path.lexists(targetpath):
- # Avoid FileExistsError on following os.symlink.
- os.unlink(targetpath)
+ if os.path.lexists(targetpath):
+ # Avoid FileExistsError on following os.symlink.
+ os.unlink(targetpath)
os.symlink(tarinfo.linkname, targetpath)
else:
# See extract().
@@ -2287,10 +2287,10 @@ class TarFile(object):
def chmod(self, tarinfo, targetpath):
"""Set file permissions of targetpath according to tarinfo.
"""
- try:
- os.chmod(targetpath, tarinfo.mode)
- except OSError:
- raise ExtractError("could not change mode")
+ try:
+ os.chmod(targetpath, tarinfo.mode)
+ except OSError:
+ raise ExtractError("could not change mode")
def utime(self, tarinfo, targetpath):
"""Set modification time of targetpath according to tarinfo.
@@ -2345,15 +2345,15 @@ class TarFile(object):
raise ReadError(str(e))
except SubsequentHeaderError as e:
raise ReadError(str(e))
- except Exception as e:
- try:
- import zlib
- if isinstance(e, zlib.error):
- raise ReadError(f'zlib error: {e}')
- else:
- raise e
- except ImportError:
- raise e
+ except Exception as e:
+ try:
+ import zlib
+ if isinstance(e, zlib.error):
+ raise ReadError(f'zlib error: {e}')
+ else:
+ raise e
+ except ImportError:
+ raise e
break
if tarinfo is not None:
@@ -2484,14 +2484,14 @@ class TarFile(object):
def is_tarfile(name):
"""Return True if name points to a tar archive that we
are able to handle, else return False.
-
- 'name' should be a string, file, or file-like object.
+
+ 'name' should be a string, file, or file-like object.
"""
try:
- if hasattr(name, "read"):
- t = open(fileobj=name)
- else:
- t = open(name)
+ if hasattr(name, "read"):
+ t = open(fileobj=name)
+ else:
+ t = open(name)
t.close()
return True
except TarError:
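With the change above, is_tarfile() also accepts an open file-like object; a self-contained sketch using an in-memory archive:

    import io, tarfile

    buf = io.BytesIO()
    with tarfile.open(fileobj=buf, mode="w") as tf:
        info = tarfile.TarInfo("hello.txt")
        data = b"hi"
        info.size = len(data)
        tf.addfile(info, io.BytesIO(data))
    buf.seek(0)
    print(tarfile.is_tarfile(buf))   # True
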
diff --git a/contrib/tools/python3/src/Lib/telnetlib.py b/contrib/tools/python3/src/Lib/telnetlib.py
index 8ce053e881..4996a7dbeb 100644
--- a/contrib/tools/python3/src/Lib/telnetlib.py
+++ b/contrib/tools/python3/src/Lib/telnetlib.py
@@ -231,7 +231,7 @@ class Telnet:
self.host = host
self.port = port
self.timeout = timeout
- sys.audit("telnetlib.Telnet.open", self, host, port)
+ sys.audit("telnetlib.Telnet.open", self, host, port)
self.sock = socket.create_connection((host, port), timeout)
def __del__(self):
@@ -287,7 +287,7 @@ class Telnet:
"""
if IAC in buffer:
buffer = buffer.replace(IAC, IAC+IAC)
- sys.audit("telnetlib.Telnet.write", self, buffer)
+ sys.audit("telnetlib.Telnet.write", self, buffer)
self.msg("send %r", buffer)
self.sock.sendall(buffer)
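The sys.audit() calls added here become observable once an audit hook (PEP 578) is installed; a minimal sketch (hooks are global and cannot be removed, so this is demo-only):

    import sys

    def hook(event, args):
        if event.startswith("telnetlib."):
            print(event, args)

    sys.addaudithook(hook)
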
diff --git a/contrib/tools/python3/src/Lib/tempfile.py b/contrib/tools/python3/src/Lib/tempfile.py
index eafce6f25b..1273e8625c 100644
--- a/contrib/tools/python3/src/Lib/tempfile.py
+++ b/contrib/tools/python3/src/Lib/tempfile.py
@@ -43,8 +43,8 @@ import os as _os
import shutil as _shutil
import errno as _errno
from random import Random as _Random
-import sys as _sys
-import types as _types
+import sys as _sys
+import types as _types
import weakref as _weakref
import _thread
_allocate_lock = _thread.allocate_lock
@@ -75,7 +75,7 @@ _once_lock = _allocate_lock()
def _exists(fn):
try:
- _os.lstat(fn)
+ _os.lstat(fn)
except OSError:
return False
else:
@@ -88,10 +88,10 @@ def _infer_return_type(*args):
for arg in args:
if arg is None:
continue
-
- if isinstance(arg, _os.PathLike):
- arg = _os.fspath(arg)
-
+
+ if isinstance(arg, _os.PathLike):
+ arg = _os.fspath(arg)
+
if isinstance(arg, bytes):
if return_type is str:
raise TypeError("Can't mix bytes and non-bytes in "
@@ -250,7 +250,7 @@ def _mkstemp_inner(dir, pre, suf, flags, output_type):
for seq in range(TMP_MAX):
name = next(names)
file = _os.path.join(dir, pre + name + suf)
- _sys.audit("tempfile.mkstemp", file)
+ _sys.audit("tempfile.mkstemp", file)
try:
fd = _os.open(file, flags, 0o600)
except FileExistsError:
@@ -312,7 +312,7 @@ def mkstemp(suffix=None, prefix=None, dir=None, text=False):
otherwise a default directory is used.
If 'text' is specified and true, the file is opened in text
- mode. Else (the default) the file is opened in binary mode.
+ mode. Else (the default) the file is opened in binary mode.
If any of 'suffix', 'prefix' and 'dir' are not None, they must be the
same type. If they are bytes, the returned name will be bytes; str
@@ -358,7 +358,7 @@ def mkdtemp(suffix=None, prefix=None, dir=None):
for seq in range(TMP_MAX):
name = next(names)
file = _os.path.join(dir, prefix + name + suffix)
- _sys.audit("tempfile.mkdtemp", file)
+ _sys.audit("tempfile.mkdtemp", file)
try:
_os.mkdir(file, 0o700)
except FileExistsError:
@@ -516,7 +516,7 @@ class _TemporaryFileWrapper:
def NamedTemporaryFile(mode='w+b', buffering=-1, encoding=None,
newline=None, suffix=None, prefix=None,
- dir=None, delete=True, *, errors=None):
+ dir=None, delete=True, *, errors=None):
"""Create and return a temporary file.
Arguments:
'prefix', 'suffix', 'dir' -- as for mkstemp.
@@ -525,7 +525,7 @@ def NamedTemporaryFile(mode='w+b', buffering=-1, encoding=None,
'encoding' -- the encoding argument to io.open (default None)
'newline' -- the newline argument to io.open (default None)
'delete' -- whether the file is deleted on close (default True).
- 'errors' -- the errors argument to io.open (default None)
+ 'errors' -- the errors argument to io.open (default None)
The file is created as mkstemp() would do it.
Returns an object with a file-like interface; the name of the file
@@ -545,7 +545,7 @@ def NamedTemporaryFile(mode='w+b', buffering=-1, encoding=None,
(fd, name) = _mkstemp_inner(dir, prefix, suffix, flags, output_type)
try:
file = _io.open(fd, mode, buffering=buffering,
- newline=newline, encoding=encoding, errors=errors)
+ newline=newline, encoding=encoding, errors=errors)
return _TemporaryFileWrapper(file, name, delete)
except BaseException:
@@ -553,7 +553,7 @@ def NamedTemporaryFile(mode='w+b', buffering=-1, encoding=None,
_os.close(fd)
raise
-if _os.name != 'posix' or _sys.platform == 'cygwin':
+if _os.name != 'posix' or _sys.platform == 'cygwin':
# On non-POSIX and Cygwin systems, assume that we cannot unlink a file
# while it is open.
TemporaryFile = NamedTemporaryFile
@@ -566,7 +566,7 @@ else:
def TemporaryFile(mode='w+b', buffering=-1, encoding=None,
newline=None, suffix=None, prefix=None,
- dir=None, *, errors=None):
+ dir=None, *, errors=None):
"""Create and return a temporary file.
Arguments:
'prefix', 'suffix', 'dir' -- as for mkstemp.
@@ -574,7 +574,7 @@ else:
'buffering' -- the buffer size argument to io.open (default -1).
'encoding' -- the encoding argument to io.open (default None)
'newline' -- the newline argument to io.open (default None)
- 'errors' -- the errors argument to io.open (default None)
+ 'errors' -- the errors argument to io.open (default None)
The file is created as mkstemp() would do it.
Returns an object with a file-like interface. The file has no
@@ -608,8 +608,8 @@ else:
else:
try:
return _io.open(fd, mode, buffering=buffering,
- newline=newline, encoding=encoding,
- errors=errors)
+ newline=newline, encoding=encoding,
+ errors=errors)
except:
_os.close(fd)
raise
@@ -619,7 +619,7 @@ else:
try:
_os.unlink(name)
return _io.open(fd, mode, buffering=buffering,
- newline=newline, encoding=encoding, errors=errors)
+ newline=newline, encoding=encoding, errors=errors)
except:
_os.close(fd)
raise
@@ -633,22 +633,22 @@ class SpooledTemporaryFile:
def __init__(self, max_size=0, mode='w+b', buffering=-1,
encoding=None, newline=None,
- suffix=None, prefix=None, dir=None, *, errors=None):
+ suffix=None, prefix=None, dir=None, *, errors=None):
if 'b' in mode:
self._file = _io.BytesIO()
else:
- self._file = _io.TextIOWrapper(_io.BytesIO(),
- encoding=encoding, errors=errors,
- newline=newline)
+ self._file = _io.TextIOWrapper(_io.BytesIO(),
+ encoding=encoding, errors=errors,
+ newline=newline)
self._max_size = max_size
self._rolled = False
self._TemporaryFileArgs = {'mode': mode, 'buffering': buffering,
'suffix': suffix, 'prefix': prefix,
'encoding': encoding, 'newline': newline,
- 'dir': dir, 'errors': errors}
-
- __class_getitem__ = classmethod(_types.GenericAlias)
+ 'dir': dir, 'errors': errors}
+ __class_getitem__ = classmethod(_types.GenericAlias)
+
def _check(self, file):
if self._rolled: return
max_size = self._max_size
@@ -661,12 +661,12 @@ class SpooledTemporaryFile:
newfile = self._file = TemporaryFile(**self._TemporaryFileArgs)
del self._TemporaryFileArgs
- pos = file.tell()
- if hasattr(newfile, 'buffer'):
- newfile.buffer.write(file.detach().getvalue())
- else:
- newfile.write(file.getvalue())
- newfile.seek(pos, 0)
+ pos = file.tell()
+ if hasattr(newfile, 'buffer'):
+ newfile.buffer.write(file.detach().getvalue())
+ else:
+ newfile.write(file.getvalue())
+ newfile.seek(pos, 0)
self._rolled = True
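The position bookkeeping above means a rollover is invisible to the caller; a small sketch (_rolled is a private flag, inspected here only for illustration):

    import tempfile

    f = tempfile.SpooledTemporaryFile(max_size=16)
    f.write(b"x" * 32)   # exceeds max_size, so the data rolls to a real file
    print(f._rolled)     # True
    f.seek(0)
    print(f.read(4))     # b'xxxx': contents and position survive the rollover
    f.close()
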
@@ -697,12 +697,12 @@ class SpooledTemporaryFile:
@property
def encoding(self):
- return self._file.encoding
-
- @property
- def errors(self):
- return self._file.errors
+ return self._file.encoding
+ @property
+ def errors(self):
+ return self._file.errors
+
def fileno(self):
self.rollover()
return self._file.fileno()
@@ -729,7 +729,7 @@ class SpooledTemporaryFile:
@property
def newlines(self):
- return self._file.newlines
+ return self._file.newlines
def read(self, *args):
return self._file.read(*args)
@@ -741,7 +741,7 @@ class SpooledTemporaryFile:
return self._file.readlines(*args)
def seek(self, *args):
- return self._file.seek(*args)
+ return self._file.seek(*args)
def tell(self):
return self._file.tell()
@@ -786,38 +786,38 @@ class TemporaryDirectory(object):
warn_message="Implicitly cleaning up {!r}".format(self))
@classmethod
- def _rmtree(cls, name):
- def onerror(func, path, exc_info):
- if issubclass(exc_info[0], PermissionError):
- def resetperms(path):
- try:
- _os.chflags(path, 0)
- except AttributeError:
- pass
- _os.chmod(path, 0o700)
-
- try:
- if path != name:
- resetperms(_os.path.dirname(path))
- resetperms(path)
-
- try:
- _os.unlink(path)
- # PermissionError is raised on FreeBSD for directories
- except (IsADirectoryError, PermissionError):
- cls._rmtree(path)
- except FileNotFoundError:
- pass
- elif issubclass(exc_info[0], FileNotFoundError):
- pass
- else:
- raise
-
- _shutil.rmtree(name, onerror=onerror)
-
- @classmethod
+ def _rmtree(cls, name):
+ def onerror(func, path, exc_info):
+ if issubclass(exc_info[0], PermissionError):
+ def resetperms(path):
+ try:
+ _os.chflags(path, 0)
+ except AttributeError:
+ pass
+ _os.chmod(path, 0o700)
+
+ try:
+ if path != name:
+ resetperms(_os.path.dirname(path))
+ resetperms(path)
+
+ try:
+ _os.unlink(path)
+ # PermissionError is raised on FreeBSD for directories
+ except (IsADirectoryError, PermissionError):
+ cls._rmtree(path)
+ except FileNotFoundError:
+ pass
+ elif issubclass(exc_info[0], FileNotFoundError):
+ pass
+ else:
+ raise
+
+ _shutil.rmtree(name, onerror=onerror)
+
+ @classmethod
def _cleanup(cls, name, warn_message):
- cls._rmtree(name)
+ cls._rmtree(name)
_warnings.warn(warn_message, ResourceWarning)
def __repr__(self):
@@ -831,6 +831,6 @@ class TemporaryDirectory(object):
def cleanup(self):
if self._finalizer.detach():
- self._rmtree(self.name)
-
- __class_getitem__ = classmethod(_types.GenericAlias)
+ self._rmtree(self.name)
+
+ __class_getitem__ = classmethod(_types.GenericAlias)
diff --git a/contrib/tools/python3/src/Lib/textwrap.py b/contrib/tools/python3/src/Lib/textwrap.py
index 30e693c8de..4bbdcf8134 100644
--- a/contrib/tools/python3/src/Lib/textwrap.py
+++ b/contrib/tools/python3/src/Lib/textwrap.py
@@ -420,9 +420,9 @@ def dedent(text):
Note that tabs and spaces are both treated as whitespace, but they
are not equal: the lines " hello" and "\\thello" are
- considered to have no common leading whitespace.
-
- Entirely blank lines are normalized to a newline character.
+ considered to have no common leading whitespace.
+
+ Entirely blank lines are normalized to a newline character.
"""
# Look for the longest leading string of spaces and tabs common to
# all lines.
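The normalization mentioned in the docstring is easy to see: a whitespace-only line contributes nothing to the common prefix and comes back as a bare newline:

    import textwrap

    text = "    hello\n      \n    world\n"
    print(repr(textwrap.dedent(text)))   # 'hello\n\nworld\n'
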
diff --git a/contrib/tools/python3/src/Lib/threading.py b/contrib/tools/python3/src/Lib/threading.py
index a3cb245ab9..53b7203594 100644
--- a/contrib/tools/python3/src/Lib/threading.py
+++ b/contrib/tools/python3/src/Lib/threading.py
@@ -3,7 +3,7 @@
import os as _os
import sys as _sys
import _thread
-import functools
+import functools
from time import monotonic as _time
from _weakrefset import WeakSet
@@ -27,20 +27,20 @@ __all__ = ['get_ident', 'active_count', 'Condition', 'current_thread',
'enumerate', 'main_thread', 'TIMEOUT_MAX',
'Event', 'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Thread',
'Barrier', 'BrokenBarrierError', 'Timer', 'ThreadError',
- 'setprofile', 'settrace', 'local', 'stack_size',
- 'excepthook', 'ExceptHookArgs']
+ 'setprofile', 'settrace', 'local', 'stack_size',
+ 'excepthook', 'ExceptHookArgs']
# Rename some stuff so "from threading import *" is safe
_start_new_thread = _thread.start_new_thread
_allocate_lock = _thread.allocate_lock
_set_sentinel = _thread._set_sentinel
get_ident = _thread.get_ident
-try:
- get_native_id = _thread.get_native_id
- _HAVE_THREAD_NATIVE_ID = True
- __all__.append('get_native_id')
-except AttributeError:
- _HAVE_THREAD_NATIVE_ID = False
+try:
+ get_native_id = _thread.get_native_id
+ _HAVE_THREAD_NATIVE_ID = True
+ __all__.append('get_native_id')
+except AttributeError:
+ _HAVE_THREAD_NATIVE_ID = False
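Where the platform exposes native thread ids, the new helper is available directly (3.8+); on platforms without it, the name simply isn't exported:

    import threading

    if hasattr(threading, "get_native_id"):
        print(threading.get_native_id())   # the OS-level id of the calling thread
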
ThreadError = _thread.error
try:
_CRLock = _thread.RLock
@@ -122,11 +122,11 @@ class _RLock:
hex(id(self))
)
- def _at_fork_reinit(self):
- self._block._at_fork_reinit()
- self._owner = None
- self._count = 0
-
+ def _at_fork_reinit(self):
+ self._block._at_fork_reinit()
+ self._owner = None
+ self._count = 0
+
def acquire(self, blocking=True, timeout=-1):
"""Acquire a lock, blocking or non-blocking.
@@ -249,10 +249,10 @@ class Condition:
pass
self._waiters = _deque()
- def _at_fork_reinit(self):
- self._lock._at_fork_reinit()
- self._waiters.clear()
-
+ def _at_fork_reinit(self):
+ self._lock._at_fork_reinit()
+ self._waiters.clear()
+
def __enter__(self):
return self._lock.__enter__()
@@ -271,7 +271,7 @@ class Condition:
def _is_owned(self):
# Return True if lock is owned by current_thread.
# This method is called only if _lock doesn't have _is_owned().
- if self._lock.acquire(False):
+ if self._lock.acquire(False):
self._lock.release()
return False
else:
@@ -448,19 +448,19 @@ class Semaphore:
__enter__ = acquire
- def release(self, n=1):
- """Release a semaphore, incrementing the internal counter by one or more.
+ def release(self, n=1):
+ """Release a semaphore, incrementing the internal counter by one or more.
When the counter is zero on entry and another thread is waiting for it
to become larger than zero again, wake up that thread.
"""
- if n < 1:
- raise ValueError('n must be one or more')
+ if n < 1:
+ raise ValueError('n must be one or more')
with self._cond:
- self._value += n
- for i in range(n):
- self._cond.notify()
+ self._value += n
+ for i in range(n):
+ self._cond.notify()
def __exit__(self, t, v, tb):
self.release()
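The new n parameter (3.9+) bumps the counter and wakes up to n waiters in one call, as the loop over notify() above shows:

    import threading

    sem = threading.Semaphore(0)
    sem.release(3)           # counter += 3, up to three waiters notified
    for _ in range(3):
        sem.acquire()        # all three succeed without blocking
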
@@ -487,8 +487,8 @@ class BoundedSemaphore(Semaphore):
Semaphore.__init__(self, value)
self._initial_value = value
- def release(self, n=1):
- """Release a semaphore, incrementing the internal counter by one or more.
+ def release(self, n=1):
+ """Release a semaphore, incrementing the internal counter by one or more.
When the counter is zero on entry and another thread is waiting for it
to become larger than zero again, wake up that thread.
@@ -497,14 +497,14 @@ class BoundedSemaphore(Semaphore):
raise a ValueError.
"""
- if n < 1:
- raise ValueError('n must be one or more')
+ if n < 1:
+ raise ValueError('n must be one or more')
with self._cond:
- if self._value + n > self._initial_value:
+ if self._value + n > self._initial_value:
raise ValueError("Semaphore released too many times")
- self._value += n
- for i in range(n):
- self._cond.notify()
+ self._value += n
+ for i in range(n):
+ self._cond.notify()
class Event:
@@ -522,9 +522,9 @@ class Event:
self._cond = Condition(Lock())
self._flag = False
- def _at_fork_reinit(self):
- # Private method called by Thread._reset_internal_locks()
- self._cond._at_fork_reinit()
+ def _at_fork_reinit(self):
+ # Private method called by Thread._reset_internal_locks()
+ self._cond._at_fork_reinit()
def is_set(self):
"""Return true if and only if the internal flag is true."""
@@ -608,7 +608,7 @@ class Barrier:
self._action = action
self._timeout = timeout
self._parties = parties
- self._state = 0 # 0 filling, 1 draining, -1 resetting, -2 broken
+ self._state = 0 # 0 filling, 1 draining, -1 resetting, -2 broken
self._count = 0
def wait(self, timeout=None):
@@ -750,35 +750,35 @@ _counter() # Consume 0 so first non-main thread has id 1.
def _newname(template="Thread-%d"):
return template % _counter()
-# Active thread administration.
-#
-# bpo-44422: Use a reentrant lock to allow reentrant calls to functions like
-# threading.enumerate().
-_active_limbo_lock = RLock()
+# Active thread administration.
+#
+# bpo-44422: Use a reentrant lock to allow reentrant calls to functions like
+# threading.enumerate().
+_active_limbo_lock = RLock()
_active = {} # maps thread id to Thread object
_limbo = {}
_dangling = WeakSet()
-
-# Set of Thread._tstate_lock locks of non-daemon threads used by _shutdown()
-# to wait until all Python thread states get deleted:
-# see Thread._set_tstate_lock().
-_shutdown_locks_lock = _allocate_lock()
-_shutdown_locks = set()
-
-def _maintain_shutdown_locks():
- """
- Drop any shutdown locks that don't correspond to running threads anymore.
-
- Calling this from time to time avoids an ever-growing _shutdown_locks
- set when Thread objects are not joined explicitly. See bpo-37788.
-
- This must be called with _shutdown_locks_lock acquired.
- """
- # If a lock was released, the corresponding thread has exited
- to_remove = [lock for lock in _shutdown_locks if not lock.locked()]
- _shutdown_locks.difference_update(to_remove)
-
-
+
+# Set of Thread._tstate_lock locks of non-daemon threads used by _shutdown()
+# to wait until all Python thread states get deleted:
+# see Thread._set_tstate_lock().
+_shutdown_locks_lock = _allocate_lock()
+_shutdown_locks = set()
+
+def _maintain_shutdown_locks():
+ """
+ Drop any shutdown locks that don't correspond to running threads anymore.
+
+ Calling this from time to time avoids an ever-growing _shutdown_locks
+ set when Thread objects are not joined explicitly. See bpo-37788.
+
+ This must be called with _shutdown_locks_lock acquired.
+ """
+ # If a lock was released, the corresponding thread has exited
+ to_remove = [lock for lock in _shutdown_locks if not lock.locked()]
+ _shutdown_locks.difference_update(to_remove)
+
+
# Main class for threads
class Thread:
@@ -827,29 +827,29 @@ class Thread:
else:
self._daemonic = current_thread().daemon
self._ident = None
- if _HAVE_THREAD_NATIVE_ID:
- self._native_id = None
+ if _HAVE_THREAD_NATIVE_ID:
+ self._native_id = None
self._tstate_lock = None
self._started = Event()
self._is_stopped = False
self._initialized = True
- # Copy of sys.stderr used by self._invoke_excepthook()
+ # Copy of sys.stderr used by self._invoke_excepthook()
self._stderr = _sys.stderr
- self._invoke_excepthook = _make_invoke_excepthook()
+ self._invoke_excepthook = _make_invoke_excepthook()
# For debugging and _after_fork()
_dangling.add(self)
def _reset_internal_locks(self, is_alive):
# private! Called by _after_fork() to reset our internal locks as
# they may be in an invalid state leading to a deadlock or crash.
- self._started._at_fork_reinit()
+ self._started._at_fork_reinit()
if is_alive:
- # bpo-42350: If the fork happens when the thread is already stopped
- # (ex: after threading._shutdown() has been called), _tstate_lock
- # is None. Do nothing in this case.
- if self._tstate_lock is not None:
- self._tstate_lock._at_fork_reinit()
- self._tstate_lock.acquire()
+ # bpo-42350: If the fork happens when the thread is already stopped
+ # (ex: after threading._shutdown() has been called), _tstate_lock
+ # is None. Do nothing in this case.
+ if self._tstate_lock is not None:
+ self._tstate_lock._at_fork_reinit()
+ self._tstate_lock.acquire()
else:
# The thread isn't alive after fork: it doesn't have a tstate
# anymore.
@@ -885,7 +885,7 @@ class Thread:
if self._started.is_set():
raise RuntimeError("threads can only be started once")
-
+
with _active_limbo_lock:
_limbo[self] = self
try:
@@ -936,10 +936,10 @@ class Thread:
def _set_ident(self):
self._ident = get_ident()
- if _HAVE_THREAD_NATIVE_ID:
- def _set_native_id(self):
- self._native_id = get_native_id()
-
+ if _HAVE_THREAD_NATIVE_ID:
+ def _set_native_id(self):
+ self._native_id = get_native_id()
+
def _set_tstate_lock(self):
"""
Set a lock object which will be released by the interpreter when
@@ -948,17 +948,17 @@ class Thread:
self._tstate_lock = _set_sentinel()
self._tstate_lock.acquire()
- if not self.daemon:
- with _shutdown_locks_lock:
- _maintain_shutdown_locks()
- _shutdown_locks.add(self._tstate_lock)
-
+ if not self.daemon:
+ with _shutdown_locks_lock:
+ _maintain_shutdown_locks()
+ _shutdown_locks.add(self._tstate_lock)
+
def _bootstrap_inner(self):
try:
self._set_ident()
self._set_tstate_lock()
- if _HAVE_THREAD_NATIVE_ID:
- self._set_native_id()
+ if _HAVE_THREAD_NATIVE_ID:
+ self._set_native_id()
self._started.set()
with _active_limbo_lock:
_active[self._ident] = self
@@ -972,7 +972,7 @@ class Thread:
try:
self.run()
except:
- self._invoke_excepthook(self)
+ self._invoke_excepthook(self)
finally:
with _active_limbo_lock:
try:
@@ -1004,10 +1004,10 @@ class Thread:
assert not lock.locked()
self._is_stopped = True
self._tstate_lock = None
- if not self.daemon:
- with _shutdown_locks_lock:
- # Remove our lock and other released locks from _shutdown_locks
- _maintain_shutdown_locks()
+ if not self.daemon:
+ with _shutdown_locks_lock:
+ # Remove our lock and other released locks from _shutdown_locks
+ _maintain_shutdown_locks()
def _delete(self):
"Remove current thread from the dict of currently running threads."
@@ -1064,25 +1064,25 @@ class Thread:
# If the lock is acquired, the C code is done, and self._stop() is
# called. That sets ._is_stopped to True, and ._tstate_lock to None.
lock = self._tstate_lock
- if lock is None:
- # already determined that the C code is done
+ if lock is None:
+ # already determined that the C code is done
assert self._is_stopped
- return
-
- try:
- if lock.acquire(block, timeout):
- lock.release()
- self._stop()
- except:
- if lock.locked():
- # bpo-45274: lock.acquire() acquired the lock, but the function
- # was interrupted with an exception before reaching the
- # lock.release(). It can happen if a signal handler raises an
- # exception, like CTRL+C which raises KeyboardInterrupt.
- lock.release()
- self._stop()
- raise
-
+ return
+
+ try:
+ if lock.acquire(block, timeout):
+ lock.release()
+ self._stop()
+ except:
+ if lock.locked():
+ # bpo-45274: lock.acquire() acquired the lock, but the function
+ # was interrupted with an exception before reaching the
+ # lock.release(). It can happen if a signal handler raises an
+ # exception, like CTRL+C which raises KeyboardInterrupt.
+ lock.release()
+ self._stop()
+ raise
+
@property
def name(self):
"""A string used for identification purposes only.
@@ -1111,24 +1111,24 @@ class Thread:
assert self._initialized, "Thread.__init__() not called"
return self._ident
- if _HAVE_THREAD_NATIVE_ID:
- @property
- def native_id(self):
- """Native integral thread ID of this thread, or None if it has not been started.
-
- This is a non-negative integer. See the get_native_id() function.
- This represents the Thread ID as reported by the kernel.
-
- """
- assert self._initialized, "Thread.__init__() not called"
- return self._native_id
-
+ if _HAVE_THREAD_NATIVE_ID:
+ @property
+ def native_id(self):
+ """Native integral thread ID of this thread, or None if it has not been started.
+
+ This is a non-negative integer. See the get_native_id() function.
+ This represents the Thread ID as reported by the kernel.
+
+ """
+ assert self._initialized, "Thread.__init__() not called"
+ return self._native_id
+
def is_alive(self):
"""Return whether the thread is alive.
This method returns True just before the run() method starts until just
- after the run() method terminates. See also the module function
- enumerate().
+ after the run() method terminates. See also the module function
+ enumerate().
"""
assert self._initialized, "Thread.__init__() not called"
@@ -1146,7 +1146,7 @@ class Thread:
main thread is not a daemon thread and therefore all threads created in
the main thread default to daemon = False.
- The entire Python program exits when only daemon threads are left.
+ The entire Python program exits when only daemon threads are left.
"""
assert self._initialized, "Thread.__init__() not called"
@@ -1172,104 +1172,104 @@ class Thread:
def setName(self, name):
self.name = name
-
-try:
- from _thread import (_excepthook as excepthook,
- _ExceptHookArgs as ExceptHookArgs)
-except ImportError:
- # Simple Python implementation if _thread._excepthook() is not available
- from traceback import print_exception as _print_exception
- from collections import namedtuple
-
- _ExceptHookArgs = namedtuple(
- 'ExceptHookArgs',
- 'exc_type exc_value exc_traceback thread')
-
- def ExceptHookArgs(args):
- return _ExceptHookArgs(*args)
-
- def excepthook(args, /):
- """
- Handle uncaught Thread.run() exception.
- """
- if args.exc_type == SystemExit:
- # silently ignore SystemExit
- return
-
- if _sys is not None and _sys.stderr is not None:
- stderr = _sys.stderr
- elif args.thread is not None:
- stderr = args.thread._stderr
- if stderr is None:
- # do nothing if sys.stderr is None and sys.stderr was None
- # when the thread was created
- return
- else:
- # do nothing if sys.stderr is None and args.thread is None
- return
-
- if args.thread is not None:
- name = args.thread.name
- else:
- name = get_ident()
- print(f"Exception in thread {name}:",
- file=stderr, flush=True)
- _print_exception(args.exc_type, args.exc_value, args.exc_traceback,
- file=stderr)
- stderr.flush()
-
-
-def _make_invoke_excepthook():
- # Create a local namespace to ensure that variables remain alive
- # when _invoke_excepthook() is called, even if it is called late during
- # Python shutdown. It is mostly needed for daemon threads.
-
- old_excepthook = excepthook
- old_sys_excepthook = _sys.excepthook
- if old_excepthook is None:
- raise RuntimeError("threading.excepthook is None")
- if old_sys_excepthook is None:
- raise RuntimeError("sys.excepthook is None")
-
- sys_exc_info = _sys.exc_info
- local_print = print
- local_sys = _sys
-
- def invoke_excepthook(thread):
- global excepthook
- try:
- hook = excepthook
- if hook is None:
- hook = old_excepthook
-
- args = ExceptHookArgs([*sys_exc_info(), thread])
-
- hook(args)
- except Exception as exc:
- exc.__suppress_context__ = True
- del exc
-
- if local_sys is not None and local_sys.stderr is not None:
- stderr = local_sys.stderr
- else:
- stderr = thread._stderr
-
- local_print("Exception in threading.excepthook:",
- file=stderr, flush=True)
-
- if local_sys is not None and local_sys.excepthook is not None:
- sys_excepthook = local_sys.excepthook
- else:
- sys_excepthook = old_sys_excepthook
-
- sys_excepthook(*sys_exc_info())
- finally:
- # Break reference cycle (exception stored in a variable)
- args = None
-
- return invoke_excepthook
-
-
+
+try:
+ from _thread import (_excepthook as excepthook,
+ _ExceptHookArgs as ExceptHookArgs)
+except ImportError:
+ # Simple Python implementation if _thread._excepthook() is not available
+ from traceback import print_exception as _print_exception
+ from collections import namedtuple
+
+ _ExceptHookArgs = namedtuple(
+ 'ExceptHookArgs',
+ 'exc_type exc_value exc_traceback thread')
+
+ def ExceptHookArgs(args):
+ return _ExceptHookArgs(*args)
+
+ def excepthook(args, /):
+ """
+ Handle uncaught Thread.run() exception.
+ """
+ if args.exc_type == SystemExit:
+ # silently ignore SystemExit
+ return
+
+ if _sys is not None and _sys.stderr is not None:
+ stderr = _sys.stderr
+ elif args.thread is not None:
+ stderr = args.thread._stderr
+ if stderr is None:
+ # do nothing if sys.stderr is None and sys.stderr was None
+ # when the thread was created
+ return
+ else:
+ # do nothing if sys.stderr is None and args.thread is None
+ return
+
+ if args.thread is not None:
+ name = args.thread.name
+ else:
+ name = get_ident()
+ print(f"Exception in thread {name}:",
+ file=stderr, flush=True)
+ _print_exception(args.exc_type, args.exc_value, args.exc_traceback,
+ file=stderr)
+ stderr.flush()
+
+
+def _make_invoke_excepthook():
+ # Create a local namespace to ensure that variables remain alive
+ # when _invoke_excepthook() is called, even if it is called late during
+ # Python shutdown. It is mostly needed for daemon threads.
+
+ old_excepthook = excepthook
+ old_sys_excepthook = _sys.excepthook
+ if old_excepthook is None:
+ raise RuntimeError("threading.excepthook is None")
+ if old_sys_excepthook is None:
+ raise RuntimeError("sys.excepthook is None")
+
+ sys_exc_info = _sys.exc_info
+ local_print = print
+ local_sys = _sys
+
+ def invoke_excepthook(thread):
+ global excepthook
+ try:
+ hook = excepthook
+ if hook is None:
+ hook = old_excepthook
+
+ args = ExceptHookArgs([*sys_exc_info(), thread])
+
+ hook(args)
+ except Exception as exc:
+ exc.__suppress_context__ = True
+ del exc
+
+ if local_sys is not None and local_sys.stderr is not None:
+ stderr = local_sys.stderr
+ else:
+ stderr = thread._stderr
+
+ local_print("Exception in threading.excepthook:",
+ file=stderr, flush=True)
+
+ if local_sys is not None and local_sys.excepthook is not None:
+ sys_excepthook = local_sys.excepthook
+ else:
+ sys_excepthook = old_sys_excepthook
+
+ sys_excepthook(*sys_exc_info())
+ finally:
+ # Break reference cycle (exception stored in a variable)
+ args = None
+
+ return invoke_excepthook
+
+
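The fallback excepthook and _make_invoke_excepthook() above are what run when Thread.run() raises. A minimal sketch of installing a custom hook (the hook name is illustrative):

import threading

def log_hook(args):
    # args is an ExceptHookArgs: exc_type, exc_value, exc_traceback, thread
    print(f"unhandled {args.exc_type.__name__} in {args.thread.name}: {args.exc_value}")

threading.excepthook = log_hook

t = threading.Thread(target=lambda: 1 / 0, name="demo")
t.start()
t.join()   # prints: unhandled ZeroDivisionError in demo: division by zero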
# The timer class was contributed by Itamar Shtull-Trauring
class Timer(Thread):
@@ -1309,8 +1309,8 @@ class _MainThread(Thread):
self._set_tstate_lock()
self._started.set()
self._set_ident()
- if _HAVE_THREAD_NATIVE_ID:
- self._set_native_id()
+ if _HAVE_THREAD_NATIVE_ID:
+ self._set_native_id()
with _active_limbo_lock:
_active[self._ident] = self
@@ -1330,8 +1330,8 @@ class _DummyThread(Thread):
self._started.set()
self._set_ident()
- if _HAVE_THREAD_NATIVE_ID:
- self._set_native_id()
+ if _HAVE_THREAD_NATIVE_ID:
+ self._set_native_id()
with _active_limbo_lock:
_active[self._ident] = self
@@ -1389,27 +1389,27 @@ def enumerate():
with _active_limbo_lock:
return list(_active.values()) + list(_limbo.values())
-
-_threading_atexits = []
-_SHUTTING_DOWN = False
-
-def _register_atexit(func, *arg, **kwargs):
- """CPython internal: register *func* to be called before joining threads.
-
- The registered *func* is called with its arguments just before all
- non-daemon threads are joined in `_shutdown()`. It provides a similar
- purpose to `atexit.register()`, but its functions are called prior to
- threading shutdown instead of interpreter shutdown.
-
- For similarity to atexit, the registered functions are called in reverse.
- """
- if _SHUTTING_DOWN:
- raise RuntimeError("can't register atexit after shutdown")
-
- call = functools.partial(func, *arg, **kwargs)
- _threading_atexits.append(call)
-
-
+
+_threading_atexits = []
+_SHUTTING_DOWN = False
+
+def _register_atexit(func, *arg, **kwargs):
+ """CPython internal: register *func* to be called before joining threads.
+
+ The registered *func* is called with its arguments just before all
+ non-daemon threads are joined in `_shutdown()`. It serves a similar
+ purpose to `atexit.register()`, but its functions are called prior to
+ threading shutdown instead of interpreter shutdown.
+
+ For similarity to atexit, the registered functions are called in reverse.
+ """
+ if _SHUTTING_DOWN:
+ raise RuntimeError("can't register atexit after shutdown")
+
+ call = functools.partial(func, *arg, **kwargs)
+ _threading_atexits.append(call)
+
+
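_register_atexit() is CPython-internal, but its contract is easy to demonstrate; a sketch, assuming a normal interpreter exit (not a production API):

import threading

def first():
    print("registered first, runs last")

def second():
    print("registered second, runs first")

threading._register_atexit(first)
threading._register_atexit(second)
# During threading._shutdown(), second() then first() run, before the
# remaining non-daemon threads are joined.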
from _thread import stack_size
# Create the main thread object,
@@ -1419,9 +1419,9 @@ from _thread import stack_size
_main_thread = _MainThread()
def _shutdown():
- """
- Wait until the Python thread state of all non-daemon threads get deleted.
- """
+ """
+ Wait until the Python thread states of all non-daemon threads get deleted.
+ """
# Obscure: other threads may be waiting to join _main_thread. That's
# dubious, but some code does it. We can't wait for C code to release
# the main thread's tstate_lock - that won't happen until the interpreter
@@ -1430,50 +1430,50 @@ def _shutdown():
if _main_thread._is_stopped:
# _shutdown() was already called
return
-
- global _SHUTTING_DOWN
- _SHUTTING_DOWN = True
-
- # Call registered threading atexit functions before threads are joined.
- # Order is reversed, similar to atexit.
- for atexit_call in reversed(_threading_atexits):
- atexit_call()
-
- # Main thread
- if _main_thread.ident == get_ident():
- tlock = _main_thread._tstate_lock
- # The main thread isn't finished yet, so its thread state lock can't
- # have been released.
- assert tlock is not None
- assert tlock.locked()
- tlock.release()
- _main_thread._stop()
- else:
- # bpo-1596321: _shutdown() must be called in the main thread.
- # If the threading module was not imported by the main thread,
- # _main_thread is the thread which imported the threading module.
- # In this case, ignore _main_thread, similar behavior than for threads
- # spawned by C libraries or using _thread.start_new_thread().
- pass
-
- # Join all non-deamon threads
- while True:
- with _shutdown_locks_lock:
- locks = list(_shutdown_locks)
- _shutdown_locks.clear()
-
- if not locks:
- break
-
- for lock in locks:
- # mimic Thread.join()
- lock.acquire()
- lock.release()
-
- # new threads can be spawned while we were waiting for the other
- # threads to complete
-
-
+
+ global _SHUTTING_DOWN
+ _SHUTTING_DOWN = True
+
+ # Call registered threading atexit functions before threads are joined.
+ # Order is reversed, similar to atexit.
+ for atexit_call in reversed(_threading_atexits):
+ atexit_call()
+
+ # Main thread
+ if _main_thread.ident == get_ident():
+ tlock = _main_thread._tstate_lock
+ # The main thread isn't finished yet, so its thread state lock can't
+ # have been released.
+ assert tlock is not None
+ assert tlock.locked()
+ tlock.release()
+ _main_thread._stop()
+ else:
+ # bpo-1596321: _shutdown() must be called in the main thread.
+ # If the threading module was not imported by the main thread,
+ # _main_thread is the thread which imported the threading module.
+ # In this case, ignore _main_thread, similar to the behavior for threads
+ # spawned by C libraries or using _thread.start_new_thread().
+ pass
+
+ # Join all non-daemon threads
+ while True:
+ with _shutdown_locks_lock:
+ locks = list(_shutdown_locks)
+ _shutdown_locks.clear()
+
+ if not locks:
+ break
+
+ for lock in locks:
+ # mimic Thread.join()
+ lock.acquire()
+ lock.release()
+
+ # new threads can be spawned while we were waiting for the other
+ # threads to complete
+
+
def main_thread():
"""Return the main thread object.
@@ -1498,26 +1498,26 @@ def _after_fork():
# Reset _active_limbo_lock, in case we forked while the lock was held
# by another (non-forked) thread. http://bugs.python.org/issue874900
global _active_limbo_lock, _main_thread
- global _shutdown_locks_lock, _shutdown_locks
- _active_limbo_lock = RLock()
+ global _shutdown_locks_lock, _shutdown_locks
+ _active_limbo_lock = RLock()
# fork() only copied the current thread; clear references to others.
new_active = {}
-
- try:
- current = _active[get_ident()]
- except KeyError:
- # fork() was called in a thread which was not spawned
- # by threading.Thread. For example, a thread spawned
- # by thread.start_new_thread().
- current = _MainThread()
-
+
+ try:
+ current = _active[get_ident()]
+ except KeyError:
+ # fork() was called in a thread which was not spawned
+ # by threading.Thread. For example, a thread spawned
+ # by thread.start_new_thread().
+ current = _MainThread()
+
_main_thread = current
-
- # reset _shutdown() locks: threads re-register their _tstate_lock below
- _shutdown_locks_lock = _allocate_lock()
- _shutdown_locks = set()
-
+
+ # reset _shutdown() locks: threads re-register their _tstate_lock below
+ _shutdown_locks_lock = _allocate_lock()
+ _shutdown_locks = set()
+
with _active_limbo_lock:
# Dangling thread instances must still have their locks reset,
# because someone may join() them.
diff --git a/contrib/tools/python3/src/Lib/timeit.py b/contrib/tools/python3/src/Lib/timeit.py
index 6c3ec01067..418e4e9404 100644
--- a/contrib/tools/python3/src/Lib/timeit.py
+++ b/contrib/tools/python3/src/Lib/timeit.py
@@ -29,8 +29,8 @@ argument in quotes and using leading spaces. Multiple -s options are
treated similarly.
If -n is not given, a suitable number of loops is calculated by trying
-increasing numbers from the sequence 1, 2, 5, 10, 20, 50, ... until the
-total time is at least 0.2 seconds.
+increasing numbers from the sequence 1, 2, 5, 10, 20, 50, ... until the
+total time is at least 0.2 seconds.
Note: there is a certain baseline overhead associated with executing a
pass statement. It differs between versions. The code here doesn't try
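The 1, 2, 5, 10, 20, 50, ... sequence described here is what Timer.autorange() iterates through; a minimal sketch:

import timeit

timer = timeit.Timer("sum(range(100))")
number, total = timer.autorange()   # grows the loop count until total >= 0.2s
print(f"{number} loops in {total:.3f}s ({total / number * 1e6:.2f} us per loop)")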
diff --git a/contrib/tools/python3/src/Lib/token.py b/contrib/tools/python3/src/Lib/token.py
index 493bf04265..e8286c3bf0 100644
--- a/contrib/tools/python3/src/Lib/token.py
+++ b/contrib/tools/python3/src/Lib/token.py
@@ -1,5 +1,5 @@
-"""Token constants."""
-# Auto-generated by Tools/scripts/generate_token.py
+"""Token constants."""
+# Auto-generated by Tools/scripts/generate_token.py
__all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF']
@@ -56,18 +56,18 @@ AT = 49
ATEQUAL = 50
RARROW = 51
ELLIPSIS = 52
-COLONEQUAL = 53
-OP = 54
-AWAIT = 55
-ASYNC = 56
-TYPE_IGNORE = 57
-TYPE_COMMENT = 58
+COLONEQUAL = 53
+OP = 54
+AWAIT = 55
+ASYNC = 56
+TYPE_IGNORE = 57
+TYPE_COMMENT = 58
# These aren't used by the C tokenizer but are needed for tokenize.py
-ERRORTOKEN = 59
-COMMENT = 60
-NL = 61
-ENCODING = 62
-N_TOKENS = 63
+ERRORTOKEN = 59
+COMMENT = 60
+NL = 61
+ENCODING = 62
+N_TOKENS = 63
# Special definitions for cooperation with parser
NT_OFFSET = 256
@@ -76,56 +76,56 @@ tok_name = {value: name
if isinstance(value, int) and not name.startswith('_')}
__all__.extend(tok_name.values())
-EXACT_TOKEN_TYPES = {
- '!=': NOTEQUAL,
- '%': PERCENT,
- '%=': PERCENTEQUAL,
- '&': AMPER,
- '&=': AMPEREQUAL,
- '(': LPAR,
- ')': RPAR,
- '*': STAR,
- '**': DOUBLESTAR,
- '**=': DOUBLESTAREQUAL,
- '*=': STAREQUAL,
- '+': PLUS,
- '+=': PLUSEQUAL,
- ',': COMMA,
- '-': MINUS,
- '-=': MINEQUAL,
- '->': RARROW,
- '.': DOT,
- '...': ELLIPSIS,
- '/': SLASH,
- '//': DOUBLESLASH,
- '//=': DOUBLESLASHEQUAL,
- '/=': SLASHEQUAL,
- ':': COLON,
- ':=': COLONEQUAL,
- ';': SEMI,
- '<': LESS,
- '<<': LEFTSHIFT,
- '<<=': LEFTSHIFTEQUAL,
- '<=': LESSEQUAL,
- '=': EQUAL,
- '==': EQEQUAL,
- '>': GREATER,
- '>=': GREATEREQUAL,
- '>>': RIGHTSHIFT,
- '>>=': RIGHTSHIFTEQUAL,
- '@': AT,
- '@=': ATEQUAL,
- '[': LSQB,
- ']': RSQB,
- '^': CIRCUMFLEX,
- '^=': CIRCUMFLEXEQUAL,
- '{': LBRACE,
- '|': VBAR,
- '|=': VBAREQUAL,
- '}': RBRACE,
- '~': TILDE,
-}
-
+EXACT_TOKEN_TYPES = {
+ '!=': NOTEQUAL,
+ '%': PERCENT,
+ '%=': PERCENTEQUAL,
+ '&': AMPER,
+ '&=': AMPEREQUAL,
+ '(': LPAR,
+ ')': RPAR,
+ '*': STAR,
+ '**': DOUBLESTAR,
+ '**=': DOUBLESTAREQUAL,
+ '*=': STAREQUAL,
+ '+': PLUS,
+ '+=': PLUSEQUAL,
+ ',': COMMA,
+ '-': MINUS,
+ '-=': MINEQUAL,
+ '->': RARROW,
+ '.': DOT,
+ '...': ELLIPSIS,
+ '/': SLASH,
+ '//': DOUBLESLASH,
+ '//=': DOUBLESLASHEQUAL,
+ '/=': SLASHEQUAL,
+ ':': COLON,
+ ':=': COLONEQUAL,
+ ';': SEMI,
+ '<': LESS,
+ '<<': LEFTSHIFT,
+ '<<=': LEFTSHIFTEQUAL,
+ '<=': LESSEQUAL,
+ '=': EQUAL,
+ '==': EQEQUAL,
+ '>': GREATER,
+ '>=': GREATEREQUAL,
+ '>>': RIGHTSHIFT,
+ '>>=': RIGHTSHIFTEQUAL,
+ '@': AT,
+ '@=': ATEQUAL,
+ '[': LSQB,
+ ']': RSQB,
+ '^': CIRCUMFLEX,
+ '^=': CIRCUMFLEXEQUAL,
+ '{': LBRACE,
+ '|': VBAR,
+ '|=': VBAREQUAL,
+ '}': RBRACE,
+ '~': TILDE,
+}
+
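EXACT_TOKEN_TYPES, now defined in token.py, maps operator spellings to token constants, and tok_name maps constants back to names; a quick sketch:

import token

print(token.EXACT_TOKEN_TYPES[':='])      # 53, i.e. token.COLONEQUAL
print(token.tok_name[token.COLONEQUAL])   # 'COLONEQUAL'
print(token.ISTERMINAL(token.OP))         # True: OP is below NT_OFFSET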
def ISTERMINAL(x):
return x < NT_OFFSET
diff --git a/contrib/tools/python3/src/Lib/tokenize.py b/contrib/tools/python3/src/Lib/tokenize.py
index a782f6250d..98a435bfa1 100644
--- a/contrib/tools/python3/src/Lib/tokenize.py
+++ b/contrib/tools/python3/src/Lib/tokenize.py
@@ -32,13 +32,13 @@ import itertools as _itertools
import re
import sys
from token import *
-from token import EXACT_TOKEN_TYPES
+from token import EXACT_TOKEN_TYPES
cookie_re = re.compile(r'^[ \t\f]*#.*?coding[:=][ \t]*([-\w.]+)', re.ASCII)
blank_re = re.compile(br'^[ \t\f]*(?:[#\r\n]|$)', re.ASCII)
import token
-__all__ = token.__all__ + ["tokenize", "generate_tokens", "detect_encoding",
+__all__ = token.__all__ + ["tokenize", "generate_tokens", "detect_encoding",
"untokenize", "TokenInfo"]
del token
@@ -82,7 +82,7 @@ Number = group(Imagnumber, Floatnumber, Intnumber)
# Return the empty string, plus all of the valid string prefixes.
def _all_string_prefixes():
# The valid string prefixes. Only contain the lower case versions,
- # and don't contain any permutations (include 'fr', but not
+ # and don't contain any permutations (include 'fr', but not
# 'rf'). The various permutations will be generated.
_valid_string_prefixes = ['b', 'r', 'u', 'f', 'br', 'fr']
# if we add binary f-strings, add: ['fb', 'fbr']
@@ -115,11 +115,11 @@ Triple = group(StringPrefix + "'''", StringPrefix + '"""')
String = group(StringPrefix + r"'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
StringPrefix + r'"[^\n"\\]*(?:\\.[^\n"\\]*)*"')
-# Sorting in reverse order puts the long operators before their prefixes.
-# Otherwise if = came before ==, == would get recognized as two instances
-# of =.
-Special = group(*map(re.escape, sorted(EXACT_TOKEN_TYPES, reverse=True)))
-Funny = group(r'\r?\n', Special)
+# Sorting in reverse order puts the long operators before their prefixes.
+# Otherwise if = came before ==, == would get recognized as two instances
+# of =.
+Special = group(*map(re.escape, sorted(EXACT_TOKEN_TYPES, reverse=True)))
+Funny = group(r'\r?\n', Special)
PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken
@@ -223,7 +223,7 @@ class Untokenizer:
startline = token[0] in (NEWLINE, NL)
prevstring = False
- for tok in _itertools.chain([token], iterable):
+ for tok in _itertools.chain([token], iterable):
toknum, tokval = tok[:2]
if toknum == ENCODING:
self.encoding = tokval
@@ -415,15 +415,15 @@ def tokenize(readline):
column where the token begins in the source; a 2-tuple (erow, ecol) of
ints specifying the row and column where the token ends in the source;
and the line on which the token was found. The line passed is the
- physical line.
+ physical line.
The first token sequence will always be an ENCODING token
which tells you which encoding was used to decode the bytes stream.
"""
encoding, consumed = detect_encoding(readline)
- empty = _itertools.repeat(b"")
- rl_gen = _itertools.chain(consumed, iter(readline, b""), empty)
- return _tokenize(rl_gen.__next__, encoding)
+ empty = _itertools.repeat(b"")
+ rl_gen = _itertools.chain(consumed, iter(readline, b""), empty)
+ return _tokenize(rl_gen.__next__, encoding)
def _tokenize(readline, encoding):
@@ -530,7 +530,7 @@ def _tokenize(readline, encoding):
continue
token, initial = line[start:end], line[start]
- if (initial in numchars or # ordinary number
+ if (initial in numchars or # ordinary number
(initial == '.' and token != '.' and token != '...')):
yield TokenInfo(NUMBER, token, spos, epos, line)
elif initial in '\r\n':
@@ -602,7 +602,7 @@ def _tokenize(readline, encoding):
pos += 1
# Add an implicit NEWLINE if the input doesn't end in one
- if last_line and last_line[-1] not in '\r\n' and not last_line.strip().startswith("#"):
+ if last_line and last_line[-1] not in '\r\n' and not last_line.strip().startswith("#"):
yield TokenInfo(NEWLINE, '', (lnum - 1, len(last_line)), (lnum - 1, len(last_line) + 1), '')
for indent in indents[1:]: # pop remaining indent levels
yield TokenInfo(DEDENT, '', (lnum, 0), (lnum, 0), '')
@@ -610,11 +610,11 @@ def _tokenize(readline, encoding):
def generate_tokens(readline):
- """Tokenize a source reading Python code as unicode strings.
-
- This has the same API as tokenize(), except that it expects the *readline*
- callable to return str objects instead of bytes.
- """
+ """Tokenize a source reading Python code as unicode strings.
+
+ This has the same API as tokenize(), except that it expects the *readline*
+ callable to return str objects instead of bytes.
+ """
return _tokenize(readline, None)
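A minimal sketch of the str-based entry point documented above, driving generate_tokens() with a readline callable built from an in-memory source:

import io
import tokenize

source = "answer := 42\n"
for tok in tokenize.generate_tokens(io.StringIO(source).readline):
    print(tokenize.tok_name[tok.exact_type], repr(tok.string))
# NAME 'answer' / COLONEQUAL ':=' / NUMBER '42' / NEWLINE '\n' / ENDMARKER ''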
def main():
@@ -622,8 +622,8 @@ def main():
# Helper error handling routines
def perror(message):
- sys.stderr.write(message)
- sys.stderr.write('\n')
+ sys.stderr.write(message)
+ sys.stderr.write('\n')
def error(message, filename=None, location=None):
if location:
diff --git a/contrib/tools/python3/src/Lib/trace.py b/contrib/tools/python3/src/Lib/trace.py
index c505d8bc72..232dfc2f10 100644
--- a/contrib/tools/python3/src/Lib/trace.py
+++ b/contrib/tools/python3/src/Lib/trace.py
@@ -52,7 +52,7 @@ __all__ = ['Trace', 'CoverageResults']
import linecache
import os
import sys
-import sysconfig
+import sysconfig
import token
import tokenize
import inspect
@@ -287,9 +287,9 @@ class CoverageResults:
if self.outfile:
# try and store counts and module info into self.outfile
try:
- with open(self.outfile, 'wb') as f:
- pickle.dump((self.counts, self.calledfuncs, self.callers),
- f, 1)
+ with open(self.outfile, 'wb') as f:
+ pickle.dump((self.counts, self.calledfuncs, self.callers),
+ f, 1)
except OSError as err:
print("Can't save counts files because %s" % err, file=sys.stderr)
@@ -453,7 +453,7 @@ class Trace:
sys.settrace(None)
threading.settrace(None)
- def runfunc(self, func, /, *args, **kw):
+ def runfunc(self, func, /, *args, **kw):
result = None
if not self.donothing:
sys.settrace(self.globaltrace)
@@ -652,9 +652,9 @@ def main():
help='Ignore files in the given directory '
'(multiple directories can be joined by os.pathsep).')
- parser.add_argument('--module', action='store_true', default=False,
- help='Trace a module. ')
- parser.add_argument('progname', nargs='?',
+ parser.add_argument('--module', action='store_true', default=False,
+ help='Trace a module. ')
+ parser.add_argument('progname', nargs='?',
help='file to run as main program')
parser.add_argument('arguments', nargs=argparse.REMAINDER,
help='arguments to the program')
@@ -662,8 +662,8 @@ def main():
opts = parser.parse_args()
if opts.ignore_dir:
- _prefix = sysconfig.get_path("stdlib")
- _exec_prefix = sysconfig.get_path("platstdlib")
+ _prefix = sysconfig.get_path("stdlib")
+ _exec_prefix = sysconfig.get_path("platstdlib")
def parse_ignore_dir(s):
s = os.path.expanduser(os.path.expandvars(s))
@@ -691,40 +691,40 @@ def main():
if opts.summary and not opts.count:
parser.error('--summary can only be used with --count or --report')
- if opts.progname is None:
- parser.error('progname is missing: required with the main options')
+ if opts.progname is None:
+ parser.error('progname is missing: required with the main options')
t = Trace(opts.count, opts.trace, countfuncs=opts.listfuncs,
countcallers=opts.trackcalls, ignoremods=opts.ignore_module,
ignoredirs=opts.ignore_dir, infile=opts.file,
outfile=opts.file, timing=opts.timing)
try:
- if opts.module:
- import runpy
- module_name = opts.progname
- mod_name, mod_spec, code = runpy._get_module_details(module_name)
- sys.argv = [code.co_filename, *opts.arguments]
- globs = {
- '__name__': '__main__',
- '__file__': code.co_filename,
- '__package__': mod_spec.parent,
- '__loader__': mod_spec.loader,
- '__spec__': mod_spec,
- '__cached__': None,
- }
- else:
- sys.argv = [opts.progname, *opts.arguments]
- sys.path[0] = os.path.dirname(opts.progname)
-
- with open(opts.progname, 'rb') as fp:
- code = compile(fp.read(), opts.progname, 'exec')
- # try to emulate __main__ namespace as much as possible
- globs = {
- '__file__': opts.progname,
- '__name__': '__main__',
- '__package__': None,
- '__cached__': None,
- }
+ if opts.module:
+ import runpy
+ module_name = opts.progname
+ mod_name, mod_spec, code = runpy._get_module_details(module_name)
+ sys.argv = [code.co_filename, *opts.arguments]
+ globs = {
+ '__name__': '__main__',
+ '__file__': code.co_filename,
+ '__package__': mod_spec.parent,
+ '__loader__': mod_spec.loader,
+ '__spec__': mod_spec,
+ '__cached__': None,
+ }
+ else:
+ sys.argv = [opts.progname, *opts.arguments]
+ sys.path[0] = os.path.dirname(opts.progname)
+
+ with open(opts.progname, 'rb') as fp:
+ code = compile(fp.read(), opts.progname, 'exec')
+ # try to emulate __main__ namespace as much as possible
+ globs = {
+ '__file__': opts.progname,
+ '__name__': '__main__',
+ '__package__': None,
+ '__cached__': None,
+ }
t.runctx(code, globs, globs)
except OSError as err:
sys.exit("Cannot run file %r because: %s" % (sys.argv[0], err))
diff --git a/contrib/tools/python3/src/Lib/traceback.py b/contrib/tools/python3/src/Lib/traceback.py
index c771a137cd..116aa94918 100644
--- a/contrib/tools/python3/src/Lib/traceback.py
+++ b/contrib/tools/python3/src/Lib/traceback.py
@@ -279,9 +279,9 @@ class FrameSummary:
return "<FrameSummary file {filename}, line {lineno} in {name}>".format(
filename=self.filename, lineno=self.lineno, name=self.name)
- def __len__(self):
- return 4
-
+ def __len__(self):
+ return 4
+
@property
def line(self):
if self._line is None:
@@ -476,37 +476,37 @@ class TracebackException:
_seen.add(id(exc_value))
# Gracefully handle (the way Python 2.4 and earlier did) the case of
# being called with no type or value (None, None, None).
- self._truncated = False
- try:
- if (exc_value and exc_value.__cause__ is not None
- and id(exc_value.__cause__) not in _seen):
- cause = TracebackException(
- type(exc_value.__cause__),
- exc_value.__cause__,
- exc_value.__cause__.__traceback__,
- limit=limit,
- lookup_lines=False,
- capture_locals=capture_locals,
- _seen=_seen)
- else:
- cause = None
- if (exc_value and exc_value.__context__ is not None
- and id(exc_value.__context__) not in _seen):
- context = TracebackException(
- type(exc_value.__context__),
- exc_value.__context__,
- exc_value.__context__.__traceback__,
- limit=limit,
- lookup_lines=False,
- capture_locals=capture_locals,
- _seen=_seen)
- else:
- context = None
- except RecursionError:
- # The recursive call to the constructors above
- # may result in a stack overflow for long exception chains,
- # so we must truncate.
- self._truncated = True
+ self._truncated = False
+ try:
+ if (exc_value and exc_value.__cause__ is not None
+ and id(exc_value.__cause__) not in _seen):
+ cause = TracebackException(
+ type(exc_value.__cause__),
+ exc_value.__cause__,
+ exc_value.__cause__.__traceback__,
+ limit=limit,
+ lookup_lines=False,
+ capture_locals=capture_locals,
+ _seen=_seen)
+ else:
+ cause = None
+ if (exc_value and exc_value.__context__ is not None
+ and id(exc_value.__context__) not in _seen):
+ context = TracebackException(
+ type(exc_value.__context__),
+ exc_value.__context__,
+ exc_value.__context__.__traceback__,
+ limit=limit,
+ lookup_lines=False,
+ capture_locals=capture_locals,
+ _seen=_seen)
+ else:
+ context = None
+ except RecursionError:
+ # The recursive call to the constructors above
+ # may result in a stack overflow for long exception chains,
+ # so we must truncate.
+ self._truncated = True
cause = None
context = None
self.__cause__ = cause
@@ -524,8 +524,8 @@ class TracebackException:
if exc_type and issubclass(exc_type, SyntaxError):
# Handle SyntaxError's specially
self.filename = exc_value.filename
- lno = exc_value.lineno
- self.lineno = str(lno) if lno is not None else None
+ lno = exc_value.lineno
+ self.lineno = str(lno) if lno is not None else None
self.text = exc_value.text
self.offset = exc_value.offset
self.msg = exc_value.msg
@@ -547,9 +547,9 @@ class TracebackException:
self.__cause__._load_lines()
def __eq__(self, other):
- if isinstance(other, TracebackException):
- return self.__dict__ == other.__dict__
- return NotImplemented
+ if isinstance(other, TracebackException):
+ return self.__dict__ == other.__dict__
+ return NotImplemented
def __str__(self):
return self._str
@@ -560,7 +560,7 @@ class TracebackException:
The return value is a generator of strings, each ending in a newline.
Normally, the generator emits a single string; however, for
- SyntaxError exceptions, it emits several lines that (when
+ SyntaxError exceptions, it emits several lines that (when
printed) display detailed information about where the syntax
error occurred.
@@ -574,42 +574,42 @@ class TracebackException:
stype = self.exc_type.__qualname__
smod = self.exc_type.__module__
if smod not in ("__main__", "builtins"):
- if not isinstance(smod, str):
- smod = "<unknown>"
+ if not isinstance(smod, str):
+ smod = "<unknown>"
stype = smod + '.' + stype
if not issubclass(self.exc_type, SyntaxError):
yield _format_final_exc_line(stype, self._str)
- else:
- yield from self._format_syntax_error(stype)
-
- def _format_syntax_error(self, stype):
- """Format SyntaxError exceptions (internal helper)."""
- # Show exactly where the problem was found.
- filename_suffix = ''
- if self.lineno is not None:
- yield ' File "{}", line {}\n'.format(
- self.filename or "<string>", self.lineno)
- elif self.filename is not None:
- filename_suffix = ' ({})'.format(self.filename)
-
- text = self.text
- if text is not None:
- # text = " foo\n"
- # rtext = " foo"
- # ltext = "foo"
- rtext = text.rstrip('\n')
- ltext = rtext.lstrip(' \n\f')
- spaces = len(rtext) - len(ltext)
- yield ' {}\n'.format(ltext)
- # Convert 1-based column offset to 0-based index into stripped text
- caret = (self.offset or 0) - 1 - spaces
- if caret >= 0:
+ else:
+ yield from self._format_syntax_error(stype)
+
+ def _format_syntax_error(self, stype):
+ """Format SyntaxError exceptions (internal helper)."""
+ # Show exactly where the problem was found.
+ filename_suffix = ''
+ if self.lineno is not None:
+ yield ' File "{}", line {}\n'.format(
+ self.filename or "<string>", self.lineno)
+ elif self.filename is not None:
+ filename_suffix = ' ({})'.format(self.filename)
+
+ text = self.text
+ if text is not None:
+ # text = " foo\n"
+ # rtext = " foo"
+ # ltext = "foo"
+ rtext = text.rstrip('\n')
+ ltext = rtext.lstrip(' \n\f')
+ spaces = len(rtext) - len(ltext)
+ yield ' {}\n'.format(ltext)
+ # Convert 1-based column offset to 0-based index into stripped text
+ caret = (self.offset or 0) - 1 - spaces
+ if caret >= 0:
# non-space whitespace (like tabs) must be kept for alignment
- caretspace = ((c if c.isspace() else ' ') for c in ltext[:caret])
+ caretspace = ((c if c.isspace() else ' ') for c in ltext[:caret])
yield ' {}^\n'.format(''.join(caretspace))
msg = self.msg or "<no detail available>"
- yield "{}: {}{}\n".format(stype, msg, filename_suffix)
+ yield "{}: {}{}\n".format(stype, msg, filename_suffix)
def format(self, *, chain=True):
"""Format the exception.
@@ -631,11 +631,11 @@ class TracebackException:
not self.__suppress_context__):
yield from self.__context__.format(chain=chain)
yield _context_message
- if self._truncated:
- yield (
- 'Chained exceptions have been truncated to avoid '
- 'stack overflow in traceback formatting:\n')
- if self.stack:
+ if self._truncated:
+ yield (
+ 'Chained exceptions have been truncated to avoid '
+ 'stack overflow in traceback formatting:\n')
+ if self.stack:
yield 'Traceback (most recent call last):\n'
- yield from self.stack.format()
+ yield from self.stack.format()
yield from self.format_exception_only()
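The SyntaxError formatting path reworked above is reachable through the public TracebackException API; a small sketch:

import traceback

try:
    compile("def broken(:", "<demo>", "exec")
except SyntaxError as exc:
    te = traceback.TracebackException.from_exception(exc)
    print("".join(te.format_exception_only()), end="")
    # Emits the File "<demo>", line 1 header, the source line, and a caret
    # positioned using the 1-based offset handling shown in the diff.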
diff --git a/contrib/tools/python3/src/Lib/tracemalloc.py b/contrib/tools/python3/src/Lib/tracemalloc.py
index cec99c5970..ddb216dd00 100644
--- a/contrib/tools/python3/src/Lib/tracemalloc.py
+++ b/contrib/tools/python3/src/Lib/tracemalloc.py
@@ -43,8 +43,8 @@ class Statistic:
return hash((self.traceback, self.size, self.count))
def __eq__(self, other):
- if not isinstance(other, Statistic):
- return NotImplemented
+ if not isinstance(other, Statistic):
+ return NotImplemented
return (self.traceback == other.traceback
and self.size == other.size
and self.count == other.count)
@@ -86,8 +86,8 @@ class StatisticDiff:
self.count, self.count_diff))
def __eq__(self, other):
- if not isinstance(other, StatisticDiff):
- return NotImplemented
+ if not isinstance(other, StatisticDiff):
+ return NotImplemented
return (self.traceback == other.traceback
and self.size == other.size
and self.size_diff == other.size_diff
@@ -157,13 +157,13 @@ class Frame:
return self._frame[1]
def __eq__(self, other):
- if not isinstance(other, Frame):
- return NotImplemented
+ if not isinstance(other, Frame):
+ return NotImplemented
return (self._frame == other._frame)
def __lt__(self, other):
- if not isinstance(other, Frame):
- return NotImplemented
+ if not isinstance(other, Frame):
+ return NotImplemented
return (self._frame < other._frame)
def __hash__(self):
@@ -182,21 +182,21 @@ class Traceback(Sequence):
Sequence of Frame instances sorted from the oldest frame
to the most recent frame.
"""
- __slots__ = ("_frames", '_total_nframe')
+ __slots__ = ("_frames", '_total_nframe')
- def __init__(self, frames, total_nframe=None):
+ def __init__(self, frames, total_nframe=None):
Sequence.__init__(self)
# frames is a tuple of frame tuples: see Frame constructor for the
# format of a frame tuple; it is reversed, because _tracemalloc
# returns frames sorted from most recent to oldest, but the
# Python API expects oldest to most recent
self._frames = tuple(reversed(frames))
- self._total_nframe = total_nframe
-
- @property
- def total_nframe(self):
- return self._total_nframe
+ self._total_nframe = total_nframe
+ @property
+ def total_nframe(self):
+ return self._total_nframe
+
def __len__(self):
return len(self._frames)
@@ -213,25 +213,25 @@ class Traceback(Sequence):
return hash(self._frames)
def __eq__(self, other):
- if not isinstance(other, Traceback):
- return NotImplemented
+ if not isinstance(other, Traceback):
+ return NotImplemented
return (self._frames == other._frames)
def __lt__(self, other):
- if not isinstance(other, Traceback):
- return NotImplemented
+ if not isinstance(other, Traceback):
+ return NotImplemented
return (self._frames < other._frames)
def __str__(self):
return str(self[0])
def __repr__(self):
- s = f"<Traceback {tuple(self)}"
- if self._total_nframe is None:
- s += ">"
- else:
- s += f" total_nframe={self.total_nframe}>"
- return s
+ s = f"<Traceback {tuple(self)}"
+ if self._total_nframe is None:
+ s += ">"
+ else:
+ s += f" total_nframe={self.total_nframe}>"
+ return s
def format(self, limit=None, most_recent_first=False):
lines = []
@@ -290,11 +290,11 @@ class Trace:
@property
def traceback(self):
- return Traceback(*self._trace[2:])
+ return Traceback(*self._trace[2:])
def __eq__(self, other):
- if not isinstance(other, Trace):
- return NotImplemented
+ if not isinstance(other, Trace):
+ return NotImplemented
return (self._trace == other._trace)
def __hash__(self):
@@ -327,8 +327,8 @@ class _Traces(Sequence):
return trace._trace in self._traces
def __eq__(self, other):
- if not isinstance(other, _Traces):
- return NotImplemented
+ if not isinstance(other, _Traces):
+ return NotImplemented
return (self._traces == other._traces)
def __repr__(self):
@@ -388,7 +388,7 @@ class Filter(BaseFilter):
return self._match_frame(filename, lineno)
def _match(self, trace):
- domain, size, traceback, total_nframe = trace
+ domain, size, traceback, total_nframe = trace
res = self._match_traceback(traceback)
if self.domain is not None:
if self.inclusive:
@@ -408,7 +408,7 @@ class DomainFilter(BaseFilter):
return self._domain
def _match(self, trace):
- domain, size, traceback, total_nframe = trace
+ domain, size, traceback, total_nframe = trace
return (domain == self.domain) ^ (not self.inclusive)
@@ -485,7 +485,7 @@ class Snapshot:
tracebacks = {}
if not cumulative:
for trace in self.traces._traces:
- domain, size, trace_traceback, total_nframe = trace
+ domain, size, trace_traceback, total_nframe = trace
try:
traceback = tracebacks[trace_traceback]
except KeyError:
@@ -506,7 +506,7 @@ class Snapshot:
else:
# cumulative statistics
for trace in self.traces._traces:
- domain, size, trace_traceback, total_nframe = trace
+ domain, size, trace_traceback, total_nframe = trace
for frame in trace_traceback:
try:
traceback = tracebacks[frame]
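The trace tuples unpacked above now carry a fourth total_nframe field; at the user level the same data is reached through snapshots. A minimal sketch:

import tracemalloc

tracemalloc.start(10)                       # keep up to 10 frames per allocation
data = [bytes(1000) for _ in range(100)]    # something worth tracking
snapshot = tracemalloc.take_snapshot()
tracemalloc.stop()

for stat in snapshot.statistics("lineno")[:3]:
    print(stat)   # e.g. "<stdin>:6: size=100 KiB, count=100, average=1000 B"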
diff --git a/contrib/tools/python3/src/Lib/turtle.py b/contrib/tools/python3/src/Lib/turtle.py
index 9c8f6ced25..975d851b9f 100644
--- a/contrib/tools/python3/src/Lib/turtle.py
+++ b/contrib/tools/python3/src/Lib/turtle.py
@@ -38,7 +38,7 @@ pictures can easily be drawn.
----- turtle.py
This module is an extended reimplementation of turtle.py from the
-Python standard distribution up to Python 2.5. (See: https://www.python.org)
+Python standard distribution up to Python 2.5. (See: https://www.python.org)
It tries to keep the merits of turtle.py and to be (nearly) 100%
compatible with it. This means in the first place to enable the
@@ -258,7 +258,7 @@ class Vec2D(tuple):
def __rmul__(self, other):
if isinstance(other, int) or isinstance(other, float):
return Vec2D(self[0]*other, self[1]*other)
- return NotImplemented
+ return NotImplemented
def __sub__(self, other):
return Vec2D(self[0]-other[0], self[1]-other[1])
def __neg__(self):
@@ -464,18 +464,18 @@ class TurtleScreenBase(object):
a corresponding TurtleScreenBase class has to be implemented.
"""
- def _blankimage(self):
+ def _blankimage(self):
"""return a blank image object
"""
- img = TK.PhotoImage(width=1, height=1, master=self.cv)
+ img = TK.PhotoImage(width=1, height=1, master=self.cv)
img.blank()
return img
- def _image(self, filename):
+ def _image(self, filename):
"""return an image object containing the
imagedata from a gif-file named filename.
"""
- return TK.PhotoImage(file=filename, master=self.cv)
+ return TK.PhotoImage(file=filename, master=self.cv)
def __init__(self, cv):
self.cv = cv
@@ -809,7 +809,7 @@ class TurtleScreenBase(object):
>>> screen.mainloop()
"""
- self.cv.tk.mainloop()
+ self.cv.tk.mainloop()
def textinput(self, title, prompt):
"""Pop up a dialog window for input of a string.
@@ -824,7 +824,7 @@ class TurtleScreenBase(object):
>>> screen.textinput("NIM", "Name of first player:")
"""
- return simpledialog.askstring(title, prompt, parent=self.cv)
+ return simpledialog.askstring(title, prompt, parent=self.cv)
def numinput(self, title, prompt, default=None, minval=None, maxval=None):
"""Pop up a dialog window for input of a number.
@@ -832,7 +832,7 @@ class TurtleScreenBase(object):
Arguments: title is the title of the dialog window,
prompt is a text mostly describing what numerical information to input.
default: default value
- minval: minimum value for input
+ minval: minimum value for input
maxval: maximum value for input
The number input must be in the range minval .. maxval if these are
@@ -845,8 +845,8 @@ class TurtleScreenBase(object):
"""
return simpledialog.askfloat(title, prompt, initialvalue=default,
- minvalue=minval, maxvalue=maxval,
- parent=self.cv)
+ minvalue=minval, maxvalue=maxval,
+ parent=self.cv)
##############################################################################
@@ -964,8 +964,8 @@ class TurtleScreen(TurtleScreenBase):
def __init__(self, cv, mode=_CFG["mode"],
colormode=_CFG["colormode"], delay=_CFG["delay"]):
- TurtleScreenBase.__init__(self, cv)
-
+ TurtleScreenBase.__init__(self, cv)
+
self._shapes = {
"arrow" : Shape("polygon", ((-10,0), (10,0), (0,10))),
"turtle" : Shape("polygon", ((0,16), (-2,14), (-1,10), (-4,7),
@@ -1569,7 +1569,7 @@ class TNavigator(object):
fullcircle - a number
Set angle measurement units, i. e. set number
- of 'degrees' for a full circle. Default value is
+ of 'degrees' for a full circle. Default value is
360 degrees.
Example (for a Turtle instance named turtle):
@@ -1645,7 +1645,7 @@ class TNavigator(object):
Argument:
distance -- a number
- Move the turtle backward by distance, opposite to the direction the
+ Move the turtle backward by distance, opposite to the direction the
turtle is headed. Do not change the turtle's heading.
Example (for a Turtle instance named turtle):
@@ -2877,8 +2877,8 @@ class RawTurtle(TPen, TNavigator):
between the orientation of the turtleshape and the heading of the
turtle (its direction of movement).
- (Incorrectly marked as deprecated since Python 3.1, it is really
- settiltangle that is deprecated.)
+ (Incorrectly marked as deprecated since Python 3.1, it is really
+ settiltangle that is deprecated.)
Examples (for a Turtle instance named turtle):
>>> turtle.shape("circle")
diff --git a/contrib/tools/python3/src/Lib/types.py b/contrib/tools/python3/src/Lib/types.py
index f15fda5933..c5197adcd9 100644
--- a/contrib/tools/python3/src/Lib/types.py
+++ b/contrib/tools/python3/src/Lib/types.py
@@ -15,13 +15,13 @@ CodeType = type(_f.__code__)
MappingProxyType = type(type.__dict__)
SimpleNamespace = type(sys.implementation)
-def _cell_factory():
- a = 1
- def f():
- nonlocal a
- return f.__closure__[0]
-CellType = type(_cell_factory())
-
+def _cell_factory():
+ a = 1
+ def f():
+ nonlocal a
+ return f.__closure__[0]
+CellType = type(_cell_factory())
+
def _g():
yield 1
GeneratorType = type(_g())
@@ -82,7 +82,7 @@ def resolve_bases(bases):
updated = False
shift = 0
for i, base in enumerate(bases):
- if isinstance(base, type) and not isinstance(base, GenericAlias):
+ if isinstance(base, type) and not isinstance(base, GenericAlias):
continue
if not hasattr(base, "__mro_entries__"):
continue
@@ -262,8 +262,8 @@ def coroutine(func):
if co_flags & 0x20:
# TODO: Implement this in C.
co = func.__code__
- # 0x100 == CO_ITERABLE_COROUTINE
- func.__code__ = co.replace(co_flags=co.co_flags | 0x100)
+ # 0x100 == CO_ITERABLE_COROUTINE
+ func.__code__ = co.replace(co_flags=co.co_flags | 0x100)
return func
# The following code is primarily to support functions that
@@ -293,7 +293,7 @@ def coroutine(func):
return wrapped
-GenericAlias = type(list[int])
-
-
+GenericAlias = type(list[int])
+
+
__all__ = [n for n in globals() if n[:1] != '_']
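GenericAlias, re-exported from types above, is the runtime type behind parameterized builtins such as list[int]; a quick sketch:

from types import GenericAlias

alias = GenericAlias(list, (int,))
print(alias)                              # list[int]
print(alias == list[int])                 # True
print(alias.__origin__, alias.__args__)   # <class 'list'> (<class 'int'>,)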
diff --git a/contrib/tools/python3/src/Lib/typing.py b/contrib/tools/python3/src/Lib/typing.py
index da70d4115f..7b687082a4 100644
--- a/contrib/tools/python3/src/Lib/typing.py
+++ b/contrib/tools/python3/src/Lib/typing.py
@@ -9,15 +9,15 @@ At large scale, the structure of the module is following:
* The core of internal generics API: _GenericAlias and _VariadicGenericAlias, the latter is
currently only used by Tuple and Callable. All subscripted types like X[int], Union[int, str],
etc., are instances of either of these classes.
-* The public counterpart of the generics API consists of two classes: Generic and Protocol.
+* The public counterpart of the generics API consists of two classes: Generic and Protocol.
* Public helper functions: get_type_hints, overload, cast, no_type_check,
no_type_check_decorator.
* Generic aliases for collections.abc ABCs and few additional protocols.
-* Special types: NewType, NamedTuple, TypedDict.
+* Special types: NewType, NamedTuple, TypedDict.
* Wrapper submodules for re and io related types.
"""
-from abc import abstractmethod, ABCMeta
+from abc import abstractmethod, ABCMeta
import collections
import collections.abc
import contextlib
@@ -26,21 +26,21 @@ import operator
import re as stdlib_re # Avoid confusion with the re we export.
import sys
import types
-from types import WrapperDescriptorType, MethodWrapperType, MethodDescriptorType, GenericAlias
+from types import WrapperDescriptorType, MethodWrapperType, MethodDescriptorType, GenericAlias
# Please keep __all__ alphabetized within each category.
__all__ = [
# Super-special typing primitives.
- 'Annotated',
+ 'Annotated',
'Any',
'Callable',
'ClassVar',
- 'Final',
- 'ForwardRef',
+ 'Final',
+ 'ForwardRef',
'Generic',
- 'Literal',
+ 'Literal',
'Optional',
- 'Protocol',
+ 'Protocol',
'Tuple',
'Type',
'TypeVar',
@@ -78,44 +78,44 @@ __all__ = [
'SupportsBytes',
'SupportsComplex',
'SupportsFloat',
- 'SupportsIndex',
+ 'SupportsIndex',
'SupportsInt',
'SupportsRound',
# Concrete collection types.
- 'ChainMap',
+ 'ChainMap',
'Counter',
'Deque',
'Dict',
'DefaultDict',
'List',
- 'OrderedDict',
+ 'OrderedDict',
'Set',
'FrozenSet',
'NamedTuple', # Not really a type.
- 'TypedDict', # Not really a type.
+ 'TypedDict', # Not really a type.
'Generator',
-
- # Other concrete types.
- 'BinaryIO',
- 'IO',
- 'Match',
- 'Pattern',
- 'TextIO',
+
+ # Other concrete types.
+ 'BinaryIO',
+ 'IO',
+ 'Match',
+ 'Pattern',
+ 'TextIO',
# One-off things.
'AnyStr',
'cast',
- 'final',
- 'get_args',
- 'get_origin',
+ 'final',
+ 'get_args',
+ 'get_origin',
'get_type_hints',
'NewType',
'no_type_check',
'no_type_check_decorator',
'NoReturn',
'overload',
- 'runtime_checkable',
+ 'runtime_checkable',
'Text',
'TYPE_CHECKING',
]
@@ -125,16 +125,16 @@ __all__ = [
# legitimate imports of those modules.
-def _type_convert(arg, module=None):
- """For converting None to type(None), and strings to ForwardRef."""
- if arg is None:
- return type(None)
- if isinstance(arg, str):
- return ForwardRef(arg, module=module)
- return arg
-
-
-def _type_check(arg, msg, is_argument=True, module=None, *, is_class=False):
+def _type_convert(arg, module=None):
+ """For converting None to type(None), and strings to ForwardRef."""
+ if arg is None:
+ return type(None)
+ if isinstance(arg, str):
+ return ForwardRef(arg, module=module)
+ return arg
+
+
+def _type_check(arg, msg, is_argument=True, module=None, *, is_class=False):
"""Check that the argument is a type, and return it (internal helper).
As a special case, accept None and return type(None) instead. Also wrap strings
@@ -146,19 +146,19 @@ def _type_check(arg, msg, is_argument=True, module=None, *, is_class=False):
We append the repr() of the actual value (truncated to 100 chars).
"""
- invalid_generic_forms = (Generic, Protocol)
- if not is_class:
- invalid_generic_forms += (ClassVar,)
- if is_argument:
- invalid_generic_forms += (Final,)
+ invalid_generic_forms = (Generic, Protocol)
+ if not is_class:
+ invalid_generic_forms += (ClassVar,)
+ if is_argument:
+ invalid_generic_forms += (Final,)
- arg = _type_convert(arg, module=module)
+ arg = _type_convert(arg, module=module)
if (isinstance(arg, _GenericAlias) and
arg.__origin__ in invalid_generic_forms):
raise TypeError(f"{arg} is not valid as type argument")
- if arg in (Any, NoReturn, Final):
- return arg
- if isinstance(arg, _SpecialForm) or arg in (Generic, Protocol):
+ if arg in (Any, NoReturn, Final):
+ return arg
+ if isinstance(arg, _SpecialForm) or arg in (Generic, Protocol):
raise TypeError(f"Plain {arg} is not valid as type argument")
if isinstance(arg, (type, TypeVar, ForwardRef)):
return arg
@@ -175,8 +175,8 @@ def _type_repr(obj):
typically enough to uniquely identify a type. For everything
else, we fall back on repr(obj).
"""
- if isinstance(obj, types.GenericAlias):
- return repr(obj)
+ if isinstance(obj, types.GenericAlias):
+ return repr(obj)
if isinstance(obj, type):
if obj.__module__ == 'builtins':
return obj.__qualname__
@@ -198,16 +198,16 @@ def _collect_type_vars(types):
for t in types:
if isinstance(t, TypeVar) and t not in tvars:
tvars.append(t)
- if isinstance(t, (_GenericAlias, GenericAlias)):
+ if isinstance(t, (_GenericAlias, GenericAlias)):
tvars.extend([t for t in t.__parameters__ if t not in tvars])
return tuple(tvars)
-def _check_generic(cls, parameters, elen):
+def _check_generic(cls, parameters, elen):
"""Check correct count for parameters of a generic cls (internal helper).
This gives a nice error message in case of count mismatch.
"""
- if not elen:
+ if not elen:
raise TypeError(f"{cls} is not a generic class")
alen = len(parameters)
if alen != elen:
@@ -215,20 +215,20 @@ def _check_generic(cls, parameters, elen):
f" actual {alen}, expected {elen}")
-def _deduplicate(params):
- # Weed out strict duplicates, preserving the first of each occurrence.
- all_params = set(params)
- if len(all_params) < len(params):
- new_params = []
- for t in params:
- if t in all_params:
- new_params.append(t)
- all_params.remove(t)
- params = new_params
- assert not all_params, all_params
- return params
-
-
+def _deduplicate(params):
+ # Weed out strict duplicates, preserving the first of each occurrence.
+ all_params = set(params)
+ if len(all_params) < len(params):
+ new_params = []
+ for t in params:
+ if t in all_params:
+ new_params.append(t)
+ all_params.remove(t)
+ params = new_params
+ assert not all_params, all_params
+ return params
+
+
def _remove_dups_flatten(parameters):
"""An internal helper for Union creation and substitution: flatten Unions
among parameters, then remove duplicates.
@@ -236,68 +236,68 @@ def _remove_dups_flatten(parameters):
# Flatten out Union[Union[...], ...].
params = []
for p in parameters:
- if isinstance(p, _UnionGenericAlias):
+ if isinstance(p, _UnionGenericAlias):
params.extend(p.__args__)
elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union:
params.extend(p[1:])
else:
params.append(p)
-
- return tuple(_deduplicate(params))
-
-
-def _flatten_literal_params(parameters):
- """An internal helper for Literal creation: flatten Literals among parameters"""
- params = []
- for p in parameters:
- if isinstance(p, _LiteralGenericAlias):
- params.extend(p.__args__)
- else:
- params.append(p)
+
+ return tuple(_deduplicate(params))
+
+
+def _flatten_literal_params(parameters):
+ """An internal helper for Literal creation: flatten Literals among parameters"""
+ params = []
+ for p in parameters:
+ if isinstance(p, _LiteralGenericAlias):
+ params.extend(p.__args__)
+ else:
+ params.append(p)
return tuple(params)
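The deduplication and flattening helpers above are what normalize Union and Literal arguments; the effect is visible at the public level (Literal deduplication assumes this version of typing.py):

from typing import Literal, Union

print(Union[int, Union[str, int]])   # typing.Union[int, str] -- flattened, duplicates dropped
print(Union[int, int])               # <class 'int'> -- a single argument collapses
print(Literal[1, Literal[2, 1]])     # typing.Literal[1, 2] -- nested Literals flattened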
_cleanups = []
-def _tp_cache(func=None, /, *, typed=False):
+def _tp_cache(func=None, /, *, typed=False):
"""Internal wrapper caching __getitem__ of generic types with a fallback to
original function for non-hashable arguments.
"""
- def decorator(func):
- cached = functools.lru_cache(typed=typed)(func)
- _cleanups.append(cached.cache_clear)
-
- @functools.wraps(func)
- def inner(*args, **kwds):
- try:
- return cached(*args, **kwds)
- except TypeError:
- pass # All real errors (not unhashable args) are raised below.
- return func(*args, **kwds)
- return inner
-
- if func is not None:
- return decorator(func)
-
- return decorator
-
-def _eval_type(t, globalns, localns, recursive_guard=frozenset()):
- """Evaluate all forward references in the given type t.
+ def decorator(func):
+ cached = functools.lru_cache(typed=typed)(func)
+ _cleanups.append(cached.cache_clear)
+
+ @functools.wraps(func)
+ def inner(*args, **kwds):
+ try:
+ return cached(*args, **kwds)
+ except TypeError:
+ pass # All real errors (not unhashable args) are raised below.
+ return func(*args, **kwds)
+ return inner
+
+ if func is not None:
+ return decorator(func)
+
+ return decorator
+
+def _eval_type(t, globalns, localns, recursive_guard=frozenset()):
+ """Evaluate all forward references in the given type t.
For use of globalns and localns see the docstring for get_type_hints().
- recursive_guard is used to prevent infinite recursion
- with recursive ForwardRef.
+ recursive_guard is used to prevent infinite recursion
+ with recursive ForwardRef.
"""
if isinstance(t, ForwardRef):
- return t._evaluate(globalns, localns, recursive_guard)
- if isinstance(t, (_GenericAlias, GenericAlias)):
- ev_args = tuple(_eval_type(a, globalns, localns, recursive_guard) for a in t.__args__)
+ return t._evaluate(globalns, localns, recursive_guard)
+ if isinstance(t, (_GenericAlias, GenericAlias)):
+ ev_args = tuple(_eval_type(a, globalns, localns, recursive_guard) for a in t.__args__)
if ev_args == t.__args__:
return t
- if isinstance(t, GenericAlias):
- return GenericAlias(t.__origin__, ev_args)
- else:
- return t.copy_with(ev_args)
+ if isinstance(t, GenericAlias):
+ return GenericAlias(t.__origin__, ev_args)
+ else:
+ return t.copy_with(ev_args)
return t
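# get_type_hints() drives this resolution; a sketch with an illustrative
# class name, assuming it runs at the top level of a __main__ script:
#     >>> from typing import get_type_hints
#     >>> class Node: ...
#     >>> def parent(n: 'Node') -> 'Node': ...
#     >>> get_type_hints(parent)
#     {'n': <class '__main__.Node'>, 'return': <class '__main__.Node'>}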
@@ -306,13 +306,13 @@ class _Final:
__slots__ = ('__weakref__',)
- def __init_subclass__(self, /, *args, **kwds):
+ def __init_subclass__(self, /, *args, **kwds):
if '_root' not in kwds:
raise TypeError("Cannot subclass special typing classes")
class _Immutable:
"""Mixin to indicate that object should not be copied."""
- __slots__ = ()
+ __slots__ = ()
def __copy__(self):
return self
@@ -321,18 +321,18 @@ class _Immutable:
return self
-# Internal indicator of special typing constructs.
-# See __doc__ instance attribute for specific docs.
-class _SpecialForm(_Final, _root=True):
- __slots__ = ('_name', '__doc__', '_getitem')
+# Internal indicator of special typing constructs.
+# See __doc__ instance attribute for specific docs.
+class _SpecialForm(_Final, _root=True):
+ __slots__ = ('_name', '__doc__', '_getitem')
- def __init__(self, getitem):
- self._getitem = getitem
- self._name = getitem.__name__
- self.__doc__ = getitem.__doc__
+ def __init__(self, getitem):
+ self._getitem = getitem
+ self._name = getitem.__name__
+ self.__doc__ = getitem.__doc__
- def __mro_entries__(self, bases):
- raise TypeError(f"Cannot subclass {self!r}")
+ def __mro_entries__(self, bases):
+ raise TypeError(f"Cannot subclass {self!r}")
def __repr__(self):
return 'typing.' + self._name
@@ -351,18 +351,18 @@ class _SpecialForm(_Final, _root=True):
@_tp_cache
def __getitem__(self, parameters):
- return self._getitem(self, parameters)
-
-
-class _LiteralSpecialForm(_SpecialForm, _root=True):
- def __getitem__(self, parameters):
- if not isinstance(parameters, tuple):
- parameters = (parameters,)
- return self._getitem(self, *parameters)
-
-
-@_SpecialForm
-def Any(self, parameters):
+ return self._getitem(self, parameters)
+
+
+class _LiteralSpecialForm(_SpecialForm, _root=True):
+ def __getitem__(self, parameters):
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+ return self._getitem(self, *parameters)
+
+
+@_SpecialForm
+def Any(self, parameters):
"""Special type indicating an unconstrained type.
- Any is compatible with every type.
@@ -372,11 +372,11 @@ def Any(self, parameters):
Note that all the above statements are true from the point of view of
static type checkers. At runtime, Any should not be used with instance
or class checks.
- """
- raise TypeError(f"{self} is not subscriptable")
+ """
+ raise TypeError(f"{self} is not subscriptable")
-@_SpecialForm
-def NoReturn(self, parameters):
+@_SpecialForm
+def NoReturn(self, parameters):
"""Special type indicating functions that never return.
Example::
@@ -387,11 +387,11 @@ def NoReturn(self, parameters):
This type is invalid in other positions, e.g., ``List[NoReturn]``
will fail in static type checkers.
- """
- raise TypeError(f"{self} is not subscriptable")
+ """
+ raise TypeError(f"{self} is not subscriptable")
-@_SpecialForm
-def ClassVar(self, parameters):
+@_SpecialForm
+def ClassVar(self, parameters):
"""Special type construct to mark class variables.
An annotation wrapped in ClassVar indicates that a given
@@ -406,33 +406,33 @@ def ClassVar(self, parameters):
Note that ClassVar is not a class itself, and should not
be used with isinstance() or issubclass().
- """
- item = _type_check(parameters, f'{self} accepts only single type.')
- return _GenericAlias(self, (item,))
-
-@_SpecialForm
-def Final(self, parameters):
- """Special typing construct to indicate final names to type checkers.
-
- A final name cannot be re-assigned or overridden in a subclass.
- For example:
-
- MAX_SIZE: Final = 9000
- MAX_SIZE += 1 # Error reported by type checker
-
- class Connection:
- TIMEOUT: Final[int] = 10
-
- class FastConnector(Connection):
- TIMEOUT = 1 # Error reported by type checker
-
- There is no runtime checking of these properties.
- """
- item = _type_check(parameters, f'{self} accepts only single type.')
- return _GenericAlias(self, (item,))
-
-@_SpecialForm
-def Union(self, parameters):
+ """
+ item = _type_check(parameters, f'{self} accepts only single type.')
+ return _GenericAlias(self, (item,))
+
+@_SpecialForm
+def Final(self, parameters):
+ """Special typing construct to indicate final names to type checkers.
+
+ A final name cannot be re-assigned or overridden in a subclass.
+ For example:
+
+ MAX_SIZE: Final = 9000
+ MAX_SIZE += 1 # Error reported by type checker
+
+ class Connection:
+ TIMEOUT: Final[int] = 10
+
+ class FastConnector(Connection):
+ TIMEOUT = 1 # Error reported by type checker
+
+ There is no runtime checking of these properties.
+ """
+ item = _type_check(parameters, f'{self} accepts only single type.')
+ return _GenericAlias(self, (item,))
+
+@_SpecialForm
+def Union(self, parameters):
"""Union type; Union[X, Y] means either X or Y.
To define a union, use e.g. Union[int, str]. Details:
@@ -457,71 +457,71 @@ def Union(self, parameters):
- You cannot subclass or instantiate a union.
- You can use Optional[X] as a shorthand for Union[X, None].
- """
- if parameters == ():
- raise TypeError("Cannot take a Union of no types.")
- if not isinstance(parameters, tuple):
- parameters = (parameters,)
- msg = "Union[arg, ...]: each arg must be a type."
- parameters = tuple(_type_check(p, msg) for p in parameters)
- parameters = _remove_dups_flatten(parameters)
- if len(parameters) == 1:
- return parameters[0]
- return _UnionGenericAlias(self, parameters)
-
-@_SpecialForm
-def Optional(self, parameters):
+ """
+ if parameters == ():
+ raise TypeError("Cannot take a Union of no types.")
+ if not isinstance(parameters, tuple):
+ parameters = (parameters,)
+ msg = "Union[arg, ...]: each arg must be a type."
+ parameters = tuple(_type_check(p, msg) for p in parameters)
+ parameters = _remove_dups_flatten(parameters)
+ if len(parameters) == 1:
+ return parameters[0]
+ return _UnionGenericAlias(self, parameters)
+
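+# Runtime effect of the normalisation above, doctest-style:
+#     >>> from typing import Union
+#     >>> Union[int]                      # a one-element Union collapses
+#     <class 'int'>
+#     >>> Union[int, str, int] == Union[int, str]
+#     True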
+@_SpecialForm
+def Optional(self, parameters):
"""Optional type.
Optional[X] is equivalent to Union[X, None].
- """
- arg = _type_check(parameters, f"{self} requires a single type.")
- return Union[arg, type(None)]
-
-@_LiteralSpecialForm
-@_tp_cache(typed=True)
-def Literal(self, *parameters):
- """Special typing form to define literal types (a.k.a. value types).
-
- This form can be used to indicate to type checkers that the corresponding
- variable or function parameter has a value equivalent to the provided
- literal (or one of several literals):
-
- def validate_simple(data: Any) -> Literal[True]: # always returns True
- ...
-
- MODE = Literal['r', 'rb', 'w', 'wb']
- def open_helper(file: str, mode: MODE) -> str:
- ...
-
- open_helper('/some/path', 'r') # Passes type check
- open_helper('/other/path', 'typo') # Error in type checker
-
- Literal[...] cannot be subclassed. At runtime, an arbitrary value
- is allowed as type argument to Literal[...], but type checkers may
- impose restrictions.
- """
- # There is no '_type_check' call because arguments to Literal[...] are
- # values, not types.
- parameters = _flatten_literal_params(parameters)
-
- try:
- parameters = tuple(p for p, _ in _deduplicate(list(_value_and_type_iter(parameters))))
- except TypeError: # unhashable parameters
- pass
-
- return _LiteralGenericAlias(self, parameters)
-
-
+ """
+ arg = _type_check(parameters, f"{self} requires a single type.")
+ return Union[arg, type(None)]
+
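+# Doctest-style, the equivalence stated in the docstring:
+#     >>> from typing import Optional, Union
+#     >>> Optional[str] == Union[str, None]
+#     True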
+@_LiteralSpecialForm
+@_tp_cache(typed=True)
+def Literal(self, *parameters):
+ """Special typing form to define literal types (a.k.a. value types).
+
+ This form can be used to indicate to type checkers that the corresponding
+ variable or function parameter has a value equivalent to the provided
+ literal (or one of several literals):
+
+ def validate_simple(data: Any) -> Literal[True]: # always returns True
+ ...
+
+ MODE = Literal['r', 'rb', 'w', 'wb']
+ def open_helper(file: str, mode: MODE) -> str:
+ ...
+
+ open_helper('/some/path', 'r') # Passes type check
+ open_helper('/other/path', 'typo') # Error in type checker
+
+ Literal[...] cannot be subclassed. At runtime, an arbitrary value
+ is allowed as type argument to Literal[...], but type checkers may
+ impose restrictions.
+ """
+ # There is no '_type_check' call because arguments to Literal[...] are
+ # values, not types.
+ parameters = _flatten_literal_params(parameters)
+
+ try:
+ parameters = tuple(p for p, _ in _deduplicate(list(_value_and_type_iter(parameters))))
+ except TypeError: # unhashable parameters
+ pass
+
+ return _LiteralGenericAlias(self, parameters)
+
+
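+# Deduplication keys on (value, type), so equal values of different types
+# both survive; doctest-style:
+#     >>> from typing import Literal
+#     >>> Literal[1, 1, 2].__args__
+#     (1, 2)
+#     >>> Literal[1, True].__args__       # True == 1, but bool is not int
+#     (1, True)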
class ForwardRef(_Final, _root=True):
"""Internal wrapper to hold a forward reference."""
__slots__ = ('__forward_arg__', '__forward_code__',
'__forward_evaluated__', '__forward_value__',
- '__forward_is_argument__', '__forward_is_class__',
- '__forward_module__')
+ '__forward_is_argument__', '__forward_is_class__',
+ '__forward_module__')
- def __init__(self, arg, is_argument=True, module=None, *, is_class=False):
+ def __init__(self, arg, is_argument=True, module=None, *, is_class=False):
if not isinstance(arg, str):
raise TypeError(f"Forward reference must be a string -- got {arg!r}")
try:
@@ -533,12 +533,12 @@ class ForwardRef(_Final, _root=True):
self.__forward_evaluated__ = False
self.__forward_value__ = None
self.__forward_is_argument__ = is_argument
- self.__forward_is_class__ = is_class
- self.__forward_module__ = module
+ self.__forward_is_class__ = is_class
+ self.__forward_module__ = module
- def _evaluate(self, globalns, localns, recursive_guard):
- if self.__forward_arg__ in recursive_guard:
- return self
+ def _evaluate(self, globalns, localns, recursive_guard):
+ if self.__forward_arg__ in recursive_guard:
+ return self
if not self.__forward_evaluated__ or localns is not globalns:
if globalns is None and localns is None:
globalns = localns = {}
@@ -546,32 +546,32 @@ class ForwardRef(_Final, _root=True):
globalns = localns
elif localns is None:
localns = globalns
- if self.__forward_module__ is not None:
- globalns = getattr(
- sys.modules.get(self.__forward_module__, None), '__dict__', globalns
- )
- type_ = _type_check(
+ if self.__forward_module__ is not None:
+ globalns = getattr(
+ sys.modules.get(self.__forward_module__, None), '__dict__', globalns
+ )
+ type_ = _type_check(
eval(self.__forward_code__, globalns, localns),
"Forward references must evaluate to types.",
- is_argument=self.__forward_is_argument__,
- is_class=self.__forward_is_class__,
- )
- self.__forward_value__ = _eval_type(
- type_, globalns, localns, recursive_guard | {self.__forward_arg__}
- )
+ is_argument=self.__forward_is_argument__,
+ is_class=self.__forward_is_class__,
+ )
+ self.__forward_value__ = _eval_type(
+ type_, globalns, localns, recursive_guard | {self.__forward_arg__}
+ )
self.__forward_evaluated__ = True
return self.__forward_value__
def __eq__(self, other):
if not isinstance(other, ForwardRef):
return NotImplemented
- if self.__forward_evaluated__ and other.__forward_evaluated__:
- return (self.__forward_arg__ == other.__forward_arg__ and
- self.__forward_value__ == other.__forward_value__)
- return self.__forward_arg__ == other.__forward_arg__
+ if self.__forward_evaluated__ and other.__forward_evaluated__:
+ return (self.__forward_arg__ == other.__forward_arg__ and
+ self.__forward_value__ == other.__forward_value__)
+ return self.__forward_arg__ == other.__forward_arg__
def __hash__(self):
- return hash(self.__forward_arg__)
+ return hash(self.__forward_arg__)
def __repr__(self):
return f'ForwardRef({self.__forward_arg__!r})'
@@ -622,7 +622,7 @@ class TypeVar(_Final, _Immutable, _root=True):
"""
__slots__ = ('__name__', '__bound__', '__constraints__',
- '__covariant__', '__contravariant__', '__dict__')
+ '__covariant__', '__contravariant__', '__dict__')
def __init__(self, name, *constraints, bound=None,
covariant=False, contravariant=False):
@@ -641,10 +641,10 @@ class TypeVar(_Final, _Immutable, _root=True):
self.__bound__ = _type_check(bound, "Bound must be a type.")
else:
self.__bound__ = None
- try:
- def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') # for pickling
- except (AttributeError, ValueError):
- def_mod = None
+ try:
+ def_mod = sys._getframe(1).f_globals.get('__name__', '__main__') # for pickling
+ except (AttributeError, ValueError):
+ def_mod = None
if def_mod != 'typing':
self.__module__ = def_mod
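# The frame peek above records where a TypeVar was defined so that it can
# be pickled; doctest-style, assuming this runs in a __main__ script:
#     >>> from typing import TypeVar
#     >>> T = TypeVar('T')
#     >>> T.__module__
#     '__main__'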
@@ -664,7 +664,7 @@ class TypeVar(_Final, _Immutable, _root=True):
def _is_dunder(attr):
return attr.startswith('__') and attr.endswith('__')
-class _BaseGenericAlias(_Final, _root=True):
+class _BaseGenericAlias(_Final, _root=True):
"""The central part of internal API.
This represents a generic version of type 'origin' with type arguments 'params'.
@@ -673,70 +673,70 @@ class _BaseGenericAlias(_Final, _root=True):
have 'name' always set. If 'inst' is False, then the alias can't be instantiated;
this is used by e.g. typing.List and typing.Dict.
"""
- def __init__(self, origin, *, inst=True, name=None):
+ def __init__(self, origin, *, inst=True, name=None):
self._inst = inst
self._name = name
- self.__origin__ = origin
- self.__slots__ = None # This is not documented.
-
- def __call__(self, *args, **kwargs):
- if not self._inst:
- raise TypeError(f"Type {self._name} cannot be instantiated; "
- f"use {self.__origin__.__name__}() instead")
- result = self.__origin__(*args, **kwargs)
- try:
- result.__orig_class__ = self
- except AttributeError:
- pass
- return result
-
- def __mro_entries__(self, bases):
- res = []
- if self.__origin__ not in bases:
- res.append(self.__origin__)
- i = bases.index(self)
- for b in bases[i+1:]:
- if isinstance(b, _BaseGenericAlias) or issubclass(b, Generic):
- break
- else:
- res.append(Generic)
- return tuple(res)
-
- def __getattr__(self, attr):
- # We are careful about copy and pickle.
- # Also, for simplicity, we just don't relay all dunder names.
- if '__origin__' in self.__dict__ and not _is_dunder(attr):
- return getattr(self.__origin__, attr)
- raise AttributeError(attr)
-
- def __setattr__(self, attr, val):
- if _is_dunder(attr) or attr in ('_name', '_inst', '_nparams'):
- super().__setattr__(attr, val)
- else:
- setattr(self.__origin__, attr, val)
-
- def __instancecheck__(self, obj):
- return self.__subclasscheck__(type(obj))
-
- def __subclasscheck__(self, cls):
- raise TypeError("Subscripted generics cannot be used with"
- " class and instance checks")
-
-
-# Special typing constructs Union, Optional, Generic, Callable and Tuple
-# use three special attributes for internal bookkeeping of generic types:
-# * __parameters__ is a tuple of unique free type parameters of a generic
-# type, for example, Dict[T, T].__parameters__ == (T,);
-# * __origin__ keeps a reference to a type that was subscripted,
-# e.g., Union[T, int].__origin__ == Union, or the non-generic version of
-# the type.
-# * __args__ is a tuple of all arguments used in subscripting,
-# e.g., Dict[T, int].__args__ == (T, int).
-
-
-class _GenericAlias(_BaseGenericAlias, _root=True):
- def __init__(self, origin, params, *, inst=True, name=None):
- super().__init__(origin, inst=inst, name=name)
+ self.__origin__ = origin
+ self.__slots__ = None # This is not documented.
+
+ def __call__(self, *args, **kwargs):
+ if not self._inst:
+ raise TypeError(f"Type {self._name} cannot be instantiated; "
+ f"use {self.__origin__.__name__}() instead")
+ result = self.__origin__(*args, **kwargs)
+ try:
+ result.__orig_class__ = self
+ except AttributeError:
+ pass
+ return result
+
+ def __mro_entries__(self, bases):
+ res = []
+ if self.__origin__ not in bases:
+ res.append(self.__origin__)
+ i = bases.index(self)
+ for b in bases[i+1:]:
+ if isinstance(b, _BaseGenericAlias) or issubclass(b, Generic):
+ break
+ else:
+ res.append(Generic)
+ return tuple(res)
+
+ def __getattr__(self, attr):
+ # We are careful about copy and pickle.
+ # Also, for simplicity, we just don't relay all dunder names.
+ if '__origin__' in self.__dict__ and not _is_dunder(attr):
+ return getattr(self.__origin__, attr)
+ raise AttributeError(attr)
+
+ def __setattr__(self, attr, val):
+ if _is_dunder(attr) or attr in ('_name', '_inst', '_nparams'):
+ super().__setattr__(attr, val)
+ else:
+ setattr(self.__origin__, attr, val)
+
+ def __instancecheck__(self, obj):
+ return self.__subclasscheck__(type(obj))
+
+ def __subclasscheck__(self, cls):
+ raise TypeError("Subscripted generics cannot be used with"
+ " class and instance checks")
+
+
+# Special typing constructs Union, Optional, Generic, Callable and Tuple
+# use three special attributes for internal bookkeeping of generic types:
+# * __parameters__ is a tuple of unique free type parameters of a generic
+# type, for example, Dict[T, T].__parameters__ == (T,);
+# * __origin__ keeps a reference to a type that was subscripted,
+# e.g., Union[T, int].__origin__ == Union, or the non-generic version of
+# the type.
+# * __args__ is a tuple of all arguments used in subscripting,
+# e.g., Dict[T, int].__args__ == (T, int).
+
+
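+# Concretely, for the three bookkeeping attributes described above:
+#     >>> from typing import Dict, TypeVar
+#     >>> T = TypeVar('T')
+#     >>> alias = Dict[T, int]
+#     >>> alias.__origin__, alias.__args__, alias.__parameters__
+#     (<class 'dict'>, (~T, <class 'int'>), (~T,))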
+class _GenericAlias(_BaseGenericAlias, _root=True):
+ def __init__(self, origin, params, *, inst=True, name=None):
+ super().__init__(origin, inst=inst, name=name)
if not isinstance(params, tuple):
params = (params,)
self.__args__ = tuple(... if a is _TypingEllipsis else
@@ -746,218 +746,218 @@ class _GenericAlias(_BaseGenericAlias, _root=True):
if not name:
self.__module__ = origin.__module__
- def __eq__(self, other):
- if not isinstance(other, _GenericAlias):
- return NotImplemented
- return (self.__origin__ == other.__origin__
- and self.__args__ == other.__args__)
-
- def __hash__(self):
- return hash((self.__origin__, self.__args__))
-
+ def __eq__(self, other):
+ if not isinstance(other, _GenericAlias):
+ return NotImplemented
+ return (self.__origin__ == other.__origin__
+ and self.__args__ == other.__args__)
+
+ def __hash__(self):
+ return hash((self.__origin__, self.__args__))
+
@_tp_cache
def __getitem__(self, params):
- if self.__origin__ in (Generic, Protocol):
- # Can't subscript Generic[...] or Protocol[...].
+ if self.__origin__ in (Generic, Protocol):
+ # Can't subscript Generic[...] or Protocol[...].
raise TypeError(f"Cannot subscript already-subscripted {self}")
if not isinstance(params, tuple):
params = (params,)
msg = "Parameters to generic types must be types."
params = tuple(_type_check(p, msg) for p in params)
- _check_generic(self, params, len(self.__parameters__))
-
- subst = dict(zip(self.__parameters__, params))
- new_args = []
- for arg in self.__args__:
- if isinstance(arg, TypeVar):
- arg = subst[arg]
- elif isinstance(arg, (_GenericAlias, GenericAlias)):
- subparams = arg.__parameters__
- if subparams:
- subargs = tuple(subst[x] for x in subparams)
- arg = arg[subargs]
- new_args.append(arg)
- return self.copy_with(tuple(new_args))
-
+ _check_generic(self, params, len(self.__parameters__))
+
+ subst = dict(zip(self.__parameters__, params))
+ new_args = []
+ for arg in self.__args__:
+ if isinstance(arg, TypeVar):
+ arg = subst[arg]
+ elif isinstance(arg, (_GenericAlias, GenericAlias)):
+ subparams = arg.__parameters__
+ if subparams:
+ subargs = tuple(subst[x] for x in subparams)
+ arg = arg[subargs]
+ new_args.append(arg)
+ return self.copy_with(tuple(new_args))
+
def copy_with(self, params):
- return self.__class__(self.__origin__, params, name=self._name, inst=self._inst)
+ return self.__class__(self.__origin__, params, name=self._name, inst=self._inst)
def __repr__(self):
- if self._name:
- name = 'typing.' + self._name
- else:
- name = _type_repr(self.__origin__)
- args = ", ".join([_type_repr(a) for a in self.__args__])
- return f'{name}[{args}]'
-
- def __reduce__(self):
- if self._name:
- origin = globals()[self._name]
- else:
- origin = self.__origin__
- args = tuple(self.__args__)
- if len(args) == 1 and not isinstance(args[0], tuple):
- args, = args
- return operator.getitem, (origin, args)
+ if self._name:
+ name = 'typing.' + self._name
+ else:
+ name = _type_repr(self.__origin__)
+ args = ", ".join([_type_repr(a) for a in self.__args__])
+ return f'{name}[{args}]'
+
+ def __reduce__(self):
+ if self._name:
+ origin = globals()[self._name]
+ else:
+ origin = self.__origin__
+ args = tuple(self.__args__)
+ if len(args) == 1 and not isinstance(args[0], tuple):
+ args, = args
+ return operator.getitem, (origin, args)
def __mro_entries__(self, bases):
if self._name: # generic version of an ABC or built-in class
- return super().__mro_entries__(bases)
+ return super().__mro_entries__(bases)
if self.__origin__ is Generic:
- if Protocol in bases:
- return ()
+ if Protocol in bases:
+ return ()
i = bases.index(self)
for b in bases[i+1:]:
- if isinstance(b, _BaseGenericAlias) and b is not self:
+ if isinstance(b, _BaseGenericAlias) and b is not self:
return ()
return (self.__origin__,)
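# The TypeVar substitution in __getitem__ above is what resolves nested
# aliases; doctest-style:
#     >>> from typing import List, Tuple, TypeVar
#     >>> T = TypeVar('T')
#     >>> List[Tuple[T, T]][int]
#     typing.List[typing.Tuple[int, int]]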
-# _nparams is the number of accepted parameters, e.g. 0 for Hashable,
-# 1 for List and 2 for Dict. It may be -1 if a variable number of
-# parameters is accepted (needs a custom __getitem__).
-
-class _SpecialGenericAlias(_BaseGenericAlias, _root=True):
- def __init__(self, origin, nparams, *, inst=True, name=None):
- if name is None:
- name = origin.__name__
- super().__init__(origin, inst=inst, name=name)
- self._nparams = nparams
- if origin.__module__ == 'builtins':
- self.__doc__ = f'A generic version of {origin.__qualname__}.'
+# _nparams is the number of accepted parameters, e.g. 0 for Hashable,
+# 1 for List and 2 for Dict. It may be -1 if a variable number of
+# parameters is accepted (needs a custom __getitem__).
+
+class _SpecialGenericAlias(_BaseGenericAlias, _root=True):
+ def __init__(self, origin, nparams, *, inst=True, name=None):
+ if name is None:
+ name = origin.__name__
+ super().__init__(origin, inst=inst, name=name)
+ self._nparams = nparams
+ if origin.__module__ == 'builtins':
+ self.__doc__ = f'A generic version of {origin.__qualname__}.'
else:
- self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}.'
-
- @_tp_cache
- def __getitem__(self, params):
- if not isinstance(params, tuple):
- params = (params,)
- msg = "Parameters to generic types must be types."
- params = tuple(_type_check(p, msg) for p in params)
- _check_generic(self, params, self._nparams)
- return self.copy_with(params)
-
- def copy_with(self, params):
- return _GenericAlias(self.__origin__, params,
- name=self._name, inst=self._inst)
-
- def __repr__(self):
- return 'typing.' + self._name
-
+ self.__doc__ = f'A generic version of {origin.__module__}.{origin.__qualname__}.'
+
+ @_tp_cache
+ def __getitem__(self, params):
+ if not isinstance(params, tuple):
+ params = (params,)
+ msg = "Parameters to generic types must be types."
+ params = tuple(_type_check(p, msg) for p in params)
+ _check_generic(self, params, self._nparams)
+ return self.copy_with(params)
+
+ def copy_with(self, params):
+ return _GenericAlias(self.__origin__, params,
+ name=self._name, inst=self._inst)
+
+ def __repr__(self):
+ return 'typing.' + self._name
+
def __subclasscheck__(self, cls):
- if isinstance(cls, _SpecialGenericAlias):
- return issubclass(cls.__origin__, self.__origin__)
- if not isinstance(cls, _GenericAlias):
- return issubclass(cls, self.__origin__)
- return super().__subclasscheck__(cls)
+ if isinstance(cls, _SpecialGenericAlias):
+ return issubclass(cls.__origin__, self.__origin__)
+ if not isinstance(cls, _GenericAlias):
+ return issubclass(cls, self.__origin__)
+ return super().__subclasscheck__(cls)
def __reduce__(self):
- return self._name
-
-
-class _CallableGenericAlias(_GenericAlias, _root=True):
- def __repr__(self):
- assert self._name == 'Callable'
- if len(self.__args__) == 2 and self.__args__[0] is Ellipsis:
- return super().__repr__()
- return (f'typing.Callable'
- f'[[{", ".join([_type_repr(a) for a in self.__args__[:-1]])}], '
- f'{_type_repr(self.__args__[-1])}]')
-
- def __reduce__(self):
- args = self.__args__
- if not (len(args) == 2 and args[0] is ...):
- args = list(args[:-1]), args[-1]
- return operator.getitem, (Callable, args)
-
-
-class _CallableType(_SpecialGenericAlias, _root=True):
- def copy_with(self, params):
- return _CallableGenericAlias(self.__origin__, params,
- name=self._name, inst=self._inst)
-
+ return self._name
+
+
+class _CallableGenericAlias(_GenericAlias, _root=True):
+ def __repr__(self):
+ assert self._name == 'Callable'
+ if len(self.__args__) == 2 and self.__args__[0] is Ellipsis:
+ return super().__repr__()
+ return (f'typing.Callable'
+ f'[[{", ".join([_type_repr(a) for a in self.__args__[:-1]])}], '
+ f'{_type_repr(self.__args__[-1])}]')
+
+ def __reduce__(self):
+ args = self.__args__
+ if not (len(args) == 2 and args[0] is ...):
+ args = list(args[:-1]), args[-1]
+ return operator.getitem, (Callable, args)
+
+
+class _CallableType(_SpecialGenericAlias, _root=True):
+ def copy_with(self, params):
+ return _CallableGenericAlias(self.__origin__, params,
+ name=self._name, inst=self._inst)
+
def __getitem__(self, params):
if not isinstance(params, tuple) or len(params) != 2:
raise TypeError("Callable must be used as "
"Callable[[arg, ...], result].")
args, result = params
- # This relaxes what args can be on purpose to allow things like
- # PEP 612 ParamSpec. Responsibility for whether a user is using
- # Callable[...] properly is deferred to static type checkers.
- if isinstance(args, list):
- params = (tuple(args), result)
+ # This relaxes what args can be on purpose to allow things like
+ # PEP 612 ParamSpec. Responsibility for whether a user is using
+ # Callable[...] properly is deferred to static type checkers.
+ if isinstance(args, list):
+ params = (tuple(args), result)
else:
- params = (args, result)
+ params = (args, result)
return self.__getitem_inner__(params)
@_tp_cache
def __getitem_inner__(self, params):
- args, result = params
- msg = "Callable[args, result]: result must be a type."
- result = _type_check(result, msg)
- if args is Ellipsis:
- return self.copy_with((_TypingEllipsis, result))
- if not isinstance(args, tuple):
- args = (args,)
- args = tuple(_type_convert(arg) for arg in args)
- params = args + (result,)
- return self.copy_with(params)
-
-
-class _TupleType(_SpecialGenericAlias, _root=True):
- @_tp_cache
- def __getitem__(self, params):
- if params == ():
- return self.copy_with((_TypingEmpty,))
- if not isinstance(params, tuple):
- params = (params,)
- if len(params) == 2 and params[1] is ...:
- msg = "Tuple[t, ...]: t must be a type."
- p = _type_check(params[0], msg)
- return self.copy_with((p, _TypingEllipsis))
- msg = "Tuple[t0, t1, ...]: each t must be a type."
- params = tuple(_type_check(p, msg) for p in params)
- return self.copy_with(params)
-
-
-class _UnionGenericAlias(_GenericAlias, _root=True):
- def copy_with(self, params):
- return Union[params]
-
- def __eq__(self, other):
- if not isinstance(other, _UnionGenericAlias):
- return NotImplemented
- return set(self.__args__) == set(other.__args__)
-
- def __hash__(self):
- return hash(frozenset(self.__args__))
-
- def __repr__(self):
- args = self.__args__
- if len(args) == 2:
- if args[0] is type(None):
- return f'typing.Optional[{_type_repr(args[1])}]'
- elif args[1] is type(None):
- return f'typing.Optional[{_type_repr(args[0])}]'
- return super().__repr__()
-
-
-def _value_and_type_iter(parameters):
- return ((p, type(p)) for p in parameters)
-
-
-class _LiteralGenericAlias(_GenericAlias, _root=True):
-
- def __eq__(self, other):
- if not isinstance(other, _LiteralGenericAlias):
- return NotImplemented
-
- return set(_value_and_type_iter(self.__args__)) == set(_value_and_type_iter(other.__args__))
-
- def __hash__(self):
- return hash(frozenset(_value_and_type_iter(self.__args__)))
-
-
+ args, result = params
+ msg = "Callable[args, result]: result must be a type."
+ result = _type_check(result, msg)
+ if args is Ellipsis:
+ return self.copy_with((_TypingEllipsis, result))
+ if not isinstance(args, tuple):
+ args = (args,)
+ args = tuple(_type_convert(arg) for arg in args)
+ params = args + (result,)
+ return self.copy_with(params)
+
+
+class _TupleType(_SpecialGenericAlias, _root=True):
+ @_tp_cache
+ def __getitem__(self, params):
+ if params == ():
+ return self.copy_with((_TypingEmpty,))
+ if not isinstance(params, tuple):
+ params = (params,)
+ if len(params) == 2 and params[1] is ...:
+ msg = "Tuple[t, ...]: t must be a type."
+ p = _type_check(params[0], msg)
+ return self.copy_with((p, _TypingEllipsis))
+ msg = "Tuple[t0, t1, ...]: each t must be a type."
+ params = tuple(_type_check(p, msg) for p in params)
+ return self.copy_with(params)
+
+
+class _UnionGenericAlias(_GenericAlias, _root=True):
+ def copy_with(self, params):
+ return Union[params]
+
+ def __eq__(self, other):
+ if not isinstance(other, _UnionGenericAlias):
+ return NotImplemented
+ return set(self.__args__) == set(other.__args__)
+
+ def __hash__(self):
+ return hash(frozenset(self.__args__))
+
+ def __repr__(self):
+ args = self.__args__
+ if len(args) == 2:
+ if args[0] is type(None):
+ return f'typing.Optional[{_type_repr(args[1])}]'
+ elif args[1] is type(None):
+ return f'typing.Optional[{_type_repr(args[0])}]'
+ return super().__repr__()
+
+
+def _value_and_type_iter(parameters):
+ return ((p, type(p)) for p in parameters)
+
+
+class _LiteralGenericAlias(_GenericAlias, _root=True):
+
+ def __eq__(self, other):
+ if not isinstance(other, _LiteralGenericAlias):
+ return NotImplemented
+
+ return set(_value_and_type_iter(self.__args__)) == set(_value_and_type_iter(other.__args__))
+
+ def __hash__(self):
+ return hash(frozenset(_value_and_type_iter(self.__args__)))
+
+
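+# Runtime behaviour of the Callable and Union aliases defined above:
+#     >>> from typing import Callable, Optional
+#     >>> Callable[[int, str], bool].__args__   # flattened to (*args, result)
+#     (<class 'int'>, <class 'str'>, <class 'bool'>)
+#     >>> Optional[int]                         # __repr__ prefers the Optional form
+#     typing.Optional[int]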
class Generic:
"""Abstract base class for generic types.
@@ -979,7 +979,7 @@ class Generic:
return default
"""
__slots__ = ()
- _is_protocol = False
+ _is_protocol = False
@_tp_cache
def __class_getitem__(cls, params):
@@ -990,17 +990,17 @@ class Generic:
f"Parameter list to {cls.__qualname__}[...] cannot be empty")
msg = "Parameters to generic types must be types."
params = tuple(_type_check(p, msg) for p in params)
- if cls in (Generic, Protocol):
- # Generic and Protocol can only be subscripted with unique type variables.
+ if cls in (Generic, Protocol):
+ # Generic and Protocol can only be subscripted with unique type variables.
if not all(isinstance(p, TypeVar) for p in params):
raise TypeError(
- f"Parameters to {cls.__name__}[...] must all be type variables")
+ f"Parameters to {cls.__name__}[...] must all be type variables")
if len(set(params)) != len(params):
raise TypeError(
- f"Parameters to {cls.__name__}[...] must all be unique")
+ f"Parameters to {cls.__name__}[...] must all be unique")
else:
# Subscripting a regular Generic subclass.
- _check_generic(cls, params, len(cls.__parameters__))
+ _check_generic(cls, params, len(cls.__parameters__))
return _GenericAlias(cls, params)
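    # A minimal generic class going through __class_getitem__ (the class
    # name is illustrative); run in a script, the alias repr is
    # module-qualified:
    #     >>> from typing import Generic, TypeVar
    #     >>> T = TypeVar('T')
    #     >>> class Box(Generic[T]): ...
    #     >>> Box[int]
    #     __main__.Box[int]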
def __init_subclass__(cls, *args, **kwargs):
@@ -1009,7 +1009,7 @@ class Generic:
if '__orig_bases__' in cls.__dict__:
error = Generic in cls.__orig_bases__
else:
- error = Generic in cls.__bases__ and cls.__name__ != 'Protocol'
+ error = Generic in cls.__bases__ and cls.__name__ != 'Protocol'
if error:
raise TypeError("Cannot inherit from plain Generic")
if '__orig_bases__' in cls.__dict__:
@@ -1027,7 +1027,7 @@ class Generic:
raise TypeError(
"Cannot inherit from Generic[...] multiple types.")
gvars = base.__parameters__
- if gvars is not None:
+ if gvars is not None:
tvarset = set(tvars)
gvarset = set(gvars)
if not tvarset <= gvarset:
@@ -1050,327 +1050,327 @@ class _TypingEllipsis:
"""Internal placeholder for ... (ellipsis)."""
-_TYPING_INTERNALS = ['__parameters__', '__orig_bases__', '__orig_class__',
- '_is_protocol', '_is_runtime_protocol']
-
-_SPECIAL_NAMES = ['__abstractmethods__', '__annotations__', '__dict__', '__doc__',
- '__init__', '__module__', '__new__', '__slots__',
- '__subclasshook__', '__weakref__', '__class_getitem__']
-
-# These special attributes will not be collected as protocol members.
-EXCLUDED_ATTRIBUTES = _TYPING_INTERNALS + _SPECIAL_NAMES + ['_MutableMapping__marker']
-
-
-def _get_protocol_attrs(cls):
- """Collect protocol members from a protocol class objects.
-
- This includes names actually defined in the class dictionary, as well
- as names that appear in annotations. Special names (above) are skipped.
- """
- attrs = set()
- for base in cls.__mro__[:-1]: # without object
- if base.__name__ in ('Protocol', 'Generic'):
- continue
- annotations = getattr(base, '__annotations__', {})
- for attr in list(base.__dict__.keys()) + list(annotations.keys()):
- if not attr.startswith('_abc_') and attr not in EXCLUDED_ATTRIBUTES:
- attrs.add(attr)
- return attrs
-
-
-def _is_callable_members_only(cls):
- # PEP 544 prohibits using issubclass() with protocols that have non-method members.
- return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls))
-
-
-def _no_init_or_replace_init(self, *args, **kwargs):
- cls = type(self)
-
- if cls._is_protocol:
- raise TypeError('Protocols cannot be instantiated')
-
- # Already using a custom `__init__`. No need to calculate correct
- # `__init__` to call. This can lead to RecursionError. See bpo-45121.
- if cls.__init__ is not _no_init_or_replace_init:
- return
-
- # Initially, `__init__` of a protocol subclass is set to `_no_init_or_replace_init`.
- # The first instantiation of the subclass will call `_no_init_or_replace_init` which
- # searches for a proper new `__init__` in the MRO. The new `__init__`
- # replaces the subclass' old `__init__` (i.e. `_no_init_or_replace_init`). Subsequent
- # instantiation of the protocol subclass will thus use the new
- # `__init__` and no longer call `_no_init_or_replace_init`.
- for base in cls.__mro__:
- init = base.__dict__.get('__init__', _no_init_or_replace_init)
- if init is not _no_init_or_replace_init:
- cls.__init__ = init
- break
- else:
- # should not happen
- cls.__init__ = object.__init__
-
- cls.__init__(self, *args, **kwargs)
-
-
-
-def _allow_reckless_class_cheks():
- """Allow instance and class checks for special stdlib modules.
-
- The abc and functools modules indiscriminately call isinstance() and
- issubclass() on the whole MRO of a user class, which may contain protocols.
- """
- try:
- return sys._getframe(3).f_globals['__name__'] in ['abc', 'functools']
- except (AttributeError, ValueError): # For platforms without _getframe().
- return True
-
-
-_PROTO_WHITELIST = {
- 'collections.abc': [
- 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
- 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible',
- ],
- 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
-}
-
-
-class _ProtocolMeta(ABCMeta):
- # This metaclass is really unfortunate and exists only because of
- # the lack of __instancehook__.
- def __instancecheck__(cls, instance):
- # We need this method for situations where attributes are
- # assigned in __init__.
- if ((not getattr(cls, '_is_protocol', False) or
- _is_callable_members_only(cls)) and
- issubclass(instance.__class__, cls)):
- return True
- if cls._is_protocol:
- if all(hasattr(instance, attr) and
- # All *methods* can be blocked by setting them to None.
- (not callable(getattr(cls, attr, None)) or
- getattr(instance, attr) is not None)
- for attr in _get_protocol_attrs(cls)):
- return True
- return super().__instancecheck__(instance)
-
-
-class Protocol(Generic, metaclass=_ProtocolMeta):
- """Base class for protocol classes.
-
- Protocol classes are defined as::
-
- class Proto(Protocol):
- def meth(self) -> int:
- ...
-
- Such classes are primarily used with static type checkers that recognize
- structural subtyping (static duck-typing), for example::
-
- class C:
- def meth(self) -> int:
- return 0
-
- def func(x: Proto) -> int:
- return x.meth()
-
- func(C()) # Passes static type check
-
- See PEP 544 for details. Protocol classes decorated with
- @typing.runtime_checkable act as simple-minded runtime protocols that check
- only the presence of given attributes, ignoring their type signatures.
- Protocol classes can be generic; they are defined as::
-
- class GenProto(Protocol[T]):
- def meth(self) -> T:
- ...
- """
- __slots__ = ()
- _is_protocol = True
- _is_runtime_protocol = False
-
- def __init_subclass__(cls, *args, **kwargs):
- super().__init_subclass__(*args, **kwargs)
-
- # Determine if this is a protocol or a concrete subclass.
- if not cls.__dict__.get('_is_protocol', False):
- cls._is_protocol = any(b is Protocol for b in cls.__bases__)
-
- # Set (or override) the protocol subclass hook.
- def _proto_hook(other):
- if not cls.__dict__.get('_is_protocol', False):
- return NotImplemented
-
- # First, perform various sanity checks.
- if not getattr(cls, '_is_runtime_protocol', False):
- if _allow_reckless_class_cheks():
- return NotImplemented
- raise TypeError("Instance and class checks can only be used with"
- " @runtime_checkable protocols")
- if not _is_callable_members_only(cls):
- if _allow_reckless_class_cheks():
- return NotImplemented
- raise TypeError("Protocols with non-method members"
- " don't support issubclass()")
- if not isinstance(other, type):
- # Same error message as for issubclass(1, int).
- raise TypeError('issubclass() arg 1 must be a class')
-
- # Second, perform the actual structural compatibility check.
- for attr in _get_protocol_attrs(cls):
- for base in other.__mro__:
- # Check if the member appears in the class dictionary...
- if attr in base.__dict__:
- if base.__dict__[attr] is None:
- return NotImplemented
- break
-
- # ...or in annotations, if it is a sub-protocol.
- annotations = getattr(base, '__annotations__', {})
- if (isinstance(annotations, collections.abc.Mapping) and
- attr in annotations and
- issubclass(other, Generic) and other._is_protocol):
- break
- else:
- return NotImplemented
- return True
-
- if '__subclasshook__' not in cls.__dict__:
- cls.__subclasshook__ = _proto_hook
-
- # We have nothing more to do for non-protocols...
- if not cls._is_protocol:
- return
-
- # ... otherwise check consistency of bases, and prohibit instantiation.
- for base in cls.__bases__:
- if not (base in (object, Generic) or
- base.__module__ in _PROTO_WHITELIST and
- base.__name__ in _PROTO_WHITELIST[base.__module__] or
- issubclass(base, Generic) and base._is_protocol):
- raise TypeError('Protocols can only inherit from other'
- ' protocols, got %r' % base)
- cls.__init__ = _no_init_or_replace_init
-
-
-class _AnnotatedAlias(_GenericAlias, _root=True):
- """Runtime representation of an annotated type.
-
- At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
- with extra annotations. The alias behaves like a normal typing alias:
- instantiating it is the same as instantiating the underlying type, and
- binding it to types is also the same.
- """
- def __init__(self, origin, metadata):
- if isinstance(origin, _AnnotatedAlias):
- metadata = origin.__metadata__ + metadata
- origin = origin.__origin__
- super().__init__(origin, origin)
- self.__metadata__ = metadata
-
- def copy_with(self, params):
- assert len(params) == 1
- new_type = params[0]
- return _AnnotatedAlias(new_type, self.__metadata__)
-
- def __repr__(self):
- return "typing.Annotated[{}, {}]".format(
- _type_repr(self.__origin__),
- ", ".join(repr(a) for a in self.__metadata__)
- )
-
- def __reduce__(self):
- return operator.getitem, (
- Annotated, (self.__origin__,) + self.__metadata__
- )
-
- def __eq__(self, other):
- if not isinstance(other, _AnnotatedAlias):
- return NotImplemented
- return (self.__origin__ == other.__origin__
- and self.__metadata__ == other.__metadata__)
-
- def __hash__(self):
- return hash((self.__origin__, self.__metadata__))
-
-
-class Annotated:
- """Add context specific metadata to a type.
-
- Example: Annotated[int, runtime_check.Unsigned] indicates to the
- hypothetical runtime_check module that this type is an unsigned int.
- Every other consumer of this type can ignore this metadata and treat
- this type as int.
-
- The first argument to Annotated must be a valid type.
-
- Details:
-
- - It's an error to call `Annotated` with fewer than two arguments.
- - Nested Annotated are flattened::
-
- Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
-
- - Instantiating an annotated type is equivalent to instantiating the
- underlying type::
-
- Annotated[C, Ann1](5) == C(5)
-
- - Annotated can be used as a generic type alias::
-
- Optimized = Annotated[T, runtime.Optimize()]
- Optimized[int] == Annotated[int, runtime.Optimize()]
-
- OptimizedList = Annotated[List[T], runtime.Optimize()]
- OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
- """
-
- __slots__ = ()
-
- def __new__(cls, *args, **kwargs):
- raise TypeError("Type Annotated cannot be instantiated.")
-
- @_tp_cache
- def __class_getitem__(cls, params):
- if not isinstance(params, tuple) or len(params) < 2:
- raise TypeError("Annotated[...] should be used "
- "with at least two arguments (a type and an "
- "annotation).")
- msg = "Annotated[t, ...]: t must be a type."
- origin = _type_check(params[0], msg)
- metadata = tuple(params[1:])
- return _AnnotatedAlias(origin, metadata)
-
- def __init_subclass__(cls, *args, **kwargs):
- raise TypeError(
- "Cannot subclass {}.Annotated".format(cls.__module__)
- )
-
-
-def runtime_checkable(cls):
- """Mark a protocol class as a runtime protocol.
-
- Such a protocol can be used with isinstance() and issubclass().
- Raise TypeError if applied to a non-protocol class.
- This allows a simple-minded structural check very similar to
- one-trick ponies in collections.abc such as Iterable.
- For example::
-
- @runtime_checkable
- class Closable(Protocol):
- def close(self): ...
-
- assert isinstance(open('/some/file'), Closable)
-
- Warning: this will check only the presence of the required methods,
- not their type signatures!
- """
- if not issubclass(cls, Generic) or not cls._is_protocol:
- raise TypeError('@runtime_checkable can be only applied to protocol classes,'
- ' got %r' % cls)
- cls._is_runtime_protocol = True
- return cls
-
-
+_TYPING_INTERNALS = ['__parameters__', '__orig_bases__', '__orig_class__',
+ '_is_protocol', '_is_runtime_protocol']
+
+_SPECIAL_NAMES = ['__abstractmethods__', '__annotations__', '__dict__', '__doc__',
+ '__init__', '__module__', '__new__', '__slots__',
+ '__subclasshook__', '__weakref__', '__class_getitem__']
+
+# These special attributes will not be collected as protocol members.
+EXCLUDED_ATTRIBUTES = _TYPING_INTERNALS + _SPECIAL_NAMES + ['_MutableMapping__marker']
+
+
+def _get_protocol_attrs(cls):
+ """Collect protocol members from a protocol class objects.
+
+ This includes names actually defined in the class dictionary, as well
+ as names that appear in annotations. Special names (above) are skipped.
+ """
+ attrs = set()
+ for base in cls.__mro__[:-1]: # without object
+ if base.__name__ in ('Protocol', 'Generic'):
+ continue
+ annotations = getattr(base, '__annotations__', {})
+ for attr in list(base.__dict__.keys()) + list(annotations.keys()):
+ if not attr.startswith('_abc_') and attr not in EXCLUDED_ATTRIBUTES:
+ attrs.add(attr)
+ return attrs
+
+
+def _is_callable_members_only(cls):
+ # PEP 544 prohibits using issubclass() with protocols that have non-method members.
+ return all(callable(getattr(cls, attr, None)) for attr in _get_protocol_attrs(cls))
+
+
+def _no_init_or_replace_init(self, *args, **kwargs):
+ cls = type(self)
+
+ if cls._is_protocol:
+ raise TypeError('Protocols cannot be instantiated')
+
+ # Already using a custom `__init__`. No need to calculate correct
+ # `__init__` to call. This can lead to RecursionError. See bpo-45121.
+ if cls.__init__ is not _no_init_or_replace_init:
+ return
+
+ # Initially, `__init__` of a protocol subclass is set to `_no_init_or_replace_init`.
+ # The first instantiation of the subclass will call `_no_init_or_replace_init` which
+ # searches for a proper new `__init__` in the MRO. The new `__init__`
+ # replaces the subclass' old `__init__` (i.e. `_no_init_or_replace_init`). Subsequent
+ # instantiation of the protocol subclass will thus use the new
+ # `__init__` and no longer call `_no_init_or_replace_init`.
+ for base in cls.__mro__:
+ init = base.__dict__.get('__init__', _no_init_or_replace_init)
+ if init is not _no_init_or_replace_init:
+ cls.__init__ = init
+ break
+ else:
+ # should not happen
+ cls.__init__ = object.__init__
+
+ cls.__init__(self, *args, **kwargs)
+
+
+
+def _allow_reckless_class_cheks():
+ """Allow instance and class checks for special stdlib modules.
+
+ The abc and functools modules indiscriminately call isinstance() and
+ issubclass() on the whole MRO of a user class, which may contain protocols.
+ """
+ try:
+ return sys._getframe(3).f_globals['__name__'] in ['abc', 'functools']
+ except (AttributeError, ValueError): # For platforms without _getframe().
+ return True
+
+
+_PROTO_WHITELIST = {
+ 'collections.abc': [
+ 'Callable', 'Awaitable', 'Iterable', 'Iterator', 'AsyncIterable',
+ 'Hashable', 'Sized', 'Container', 'Collection', 'Reversible',
+ ],
+ 'contextlib': ['AbstractContextManager', 'AbstractAsyncContextManager'],
+}
+
+
+class _ProtocolMeta(ABCMeta):
+ # This metaclass is really unfortunate and exists only because of
+ # the lack of __instancehook__.
+ def __instancecheck__(cls, instance):
+ # We need this method for situations where attributes are
+ # assigned in __init__.
+ if ((not getattr(cls, '_is_protocol', False) or
+ _is_callable_members_only(cls)) and
+ issubclass(instance.__class__, cls)):
+ return True
+ if cls._is_protocol:
+ if all(hasattr(instance, attr) and
+ # All *methods* can be blocked by setting them to None.
+ (not callable(getattr(cls, attr, None)) or
+ getattr(instance, attr) is not None)
+ for attr in _get_protocol_attrs(cls)):
+ return True
+ return super().__instancecheck__(instance)
+
+
+class Protocol(Generic, metaclass=_ProtocolMeta):
+ """Base class for protocol classes.
+
+ Protocol classes are defined as::
+
+ class Proto(Protocol):
+ def meth(self) -> int:
+ ...
+
+ Such classes are primarily used with static type checkers that recognize
+ structural subtyping (static duck-typing), for example::
+
+ class C:
+ def meth(self) -> int:
+ return 0
+
+ def func(x: Proto) -> int:
+ return x.meth()
+
+ func(C()) # Passes static type check
+
+ See PEP 544 for details. Protocol classes decorated with
+ @typing.runtime_checkable act as simple-minded runtime protocols that check
+ only the presence of given attributes, ignoring their type signatures.
+ Protocol classes can be generic; they are defined as::
+
+ class GenProto(Protocol[T]):
+ def meth(self) -> T:
+ ...
+ """
+ __slots__ = ()
+ _is_protocol = True
+ _is_runtime_protocol = False
+
+ def __init_subclass__(cls, *args, **kwargs):
+ super().__init_subclass__(*args, **kwargs)
+
+ # Determine if this is a protocol or a concrete subclass.
+ if not cls.__dict__.get('_is_protocol', False):
+ cls._is_protocol = any(b is Protocol for b in cls.__bases__)
+
+ # Set (or override) the protocol subclass hook.
+ def _proto_hook(other):
+ if not cls.__dict__.get('_is_protocol', False):
+ return NotImplemented
+
+ # First, perform various sanity checks.
+ if not getattr(cls, '_is_runtime_protocol', False):
+ if _allow_reckless_class_cheks():
+ return NotImplemented
+ raise TypeError("Instance and class checks can only be used with"
+ " @runtime_checkable protocols")
+ if not _is_callable_members_only(cls):
+ if _allow_reckless_class_cheks():
+ return NotImplemented
+ raise TypeError("Protocols with non-method members"
+ " don't support issubclass()")
+ if not isinstance(other, type):
+ # Same error message as for issubclass(1, int).
+ raise TypeError('issubclass() arg 1 must be a class')
+
+ # Second, perform the actual structural compatibility check.
+ for attr in _get_protocol_attrs(cls):
+ for base in other.__mro__:
+ # Check if the member appears in the class dictionary...
+ if attr in base.__dict__:
+ if base.__dict__[attr] is None:
+ return NotImplemented
+ break
+
+ # ...or in annotations, if it is a sub-protocol.
+ annotations = getattr(base, '__annotations__', {})
+ if (isinstance(annotations, collections.abc.Mapping) and
+ attr in annotations and
+ issubclass(other, Generic) and other._is_protocol):
+ break
+ else:
+ return NotImplemented
+ return True
+
+ if '__subclasshook__' not in cls.__dict__:
+ cls.__subclasshook__ = _proto_hook
+
+ # We have nothing more to do for non-protocols...
+ if not cls._is_protocol:
+ return
+
+ # ... otherwise check consistency of bases, and prohibit instantiation.
+ for base in cls.__bases__:
+ if not (base in (object, Generic) or
+ base.__module__ in _PROTO_WHITELIST and
+ base.__name__ in _PROTO_WHITELIST[base.__module__] or
+ issubclass(base, Generic) and base._is_protocol):
+ raise TypeError('Protocols can only inherit from other'
+ ' protocols, got %r' % base)
+ cls.__init__ = _no_init_or_replace_init
+
+
+class _AnnotatedAlias(_GenericAlias, _root=True):
+ """Runtime representation of an annotated type.
+
+ At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
+ with extra annotations. The alias behaves like a normal typing alias:
+ instantiating it is the same as instantiating the underlying type, and
+ binding it to types is also the same.
+ """
+ def __init__(self, origin, metadata):
+ if isinstance(origin, _AnnotatedAlias):
+ metadata = origin.__metadata__ + metadata
+ origin = origin.__origin__
+ super().__init__(origin, origin)
+ self.__metadata__ = metadata
+
+ def copy_with(self, params):
+ assert len(params) == 1
+ new_type = params[0]
+ return _AnnotatedAlias(new_type, self.__metadata__)
+
+ def __repr__(self):
+ return "typing.Annotated[{}, {}]".format(
+ _type_repr(self.__origin__),
+ ", ".join(repr(a) for a in self.__metadata__)
+ )
+
+ def __reduce__(self):
+ return operator.getitem, (
+ Annotated, (self.__origin__,) + self.__metadata__
+ )
+
+ def __eq__(self, other):
+ if not isinstance(other, _AnnotatedAlias):
+ return NotImplemented
+ return (self.__origin__ == other.__origin__
+ and self.__metadata__ == other.__metadata__)
+
+ def __hash__(self):
+ return hash((self.__origin__, self.__metadata__))
+
+
+class Annotated:
+ """Add context specific metadata to a type.
+
+ Example: Annotated[int, runtime_check.Unsigned] indicates to the
+ hypothetical runtime_check module that this type is an unsigned int.
+ Every other consumer of this type can ignore this metadata and treat
+ this type as int.
+
+ The first argument to Annotated must be a valid type.
+
+ Details:
+
+ - It's an error to call `Annotated` with fewer than two arguments.
+ - Nested Annotated are flattened::
+
+ Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
+
+ - Instantiating an annotated type is equivalent to instantiating the
+ underlying type::
+
+ Annotated[C, Ann1](5) == C(5)
+
+ - Annotated can be used as a generic type alias::
+
+ Optimized = Annotated[T, runtime.Optimize()]
+ Optimized[int] == Annotated[int, runtime.Optimize()]
+
+ OptimizedList = Annotated[List[T], runtime.Optimize()]
+ OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
+ """
+
+ __slots__ = ()
+
+ def __new__(cls, *args, **kwargs):
+ raise TypeError("Type Annotated cannot be instantiated.")
+
+ @_tp_cache
+ def __class_getitem__(cls, params):
+ if not isinstance(params, tuple) or len(params) < 2:
+ raise TypeError("Annotated[...] should be used "
+ "with at least two arguments (a type and an "
+ "annotation).")
+ msg = "Annotated[t, ...]: t must be a type."
+ origin = _type_check(params[0], msg)
+ metadata = tuple(params[1:])
+ return _AnnotatedAlias(origin, metadata)
+
+ def __init_subclass__(cls, *args, **kwargs):
+ raise TypeError(
+ "Cannot subclass {}.Annotated".format(cls.__module__)
+ )
+
+
+def runtime_checkable(cls):
+ """Mark a protocol class as a runtime protocol.
+
+ Such a protocol can be used with isinstance() and issubclass().
+ Raise TypeError if applied to a non-protocol class.
+ This allows a simple-minded structural check very similar to
+ one-trick ponies in collections.abc such as Iterable.
+ For example::
+
+ @runtime_checkable
+ class Closable(Protocol):
+ def close(self): ...
+
+ assert isinstance(open('/some/file'), Closable)
+
+ Warning: this will check only the presence of the required methods,
+ not their type signatures!
+ """
+ if not issubclass(cls, Generic) or not cls._is_protocol:
+ raise TypeError('@runtime_checkable can be only applied to protocol classes,'
+ ' got %r' % cls)
+ cls._is_runtime_protocol = True
+ return cls
+
+
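+# Protocol and runtime_checkable combined, doctest-style (io.StringIO is
+# just a handy object that happens to have a close() method):
+#     >>> from typing import Protocol, runtime_checkable
+#     >>> @runtime_checkable
+#     ... class Closable(Protocol):
+#     ...     def close(self): ...
+#     >>> import io
+#     >>> isinstance(io.StringIO(), Closable)
+#     True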
def cast(typ, val):
"""Cast a value to a type.
@@ -1407,13 +1407,13 @@ _allowed_types = (types.FunctionType, types.BuiltinFunctionType,
WrapperDescriptorType, MethodWrapperType, MethodDescriptorType)
-def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
+def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
"""Return type hints for an object.
This is often the same as obj.__annotations__, but it handles
- forward references encoded as string literals, adds Optional[t] if a
- default value equal to None is set and recursively replaces all
- 'Annotated[T, ...]' with 'T' (unless 'include_extras=True').
+ forward references encoded as string literals, adds Optional[t] if a
+ default value equal to None is set and recursively replaces all
+ 'Annotated[T, ...]' with 'T' (unless 'include_extras=True').
The argument may be a module, class, method, or function. The annotations
are returned as a dictionary. For classes, annotations include also
@@ -1454,20 +1454,20 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
if value is None:
value = type(None)
if isinstance(value, str):
- value = ForwardRef(value, is_argument=False, is_class=True)
+ value = ForwardRef(value, is_argument=False, is_class=True)
value = _eval_type(value, base_globals, localns)
hints[name] = value
- return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
+ return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
if globalns is None:
if isinstance(obj, types.ModuleType):
globalns = obj.__dict__
else:
- nsobj = obj
- # Find globalns for the unwrapped object.
- while hasattr(nsobj, '__wrapped__'):
- nsobj = nsobj.__wrapped__
- globalns = getattr(nsobj, '__globals__', {})
+ nsobj = obj
+ # Find globalns for the unwrapped object.
+ while hasattr(nsobj, '__wrapped__'):
+ nsobj = nsobj.__wrapped__
+ globalns = getattr(nsobj, '__globals__', {})
if localns is None:
localns = globalns
elif localns is None:
@@ -1486,82 +1486,82 @@ def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
if value is None:
value = type(None)
if isinstance(value, str):
- # class-level forward refs were handled above, this must be either
- # a module-level annotation or a function argument annotation
- value = ForwardRef(
- value,
- is_argument=not isinstance(obj, types.ModuleType),
- is_class=False,
- )
+ # class-level forward refs were handled above, this must be either
+ # a module-level annotation or a function argument annotation
+ value = ForwardRef(
+ value,
+ is_argument=not isinstance(obj, types.ModuleType),
+ is_class=False,
+ )
value = _eval_type(value, globalns, localns)
if name in defaults and defaults[name] is None:
value = Optional[value]
hints[name] = value
- return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
-
-
-def _strip_annotations(t):
- """Strips the annotations from a given type.
- """
- if isinstance(t, _AnnotatedAlias):
- return _strip_annotations(t.__origin__)
- if isinstance(t, _GenericAlias):
- stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
- if stripped_args == t.__args__:
- return t
- return t.copy_with(stripped_args)
- if isinstance(t, GenericAlias):
- stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
- if stripped_args == t.__args__:
- return t
- return GenericAlias(t.__origin__, stripped_args)
- return t
-
-
-def get_origin(tp):
- """Get the unsubscripted version of a type.
-
- This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
- and Annotated. Return None for unsupported types. Examples::
-
- get_origin(Literal[42]) is Literal
- get_origin(int) is None
- get_origin(ClassVar[int]) is ClassVar
- get_origin(Generic) is Generic
- get_origin(Generic[T]) is Generic
- get_origin(Union[T, int]) is Union
- get_origin(List[Tuple[T, T]][int]) == list
- """
- if isinstance(tp, _AnnotatedAlias):
- return Annotated
- if isinstance(tp, (_BaseGenericAlias, GenericAlias)):
- return tp.__origin__
- if tp is Generic:
- return Generic
- return None
-
-
-def get_args(tp):
- """Get type arguments with all substitutions performed.
-
- For unions, basic simplifications used by Union constructor are performed.
- Examples::
- get_args(Dict[str, int]) == (str, int)
- get_args(int) == ()
- get_args(Union[int, Union[T, int], str][int]) == (int, str)
- get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
- get_args(Callable[[], T][int]) == ([], int)
- """
- if isinstance(tp, _AnnotatedAlias):
- return (tp.__origin__,) + tp.__metadata__
- if isinstance(tp, (_GenericAlias, GenericAlias)):
- res = tp.__args__
- if tp.__origin__ is collections.abc.Callable and res[0] is not Ellipsis:
- res = (list(res[:-1]), res[-1])
- return res
- return ()
-
-
+ return hints if include_extras else {k: _strip_annotations(t) for k, t in hints.items()}
+
+
+def _strip_annotations(t):
+ """Strips the annotations from a given type.
+ """
+ if isinstance(t, _AnnotatedAlias):
+ return _strip_annotations(t.__origin__)
+ if isinstance(t, _GenericAlias):
+ stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
+ if stripped_args == t.__args__:
+ return t
+ return t.copy_with(stripped_args)
+ if isinstance(t, GenericAlias):
+ stripped_args = tuple(_strip_annotations(a) for a in t.__args__)
+ if stripped_args == t.__args__:
+ return t
+ return GenericAlias(t.__origin__, stripped_args)
+ return t
+
+
+def get_origin(tp):
+ """Get the unsubscripted version of a type.
+
+ This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
+ and Annotated. Return None for unsupported types. Examples::
+
+ get_origin(Literal[42]) is Literal
+ get_origin(int) is None
+ get_origin(ClassVar[int]) is ClassVar
+ get_origin(Generic) is Generic
+ get_origin(Generic[T]) is Generic
+ get_origin(Union[T, int]) is Union
+ get_origin(List[Tuple[T, T]][int]) == list
+ """
+ if isinstance(tp, _AnnotatedAlias):
+ return Annotated
+ if isinstance(tp, (_BaseGenericAlias, GenericAlias)):
+ return tp.__origin__
+ if tp is Generic:
+ return Generic
+ return None
+
+
+def get_args(tp):
+ """Get type arguments with all substitutions performed.
+
+ For unions, basic simplifications used by Union constructor are performed.
+ Examples::
+ get_args(Dict[str, int]) == (str, int)
+ get_args(int) == ()
+ get_args(Union[int, Union[T, int], str][int]) == (int, str)
+ get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
+ get_args(Callable[[], T][int]) == ([], int)
+ """
+ if isinstance(tp, _AnnotatedAlias):
+ return (tp.__origin__,) + tp.__metadata__
+ if isinstance(tp, (_GenericAlias, GenericAlias)):
+ res = tp.__args__
+ if tp.__origin__ is collections.abc.Callable and res[0] is not Ellipsis:
+ res = (list(res[:-1]), res[-1])
+ return res
+ return ()
+
+
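Taken together, the three helpers above form typing's public introspection API. A minimal usage sketch (the Point class and its fields are invented for illustration, not part of this diff):

from typing import Annotated, Dict, get_args, get_origin, get_type_hints

class Point:
    x: "int"                 # forward reference, resolved by get_type_hints()
    y: int
    label: Annotated[str, "display name"]

hints = get_type_hints(Point)                        # Annotated metadata stripped
assert hints == {'x': int, 'y': int, 'label': str}

extras = get_type_hints(Point, include_extras=True)  # metadata preserved
assert get_origin(extras['label']) is Annotated

assert get_origin(Dict[str, int]) is dict
assert get_args(Dict[str, int]) == (str, int)
assert get_origin(int) is None and get_args(int) == ()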
def no_type_check(arg):
"""Decorator to indicate that annotations are not type hints.
@@ -1642,30 +1642,30 @@ def overload(func):
return _overload_dummy
-def final(f):
- """A decorator to indicate final methods and final classes.
+def final(f):
+ """A decorator to indicate final methods and final classes.
- Use this decorator to indicate to type checkers that the decorated
- method cannot be overridden, and decorated class cannot be subclassed.
- For example:
+ Use this decorator to indicate to type checkers that the decorated
+ method cannot be overridden, and decorated class cannot be subclassed.
+ For example:
- class Base:
- @final
- def done(self) -> None:
- ...
- class Sub(Base):
- def done(self) -> None: # Error reported by type checker
- ...
+ class Base:
+ @final
+ def done(self) -> None:
+ ...
+ class Sub(Base):
+ def done(self) -> None: # Error reported by type checker
+ ...
- @final
- class Leaf:
- ...
- class Other(Leaf): # Error reported by type checker
- ...
+ @final
+ class Leaf:
+ ...
+ class Other(Leaf): # Error reported by type checker
+ ...
- There is no runtime checking of these properties.
+ There is no runtime checking of these properties.
"""
- return f
+ return f
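A short illustration of the decorator (Config and Derived are invented names; as the body above shows, the runtime effect is a no-op):

from typing import final

@final
class Config:
    def reload(self) -> None:
        ...

# At runtime @final simply returns its argument; a static type checker
# would reject the subclass below.
class Derived(Config):   # error for a type checker, fine at runtime
    pass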
# Some unconstrained type variables. These are used by the container types.
@@ -1686,20 +1686,20 @@ AnyStr = TypeVar('AnyStr', bytes, str)
# Various ABCs mimicking those in collections.abc.
-_alias = _SpecialGenericAlias
-
-Hashable = _alias(collections.abc.Hashable, 0) # Not generic.
-Awaitable = _alias(collections.abc.Awaitable, 1)
-Coroutine = _alias(collections.abc.Coroutine, 3)
-AsyncIterable = _alias(collections.abc.AsyncIterable, 1)
-AsyncIterator = _alias(collections.abc.AsyncIterator, 1)
-Iterable = _alias(collections.abc.Iterable, 1)
-Iterator = _alias(collections.abc.Iterator, 1)
-Reversible = _alias(collections.abc.Reversible, 1)
-Sized = _alias(collections.abc.Sized, 0) # Not generic.
-Container = _alias(collections.abc.Container, 1)
-Collection = _alias(collections.abc.Collection, 1)
-Callable = _CallableType(collections.abc.Callable, 2)
+_alias = _SpecialGenericAlias
+
+Hashable = _alias(collections.abc.Hashable, 0) # Not generic.
+Awaitable = _alias(collections.abc.Awaitable, 1)
+Coroutine = _alias(collections.abc.Coroutine, 3)
+AsyncIterable = _alias(collections.abc.AsyncIterable, 1)
+AsyncIterator = _alias(collections.abc.AsyncIterator, 1)
+Iterable = _alias(collections.abc.Iterable, 1)
+Iterator = _alias(collections.abc.Iterator, 1)
+Reversible = _alias(collections.abc.Reversible, 1)
+Sized = _alias(collections.abc.Sized, 0) # Not generic.
+Container = _alias(collections.abc.Container, 1)
+Collection = _alias(collections.abc.Collection, 1)
+Callable = _CallableType(collections.abc.Callable, 2)
Callable.__doc__ = \
"""Callable type; Callable[[int], str] is a function of (int) -> str.
@@ -1710,16 +1710,16 @@ Callable.__doc__ = \
There is no syntax to indicate optional or keyword arguments,
such function types are rarely used as callback types.
"""
-AbstractSet = _alias(collections.abc.Set, 1, name='AbstractSet')
-MutableSet = _alias(collections.abc.MutableSet, 1)
+AbstractSet = _alias(collections.abc.Set, 1, name='AbstractSet')
+MutableSet = _alias(collections.abc.MutableSet, 1)
# NOTE: Mapping is only covariant in the value type.
-Mapping = _alias(collections.abc.Mapping, 2)
-MutableMapping = _alias(collections.abc.MutableMapping, 2)
-Sequence = _alias(collections.abc.Sequence, 1)
-MutableSequence = _alias(collections.abc.MutableSequence, 1)
-ByteString = _alias(collections.abc.ByteString, 0) # Not generic
-# Tuple accepts variable number of parameters.
-Tuple = _TupleType(tuple, -1, inst=False, name='Tuple')
+Mapping = _alias(collections.abc.Mapping, 2)
+MutableMapping = _alias(collections.abc.MutableMapping, 2)
+Sequence = _alias(collections.abc.Sequence, 1)
+MutableSequence = _alias(collections.abc.MutableSequence, 1)
+ByteString = _alias(collections.abc.ByteString, 0) # Not generic
+# Tuple accepts variable number of parameters.
+Tuple = _TupleType(tuple, -1, inst=False, name='Tuple')
Tuple.__doc__ = \
"""Tuple type; Tuple[X, Y] is the cross-product type of X and Y.
@@ -1729,24 +1729,24 @@ Tuple.__doc__ = \
To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].
"""
-List = _alias(list, 1, inst=False, name='List')
-Deque = _alias(collections.deque, 1, name='Deque')
-Set = _alias(set, 1, inst=False, name='Set')
-FrozenSet = _alias(frozenset, 1, inst=False, name='FrozenSet')
-MappingView = _alias(collections.abc.MappingView, 1)
-KeysView = _alias(collections.abc.KeysView, 1)
-ItemsView = _alias(collections.abc.ItemsView, 2)
-ValuesView = _alias(collections.abc.ValuesView, 1)
-ContextManager = _alias(contextlib.AbstractContextManager, 1, name='ContextManager')
-AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, 1, name='AsyncContextManager')
-Dict = _alias(dict, 2, inst=False, name='Dict')
-DefaultDict = _alias(collections.defaultdict, 2, name='DefaultDict')
-OrderedDict = _alias(collections.OrderedDict, 2)
-Counter = _alias(collections.Counter, 1)
-ChainMap = _alias(collections.ChainMap, 2)
-Generator = _alias(collections.abc.Generator, 3)
-AsyncGenerator = _alias(collections.abc.AsyncGenerator, 2)
-Type = _alias(type, 1, inst=False, name='Type')
+List = _alias(list, 1, inst=False, name='List')
+Deque = _alias(collections.deque, 1, name='Deque')
+Set = _alias(set, 1, inst=False, name='Set')
+FrozenSet = _alias(frozenset, 1, inst=False, name='FrozenSet')
+MappingView = _alias(collections.abc.MappingView, 1)
+KeysView = _alias(collections.abc.KeysView, 1)
+ItemsView = _alias(collections.abc.ItemsView, 2)
+ValuesView = _alias(collections.abc.ValuesView, 1)
+ContextManager = _alias(contextlib.AbstractContextManager, 1, name='ContextManager')
+AsyncContextManager = _alias(contextlib.AbstractAsyncContextManager, 1, name='AsyncContextManager')
+Dict = _alias(dict, 2, inst=False, name='Dict')
+DefaultDict = _alias(collections.defaultdict, 2, name='DefaultDict')
+OrderedDict = _alias(collections.OrderedDict, 2)
+Counter = _alias(collections.Counter, 1)
+ChainMap = _alias(collections.ChainMap, 2)
+Generator = _alias(collections.abc.Generator, 3)
+AsyncGenerator = _alias(collections.abc.AsyncGenerator, 2)
+Type = _alias(type, 1, inst=False, name='Type')
Type.__doc__ = \
"""A special construct usable to annotate class objects.
@@ -1772,9 +1772,9 @@ Type.__doc__ = \
"""
-@runtime_checkable
-class SupportsInt(Protocol):
- """An ABC with one abstract method __int__."""
+@runtime_checkable
+class SupportsInt(Protocol):
+ """An ABC with one abstract method __int__."""
__slots__ = ()
@abstractmethod
@@ -1782,9 +1782,9 @@ class SupportsInt(Protocol):
pass
-@runtime_checkable
-class SupportsFloat(Protocol):
- """An ABC with one abstract method __float__."""
+@runtime_checkable
+class SupportsFloat(Protocol):
+ """An ABC with one abstract method __float__."""
__slots__ = ()
@abstractmethod
@@ -1792,9 +1792,9 @@ class SupportsFloat(Protocol):
pass
-@runtime_checkable
-class SupportsComplex(Protocol):
- """An ABC with one abstract method __complex__."""
+@runtime_checkable
+class SupportsComplex(Protocol):
+ """An ABC with one abstract method __complex__."""
__slots__ = ()
@abstractmethod
@@ -1802,9 +1802,9 @@ class SupportsComplex(Protocol):
pass
-@runtime_checkable
-class SupportsBytes(Protocol):
- """An ABC with one abstract method __bytes__."""
+@runtime_checkable
+class SupportsBytes(Protocol):
+ """An ABC with one abstract method __bytes__."""
__slots__ = ()
@abstractmethod
@@ -1812,19 +1812,19 @@ class SupportsBytes(Protocol):
pass
-@runtime_checkable
-class SupportsIndex(Protocol):
- """An ABC with one abstract method __index__."""
- __slots__ = ()
-
- @abstractmethod
- def __index__(self) -> int:
- pass
-
-
-@runtime_checkable
-class SupportsAbs(Protocol[T_co]):
- """An ABC with one abstract method __abs__ that is covariant in its return type."""
+@runtime_checkable
+class SupportsIndex(Protocol):
+ """An ABC with one abstract method __index__."""
+ __slots__ = ()
+
+ @abstractmethod
+ def __index__(self) -> int:
+ pass
+
+
+@runtime_checkable
+class SupportsAbs(Protocol[T_co]):
+ """An ABC with one abstract method __abs__ that is covariant in its return type."""
__slots__ = ()
@abstractmethod
@@ -1832,9 +1832,9 @@ class SupportsAbs(Protocol[T_co]):
pass
-@runtime_checkable
-class SupportsRound(Protocol[T_co]):
- """An ABC with one abstract method __round__ that is covariant in its return type."""
+@runtime_checkable
+class SupportsRound(Protocol[T_co]):
+ """An ABC with one abstract method __round__ that is covariant in its return type."""
__slots__ = ()
@abstractmethod
@@ -1842,41 +1842,41 @@ class SupportsRound(Protocol[T_co]):
pass
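All of the Supports* classes re-decorated in these hunks are runtime-checkable protocols, so isinstance() performs a purely structural check; a small sketch (Celsius is an invented class):

from typing import SupportsFloat, SupportsIndex

class Celsius:
    def __init__(self, degrees: float) -> None:
        self.degrees = degrees
    def __float__(self) -> float:
        return self.degrees

assert isinstance(Celsius(21.5), SupportsFloat)      # has __float__
assert not isinstance(Celsius(21.5), SupportsIndex)  # lacks __index__
assert isinstance(3, SupportsIndex)                  # int defines __index__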
-def _make_nmtuple(name, types, module, defaults = ()):
- fields = [n for n, t in types]
- types = {n: _type_check(t, f"field {n} annotation must be a type")
- for n, t in types}
- nm_tpl = collections.namedtuple(name, fields,
- defaults=defaults, module=module)
- nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = types
+def _make_nmtuple(name, types, module, defaults = ()):
+ fields = [n for n, t in types]
+ types = {n: _type_check(t, f"field {n} annotation must be a type")
+ for n, t in types}
+ nm_tpl = collections.namedtuple(name, fields,
+ defaults=defaults, module=module)
+ nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = types
return nm_tpl
# attributes prohibited to set in NamedTuple class syntax
-_prohibited = frozenset({'__new__', '__init__', '__slots__', '__getnewargs__',
- '_fields', '_field_defaults',
- '_make', '_replace', '_asdict', '_source'})
+_prohibited = frozenset({'__new__', '__init__', '__slots__', '__getnewargs__',
+ '_fields', '_field_defaults',
+ '_make', '_replace', '_asdict', '_source'})
-_special = frozenset({'__module__', '__name__', '__annotations__'})
+_special = frozenset({'__module__', '__name__', '__annotations__'})
class NamedTupleMeta(type):
def __new__(cls, typename, bases, ns):
- assert bases[0] is _NamedTuple
+ assert bases[0] is _NamedTuple
types = ns.get('__annotations__', {})
- default_names = []
+ default_names = []
for field_name in types:
if field_name in ns:
- default_names.append(field_name)
- elif default_names:
- raise TypeError(f"Non-default namedtuple field {field_name} "
- f"cannot follow default field"
- f"{'s' if len(default_names) > 1 else ''} "
- f"{', '.join(default_names)}")
- nm_tpl = _make_nmtuple(typename, types.items(),
- defaults=[ns[n] for n in default_names],
- module=ns['__module__'])
+ default_names.append(field_name)
+ elif default_names:
+ raise TypeError(f"Non-default namedtuple field {field_name} "
+ f"cannot follow default field"
+ f"{'s' if len(default_names) > 1 else ''} "
+ f"{', '.join(default_names)}")
+ nm_tpl = _make_nmtuple(typename, types.items(),
+ defaults=[ns[n] for n in default_names],
+ module=ns['__module__'])
# update from user namespace without overriding special namedtuple attributes
for key in ns:
if key in _prohibited:
@@ -1886,7 +1886,7 @@ class NamedTupleMeta(type):
return nm_tpl
-def NamedTuple(typename, fields=None, /, **kwargs):
+def NamedTuple(typename, fields=None, /, **kwargs):
"""Typed version of namedtuple.
Usage in Python versions >= 3.6::
@@ -1899,10 +1899,10 @@ def NamedTuple(typename, fields=None, /, **kwargs):
Employee = collections.namedtuple('Employee', ['name', 'id'])
- The resulting class has an extra __annotations__ attribute, giving a
- dict that maps field names to types. (The field names are also in
- the _fields attribute, which is part of the namedtuple API.)
- Alternative equivalent keyword syntax is also accepted::
+ The resulting class has an extra __annotations__ attribute, giving a
+ dict that maps field names to types. (The field names are also in
+ the _fields attribute, which is part of the namedtuple API.)
+ Alternative equivalent keyword syntax is also accepted::
Employee = NamedTuple('Employee', name=str, id=int)
@@ -1910,142 +1910,142 @@ def NamedTuple(typename, fields=None, /, **kwargs):
Employee = NamedTuple('Employee', [('name', str), ('id', int)])
"""
- if fields is None:
- fields = kwargs.items()
- elif kwargs:
- raise TypeError("Either list of fields or keywords"
- " can be provided to NamedTuple, not both")
- try:
- module = sys._getframe(1).f_globals.get('__name__', '__main__')
- except (AttributeError, ValueError):
- module = None
- return _make_nmtuple(typename, fields, module=module)
-
-_NamedTuple = type.__new__(NamedTupleMeta, 'NamedTuple', (), {})
-
-def _namedtuple_mro_entries(bases):
- if len(bases) > 1:
- raise TypeError("Multiple inheritance with NamedTuple is not supported")
- assert bases[0] is NamedTuple
- return (_NamedTuple,)
-
-NamedTuple.__mro_entries__ = _namedtuple_mro_entries
-
-
-class _TypedDictMeta(type):
- def __new__(cls, name, bases, ns, total=True):
- """Create new typed dict class object.
-
- This method is called when TypedDict is subclassed,
- or when TypedDict is instantiated. This way
- TypedDict supports all three syntax forms described in its docstring.
- Subclasses and instances of TypedDict return actual dictionaries.
- """
- for base in bases:
- if type(base) is not _TypedDictMeta:
- raise TypeError('cannot inherit from both a TypedDict type '
- 'and a non-TypedDict base class')
- tp_dict = type.__new__(_TypedDictMeta, name, (dict,), ns)
-
- annotations = {}
- own_annotations = ns.get('__annotations__', {})
- own_annotation_keys = set(own_annotations.keys())
- msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
- own_annotations = {
- n: _type_check(tp, msg, module=tp_dict.__module__)
- for n, tp in own_annotations.items()
- }
- required_keys = set()
- optional_keys = set()
-
- for base in bases:
- annotations.update(base.__dict__.get('__annotations__', {}))
- required_keys.update(base.__dict__.get('__required_keys__', ()))
- optional_keys.update(base.__dict__.get('__optional_keys__', ()))
-
- annotations.update(own_annotations)
- if total:
- required_keys.update(own_annotation_keys)
- else:
- optional_keys.update(own_annotation_keys)
-
- tp_dict.__annotations__ = annotations
- tp_dict.__required_keys__ = frozenset(required_keys)
- tp_dict.__optional_keys__ = frozenset(optional_keys)
- if not hasattr(tp_dict, '__total__'):
- tp_dict.__total__ = total
- return tp_dict
-
- __call__ = dict # static method
-
- def __subclasscheck__(cls, other):
- # Typed dicts are only for static structural subtyping.
- raise TypeError('TypedDict does not support instance and class checks')
-
- __instancecheck__ = __subclasscheck__
-
-
-def TypedDict(typename, fields=None, /, *, total=True, **kwargs):
- """A simple typed namespace. At runtime it is equivalent to a plain dict.
-
- TypedDict creates a dictionary type that expects all of its
- instances to have a certain set of keys, where each key is
- associated with a value of a consistent type. This expectation
- is not checked at runtime but is only enforced by type checkers.
- Usage::
-
- class Point2D(TypedDict):
- x: int
- y: int
- label: str
-
- a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK
- b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check
-
- assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
-
- The type info can be accessed via the Point2D.__annotations__ dict, and
- the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
- TypedDict supports two additional equivalent forms::
-
- Point2D = TypedDict('Point2D', x=int, y=int, label=str)
- Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
-
- By default, all keys must be present in a TypedDict. It is possible
- to override this by specifying totality.
- Usage::
-
- class point2D(TypedDict, total=False):
- x: int
- y: int
-
- This means that a point2D TypedDict can have any of the keys omitted. A type
- checker is only expected to support a literal False or True as the value of
- the total argument. True is the default, and makes all items defined in the
- class body required.
-
- The class syntax is only supported in Python 3.6+, while two other
- syntax forms work for Python 2.7 and 3.2+.
- """
- if fields is None:
- fields = kwargs
- elif kwargs:
- raise TypeError("TypedDict takes either a dict or keyword arguments,"
- " but not both")
-
- ns = {'__annotations__': dict(fields)}
- try:
- # Setting correct module is necessary to make typed dict classes pickleable.
- ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
- except (AttributeError, ValueError):
- pass
-
- return _TypedDictMeta(typename, (), ns, total=total)
-
-_TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
-TypedDict.__mro_entries__ = lambda bases: (_TypedDict,)
-
-
+ if fields is None:
+ fields = kwargs.items()
+ elif kwargs:
+ raise TypeError("Either list of fields or keywords"
+ " can be provided to NamedTuple, not both")
+ try:
+ module = sys._getframe(1).f_globals.get('__name__', '__main__')
+ except (AttributeError, ValueError):
+ module = None
+ return _make_nmtuple(typename, fields, module=module)
+
+_NamedTuple = type.__new__(NamedTupleMeta, 'NamedTuple', (), {})
+
+def _namedtuple_mro_entries(bases):
+ if len(bases) > 1:
+ raise TypeError("Multiple inheritance with NamedTuple is not supported")
+ assert bases[0] is NamedTuple
+ return (_NamedTuple,)
+
+NamedTuple.__mro_entries__ = _namedtuple_mro_entries
+
+
+class _TypedDictMeta(type):
+ def __new__(cls, name, bases, ns, total=True):
+ """Create new typed dict class object.
+
+ This method is called when TypedDict is subclassed,
+ or when TypedDict is instantiated. This way
+ TypedDict supports all three syntax forms described in its docstring.
+ Subclasses and instances of TypedDict return actual dictionaries.
+ """
+ for base in bases:
+ if type(base) is not _TypedDictMeta:
+ raise TypeError('cannot inherit from both a TypedDict type '
+ 'and a non-TypedDict base class')
+ tp_dict = type.__new__(_TypedDictMeta, name, (dict,), ns)
+
+ annotations = {}
+ own_annotations = ns.get('__annotations__', {})
+ own_annotation_keys = set(own_annotations.keys())
+ msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
+ own_annotations = {
+ n: _type_check(tp, msg, module=tp_dict.__module__)
+ for n, tp in own_annotations.items()
+ }
+ required_keys = set()
+ optional_keys = set()
+
+ for base in bases:
+ annotations.update(base.__dict__.get('__annotations__', {}))
+ required_keys.update(base.__dict__.get('__required_keys__', ()))
+ optional_keys.update(base.__dict__.get('__optional_keys__', ()))
+
+ annotations.update(own_annotations)
+ if total:
+ required_keys.update(own_annotation_keys)
+ else:
+ optional_keys.update(own_annotation_keys)
+
+ tp_dict.__annotations__ = annotations
+ tp_dict.__required_keys__ = frozenset(required_keys)
+ tp_dict.__optional_keys__ = frozenset(optional_keys)
+ if not hasattr(tp_dict, '__total__'):
+ tp_dict.__total__ = total
+ return tp_dict
+
+ __call__ = dict # static method
+
+ def __subclasscheck__(cls, other):
+ # Typed dicts are only for static structural subtyping.
+ raise TypeError('TypedDict does not support instance and class checks')
+
+ __instancecheck__ = __subclasscheck__
+
+
+def TypedDict(typename, fields=None, /, *, total=True, **kwargs):
+ """A simple typed namespace. At runtime it is equivalent to a plain dict.
+
+ TypedDict creates a dictionary type that expects all of its
+ instances to have a certain set of keys, where each key is
+ associated with a value of a consistent type. This expectation
+ is not checked at runtime but is only enforced by type checkers.
+ Usage::
+
+ class Point2D(TypedDict):
+ x: int
+ y: int
+ label: str
+
+ a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK
+ b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check
+
+ assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')
+
+ The type info can be accessed via the Point2D.__annotations__ dict, and
+ the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
+ TypedDict supports two additional equivalent forms::
+
+ Point2D = TypedDict('Point2D', x=int, y=int, label=str)
+ Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})
+
+ By default, all keys must be present in a TypedDict. It is possible
+ to override this by specifying totality.
+ Usage::
+
+ class point2D(TypedDict, total=False):
+ x: int
+ y: int
+
+ This means that a point2D TypedDict can have any of the keys omitted. A type
+ checker is only expected to support a literal False or True as the value of
+ the total argument. True is the default, and makes all items defined in the
+ class body required.
+
+ The class syntax is only supported in Python 3.6+, while two other
+ syntax forms work for Python 2.7 and 3.2+.
+ """
+ if fields is None:
+ fields = kwargs
+ elif kwargs:
+ raise TypeError("TypedDict takes either a dict or keyword arguments,"
+ " but not both")
+
+ ns = {'__annotations__': dict(fields)}
+ try:
+ # Setting correct module is necessary to make typed dict classes pickleable.
+ ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
+ except (AttributeError, ValueError):
+ pass
+
+ return _TypedDictMeta(typename, (), ns, total=total)
+
+_TypedDict = type.__new__(_TypedDictMeta, 'TypedDict', (), {})
+TypedDict.__mro_entries__ = lambda bases: (_TypedDict,)
+
+
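A compact sketch exercising both factories defined above (Employee and Point2D echo the docstring examples; the concrete values are invented):

from typing import NamedTuple, TypedDict

class Employee(NamedTuple):
    name: str
    id: int = 0              # defaulted fields must come last

class Point2D(TypedDict, total=False):
    x: int
    y: int

e = Employee('guido')
assert e.id == 0 and e._fields == ('name', 'id')

p: Point2D = {'x': 1}        # 'y' may be omitted because total=False
assert Point2D(x=1) == {'x': 1}   # at runtime a TypedDict is a plain dict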
def NewType(name, tp):
"""NewType creates simple unique types with almost zero
runtime overhead. NewType(name, tp) is considered a subtype of tp
@@ -2096,13 +2096,13 @@ class IO(Generic[AnyStr]):
__slots__ = ()
- @property
- @abstractmethod
+ @property
+ @abstractmethod
def mode(self) -> str:
pass
- @property
- @abstractmethod
+ @property
+ @abstractmethod
def name(self) -> str:
pass
@@ -2110,8 +2110,8 @@ class IO(Generic[AnyStr]):
def close(self) -> None:
pass
- @property
- @abstractmethod
+ @property
+ @abstractmethod
def closed(self) -> bool:
pass
@@ -2199,28 +2199,28 @@ class TextIO(IO[str]):
__slots__ = ()
- @property
- @abstractmethod
+ @property
+ @abstractmethod
def buffer(self) -> BinaryIO:
pass
- @property
- @abstractmethod
+ @property
+ @abstractmethod
def encoding(self) -> str:
pass
- @property
- @abstractmethod
+ @property
+ @abstractmethod
def errors(self) -> Optional[str]:
pass
- @property
- @abstractmethod
+ @property
+ @abstractmethod
def line_buffering(self) -> bool:
pass
- @property
- @abstractmethod
+ @property
+ @abstractmethod
def newlines(self) -> Any:
pass
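These IO/TextIO stubs exist only for annotations; a hypothetical helper annotated with them:

import sys
from typing import IO

def write_banner(out: IO[str]) -> None:
    # any text stream works: an open file, io.StringIO, sys.stdout, ...
    out.write('=' * 40 + '\n')

write_banner(sys.stdout)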
@@ -2241,8 +2241,8 @@ class io:
io.__name__ = __name__ + '.io'
sys.modules[io.__name__] = io
-Pattern = _alias(stdlib_re.Pattern, 1)
-Match = _alias(stdlib_re.Match, 1)
+Pattern = _alias(stdlib_re.Pattern, 1)
+Match = _alias(stdlib_re.Match, 1)
class re:
"""Wrapper namespace for re type aliases."""
diff --git a/contrib/tools/python3/src/Lib/unittest/__init__.py b/contrib/tools/python3/src/Lib/unittest/__init__.py
index 348dc471f4..85c74ece81 100644
--- a/contrib/tools/python3/src/Lib/unittest/__init__.py
+++ b/contrib/tools/python3/src/Lib/unittest/__init__.py
@@ -44,12 +44,12 @@ AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
"""
-__all__ = ['TestResult', 'TestCase', 'IsolatedAsyncioTestCase', 'TestSuite',
+__all__ = ['TestResult', 'TestCase', 'IsolatedAsyncioTestCase', 'TestSuite',
'TextTestRunner', 'TestLoader', 'FunctionTestCase', 'main',
'defaultTestLoader', 'SkipTest', 'skip', 'skipIf', 'skipUnless',
'expectedFailure', 'TextTestResult', 'installHandler',
- 'registerResult', 'removeResult', 'removeHandler',
- 'addModuleCleanup']
+ 'registerResult', 'removeResult', 'removeHandler',
+ 'addModuleCleanup']
# Expose obsolete functions for backwards compatibility
__all__.extend(['getTestCaseNames', 'makeSuite', 'findTestCases'])
@@ -57,15 +57,15 @@ __all__.extend(['getTestCaseNames', 'makeSuite', 'findTestCases'])
__unittest = True
from .result import TestResult
-from .case import (addModuleCleanup, TestCase, FunctionTestCase, SkipTest, skip,
- skipIf, skipUnless, expectedFailure)
+from .case import (addModuleCleanup, TestCase, FunctionTestCase, SkipTest, skip,
+ skipIf, skipUnless, expectedFailure)
from .suite import BaseTestSuite, TestSuite
from .loader import (TestLoader, defaultTestLoader, makeSuite, getTestCaseNames,
findTestCases)
from .main import TestProgram, main
from .runner import TextTestRunner, TextTestResult
from .signals import installHandler, registerResult, removeResult, removeHandler
-# IsolatedAsyncioTestCase will be imported lazily.
+# IsolatedAsyncioTestCase will be imported lazily.
# deprecated
_TextTestResult = TextTestResult
@@ -78,18 +78,18 @@ def load_tests(loader, tests, pattern):
# top level directory cached on loader instance
this_dir = os.path.dirname(__file__)
return loader.discover(start_dir=this_dir, pattern=pattern)
-
-
-# Lazy import of IsolatedAsyncioTestCase from .async_case
-# It imports asyncio, which is relatively heavy, but most tests
-# do not need it.
-
-def __dir__():
- return globals().keys() | {'IsolatedAsyncioTestCase'}
-
-def __getattr__(name):
- if name == 'IsolatedAsyncioTestCase':
- global IsolatedAsyncioTestCase
- from .async_case import IsolatedAsyncioTestCase
- return IsolatedAsyncioTestCase
- raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
+
+
+# Lazy import of IsolatedAsyncioTestCase from .async_case
+# It imports asyncio, which is relatively heavy, but most tests
+# do not need it.
+
+def __dir__():
+ return globals().keys() | {'IsolatedAsyncioTestCase'}
+
+def __getattr__(name):
+ if name == 'IsolatedAsyncioTestCase':
+ global IsolatedAsyncioTestCase
+ from .async_case import IsolatedAsyncioTestCase
+ return IsolatedAsyncioTestCase
+ raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
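The __dir__/__getattr__ pair above is the PEP 562 lazy-import pattern; the same idea for a hypothetical package with a heavy submodule:

# mypkg/__init__.py -- hypothetical package using the same lazy-import trick
def __dir__():
    return sorted(globals().keys() | {'HeavyClass'})

def __getattr__(name):
    if name == 'HeavyClass':
        global HeavyClass
        from ._heavy import HeavyClass   # imported only on first attribute access
        return HeavyClass
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")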
diff --git a/contrib/tools/python3/src/Lib/unittest/_log.py b/contrib/tools/python3/src/Lib/unittest/_log.py
index 94e7e758bd..9ec03f1262 100644
--- a/contrib/tools/python3/src/Lib/unittest/_log.py
+++ b/contrib/tools/python3/src/Lib/unittest/_log.py
@@ -1,69 +1,69 @@
-import logging
-import collections
-
-from .case import _BaseTestCaseContext
-
-
-_LoggingWatcher = collections.namedtuple("_LoggingWatcher",
- ["records", "output"])
-
-class _CapturingHandler(logging.Handler):
- """
- A logging handler capturing all (raw and formatted) logging output.
- """
-
- def __init__(self):
- logging.Handler.__init__(self)
- self.watcher = _LoggingWatcher([], [])
-
- def flush(self):
- pass
-
- def emit(self, record):
- self.watcher.records.append(record)
- msg = self.format(record)
- self.watcher.output.append(msg)
-
-
-class _AssertLogsContext(_BaseTestCaseContext):
- """A context manager used to implement TestCase.assertLogs()."""
-
- LOGGING_FORMAT = "%(levelname)s:%(name)s:%(message)s"
-
- def __init__(self, test_case, logger_name, level):
- _BaseTestCaseContext.__init__(self, test_case)
- self.logger_name = logger_name
- if level:
- self.level = logging._nameToLevel.get(level, level)
- else:
- self.level = logging.INFO
- self.msg = None
-
- def __enter__(self):
- if isinstance(self.logger_name, logging.Logger):
- logger = self.logger = self.logger_name
- else:
- logger = self.logger = logging.getLogger(self.logger_name)
- formatter = logging.Formatter(self.LOGGING_FORMAT)
- handler = _CapturingHandler()
- handler.setFormatter(formatter)
- self.watcher = handler.watcher
- self.old_handlers = logger.handlers[:]
- self.old_level = logger.level
- self.old_propagate = logger.propagate
- logger.handlers = [handler]
- logger.setLevel(self.level)
- logger.propagate = False
- return handler.watcher
-
- def __exit__(self, exc_type, exc_value, tb):
- self.logger.handlers = self.old_handlers
- self.logger.propagate = self.old_propagate
- self.logger.setLevel(self.old_level)
- if exc_type is not None:
- # let unexpected exceptions pass through
- return False
- if len(self.watcher.records) == 0:
- self._raiseFailure(
- "no logs of level {} or higher triggered on {}"
- .format(logging.getLevelName(self.level), self.logger.name))
+import logging
+import collections
+
+from .case import _BaseTestCaseContext
+
+
+_LoggingWatcher = collections.namedtuple("_LoggingWatcher",
+ ["records", "output"])
+
+class _CapturingHandler(logging.Handler):
+ """
+ A logging handler capturing all (raw and formatted) logging output.
+ """
+
+ def __init__(self):
+ logging.Handler.__init__(self)
+ self.watcher = _LoggingWatcher([], [])
+
+ def flush(self):
+ pass
+
+ def emit(self, record):
+ self.watcher.records.append(record)
+ msg = self.format(record)
+ self.watcher.output.append(msg)
+
+
+class _AssertLogsContext(_BaseTestCaseContext):
+ """A context manager used to implement TestCase.assertLogs()."""
+
+ LOGGING_FORMAT = "%(levelname)s:%(name)s:%(message)s"
+
+ def __init__(self, test_case, logger_name, level):
+ _BaseTestCaseContext.__init__(self, test_case)
+ self.logger_name = logger_name
+ if level:
+ self.level = logging._nameToLevel.get(level, level)
+ else:
+ self.level = logging.INFO
+ self.msg = None
+
+ def __enter__(self):
+ if isinstance(self.logger_name, logging.Logger):
+ logger = self.logger = self.logger_name
+ else:
+ logger = self.logger = logging.getLogger(self.logger_name)
+ formatter = logging.Formatter(self.LOGGING_FORMAT)
+ handler = _CapturingHandler()
+ handler.setFormatter(formatter)
+ self.watcher = handler.watcher
+ self.old_handlers = logger.handlers[:]
+ self.old_level = logger.level
+ self.old_propagate = logger.propagate
+ logger.handlers = [handler]
+ logger.setLevel(self.level)
+ logger.propagate = False
+ return handler.watcher
+
+ def __exit__(self, exc_type, exc_value, tb):
+ self.logger.handlers = self.old_handlers
+ self.logger.propagate = self.old_propagate
+ self.logger.setLevel(self.old_level)
+ if exc_type is not None:
+ # let unexpected exceptions pass through
+ return False
+ if len(self.watcher.records) == 0:
+ self._raiseFailure(
+ "no logs of level {} or higher triggered on {}"
+ .format(logging.getLevelName(self.level), self.logger.name))
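The handler and context manager above back TestCase.assertLogs(); typical use, following the upstream docstring example:

import logging
import unittest

class LogTests(unittest.TestCase):
    def test_logs(self):
        with self.assertLogs('foo', level='INFO') as cm:
            logging.getLogger('foo').info('first message')
            logging.getLogger('foo.bar').error('second message')
        # output is formatted with "%(levelname)s:%(name)s:%(message)s"
        self.assertEqual(cm.output, ['INFO:foo:first message',
                                     'ERROR:foo.bar:second message'])

if __name__ == '__main__':
    unittest.main()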
diff --git a/contrib/tools/python3/src/Lib/unittest/async_case.py b/contrib/tools/python3/src/Lib/unittest/async_case.py
index a2980e797a..65ca285d69 100644
--- a/contrib/tools/python3/src/Lib/unittest/async_case.py
+++ b/contrib/tools/python3/src/Lib/unittest/async_case.py
@@ -1,168 +1,168 @@
-import asyncio
-import inspect
-
-from .case import TestCase
-
-
-class IsolatedAsyncioTestCase(TestCase):
- # Names intentionally have a long prefix
- # to reduce a chance of clashing with user-defined attributes
- # from inherited test case
- #
- # The class doesn't call loop.run_until_complete(self.setUp()) and family
- # but uses a different approach:
- # 1. create a long-running task that reads self.setUp()
- # awaitable from queue along with a future
- # 2. await the awaitable object passed in and set the result
- # into the future object
- # 3. Outer code puts the awaitable and the future object into a queue
- # with waiting for the future
- # The trick is necessary because every run_until_complete() call
- # creates a new task with embedded ContextVar context.
- # To share contextvars between setUp(), test and tearDown() we need to execute
- # them inside the same task.
-
- # Note: the test case modifies event loop policy if the policy was not instantiated
- # yet.
- # asyncio.get_event_loop_policy() creates a default policy on demand but never
- # returns None
- # This is unlikely to be an issue in user-level tests, but Python's own test suite
- # should reset the policy in every test module
- # by calling asyncio.set_event_loop_policy(None) in tearDownModule()
-
- def __init__(self, methodName='runTest'):
- super().__init__(methodName)
- self._asyncioTestLoop = None
- self._asyncioCallsQueue = None
-
- async def asyncSetUp(self):
- pass
-
- async def asyncTearDown(self):
- pass
-
- def addAsyncCleanup(self, func, /, *args, **kwargs):
- # A trivial trampoline to addCleanup()
- # the function exists because it has a different semantics
- # and signature:
- # addCleanup() accepts regular functions
- # but addAsyncCleanup() accepts coroutines
- #
- # We intentionally don't add inspect.iscoroutinefunction() check
- # for func argument because there is no way
- # to check for async function reliably:
- # 1. It can be "async def func()" itself
- # 2. Class can implement "async def __call__()" method
- # 3. Regular "def func()" that returns awaitable object
- self.addCleanup(*(func, *args), **kwargs)
-
- def _callSetUp(self):
- self.setUp()
- self._callAsync(self.asyncSetUp)
-
- def _callTestMethod(self, method):
- self._callMaybeAsync(method)
-
- def _callTearDown(self):
- self._callAsync(self.asyncTearDown)
- self.tearDown()
-
- def _callCleanup(self, function, *args, **kwargs):
- self._callMaybeAsync(function, *args, **kwargs)
-
- def _callAsync(self, func, /, *args, **kwargs):
- assert self._asyncioTestLoop is not None, 'asyncio test loop is not initialized'
- ret = func(*args, **kwargs)
- assert inspect.isawaitable(ret), f'{func!r} returned non-awaitable'
- fut = self._asyncioTestLoop.create_future()
- self._asyncioCallsQueue.put_nowait((fut, ret))
- return self._asyncioTestLoop.run_until_complete(fut)
-
- def _callMaybeAsync(self, func, /, *args, **kwargs):
- assert self._asyncioTestLoop is not None, 'asyncio test loop is not initialized'
- ret = func(*args, **kwargs)
- if inspect.isawaitable(ret):
- fut = self._asyncioTestLoop.create_future()
- self._asyncioCallsQueue.put_nowait((fut, ret))
- return self._asyncioTestLoop.run_until_complete(fut)
- else:
- return ret
-
- async def _asyncioLoopRunner(self, fut):
- self._asyncioCallsQueue = queue = asyncio.Queue()
- fut.set_result(None)
- while True:
- query = await queue.get()
- queue.task_done()
- if query is None:
- return
- fut, awaitable = query
- try:
- ret = await awaitable
- if not fut.cancelled():
- fut.set_result(ret)
- except (SystemExit, KeyboardInterrupt):
- raise
- except (BaseException, asyncio.CancelledError) as ex:
- if not fut.cancelled():
- fut.set_exception(ex)
-
- def _setupAsyncioLoop(self):
- assert self._asyncioTestLoop is None, 'asyncio test loop already initialized'
- loop = asyncio.new_event_loop()
- asyncio.set_event_loop(loop)
- loop.set_debug(True)
- self._asyncioTestLoop = loop
- fut = loop.create_future()
- self._asyncioCallsTask = loop.create_task(self._asyncioLoopRunner(fut))
- loop.run_until_complete(fut)
-
- def _tearDownAsyncioLoop(self):
- assert self._asyncioTestLoop is not None, 'asyncio test loop is not initialized'
- loop = self._asyncioTestLoop
- self._asyncioTestLoop = None
- self._asyncioCallsQueue.put_nowait(None)
- loop.run_until_complete(self._asyncioCallsQueue.join())
-
- try:
- # cancel all tasks
- to_cancel = asyncio.all_tasks(loop)
- if not to_cancel:
- return
-
- for task in to_cancel:
- task.cancel()
-
- loop.run_until_complete(
- asyncio.gather(*to_cancel, loop=loop, return_exceptions=True))
-
- for task in to_cancel:
- if task.cancelled():
- continue
- if task.exception() is not None:
- loop.call_exception_handler({
- 'message': 'unhandled exception during test shutdown',
- 'exception': task.exception(),
- 'task': task,
- })
- # shutdown asyncgens
- loop.run_until_complete(loop.shutdown_asyncgens())
- finally:
- asyncio.set_event_loop(None)
- loop.close()
-
- def run(self, result=None):
- self._setupAsyncioLoop()
- try:
- return super().run(result)
- finally:
- self._tearDownAsyncioLoop()
-
- def debug(self):
- self._setupAsyncioLoop()
- super().debug()
- self._tearDownAsyncioLoop()
-
- def __del__(self):
- if self._asyncioTestLoop is not None:
- self._tearDownAsyncioLoop()
+import asyncio
+import inspect
+
+from .case import TestCase
+
+
+class IsolatedAsyncioTestCase(TestCase):
+ # Names intentionally have a long prefix
+ # to reduce a chance of clashing with user-defined attributes
+ # from inherited test case
+ #
+ # The class doesn't call loop.run_until_complete(self.setUp()) and family
+ # but uses a different approach:
+ # 1. create a long-running task that reads self.setUp()
+ # awaitable from queue along with a future
+ # 2. await the awaitable object passed in and set the result
+ # into the future object
+ # 3. Outer code puts the awaitable and the future object into a queue
+ # with waiting for the future
+ # The trick is necessary because every run_until_complete() call
+ # creates a new task with embedded ContextVar context.
+ # To share contextvars between setUp(), test and tearDown() we need to execute
+ # them inside the same task.
+
+ # Note: the test case modifies event loop policy if the policy was not instantiated
+ # yet.
+ # asyncio.get_event_loop_policy() creates a default policy on demand but never
+ # returns None
+ # This is unlikely to be an issue in user-level tests, but Python's own test suite
+ # should reset the policy in every test module
+ # by calling asyncio.set_event_loop_policy(None) in tearDownModule()
+
+ def __init__(self, methodName='runTest'):
+ super().__init__(methodName)
+ self._asyncioTestLoop = None
+ self._asyncioCallsQueue = None
+
+ async def asyncSetUp(self):
+ pass
+
+ async def asyncTearDown(self):
+ pass
+
+ def addAsyncCleanup(self, func, /, *args, **kwargs):
+ # A trivial trampoline to addCleanup()
+ # the function exists because it has a different semantics
+ # and signature:
+ # addCleanup() accepts regular functions
+ # but addAsyncCleanup() accepts coroutines
+ #
+ # We intentionally don't add inspect.iscoroutinefunction() check
+ # for func argument because there is no way
+ # to check for async function reliably:
+ # 1. It can be "async def func()" itself
+ # 2. Class can implement "async def __call__()" method
+ # 3. Regular "def func()" that returns awaitable object
+ self.addCleanup(*(func, *args), **kwargs)
+
+ def _callSetUp(self):
+ self.setUp()
+ self._callAsync(self.asyncSetUp)
+
+ def _callTestMethod(self, method):
+ self._callMaybeAsync(method)
+
+ def _callTearDown(self):
+ self._callAsync(self.asyncTearDown)
+ self.tearDown()
+
+ def _callCleanup(self, function, *args, **kwargs):
+ self._callMaybeAsync(function, *args, **kwargs)
+
+ def _callAsync(self, func, /, *args, **kwargs):
+ assert self._asyncioTestLoop is not None, 'asyncio test loop is not initialized'
+ ret = func(*args, **kwargs)
+ assert inspect.isawaitable(ret), f'{func!r} returned non-awaitable'
+ fut = self._asyncioTestLoop.create_future()
+ self._asyncioCallsQueue.put_nowait((fut, ret))
+ return self._asyncioTestLoop.run_until_complete(fut)
+
+ def _callMaybeAsync(self, func, /, *args, **kwargs):
+ assert self._asyncioTestLoop is not None, 'asyncio test loop is not initialized'
+ ret = func(*args, **kwargs)
+ if inspect.isawaitable(ret):
+ fut = self._asyncioTestLoop.create_future()
+ self._asyncioCallsQueue.put_nowait((fut, ret))
+ return self._asyncioTestLoop.run_until_complete(fut)
+ else:
+ return ret
+
+ async def _asyncioLoopRunner(self, fut):
+ self._asyncioCallsQueue = queue = asyncio.Queue()
+ fut.set_result(None)
+ while True:
+ query = await queue.get()
+ queue.task_done()
+ if query is None:
+ return
+ fut, awaitable = query
+ try:
+ ret = await awaitable
+ if not fut.cancelled():
+ fut.set_result(ret)
+ except (SystemExit, KeyboardInterrupt):
+ raise
+ except (BaseException, asyncio.CancelledError) as ex:
+ if not fut.cancelled():
+ fut.set_exception(ex)
+
+ def _setupAsyncioLoop(self):
+ assert self._asyncioTestLoop is None, 'asyncio test loop already initialized'
+ loop = asyncio.new_event_loop()
+ asyncio.set_event_loop(loop)
+ loop.set_debug(True)
+ self._asyncioTestLoop = loop
+ fut = loop.create_future()
+ self._asyncioCallsTask = loop.create_task(self._asyncioLoopRunner(fut))
+ loop.run_until_complete(fut)
+
+ def _tearDownAsyncioLoop(self):
+ assert self._asyncioTestLoop is not None, 'asyncio test loop is not initialized'
+ loop = self._asyncioTestLoop
+ self._asyncioTestLoop = None
+ self._asyncioCallsQueue.put_nowait(None)
+ loop.run_until_complete(self._asyncioCallsQueue.join())
+
+ try:
+ # cancel all tasks
+ to_cancel = asyncio.all_tasks(loop)
+ if not to_cancel:
+ return
+
+ for task in to_cancel:
+ task.cancel()
+
+ loop.run_until_complete(
+ asyncio.gather(*to_cancel, loop=loop, return_exceptions=True))
+
+ for task in to_cancel:
+ if task.cancelled():
+ continue
+ if task.exception() is not None:
+ loop.call_exception_handler({
+ 'message': 'unhandled exception during test shutdown',
+ 'exception': task.exception(),
+ 'task': task,
+ })
+ # shutdown asyncgens
+ loop.run_until_complete(loop.shutdown_asyncgens())
+ finally:
+ asyncio.set_event_loop(None)
+ loop.close()
+
+ def run(self, result=None):
+ self._setupAsyncioLoop()
+ try:
+ return super().run(result)
+ finally:
+ self._tearDownAsyncioLoop()
+
+ def debug(self):
+ self._setupAsyncioLoop()
+ super().debug()
+ self._tearDownAsyncioLoop()
+
+ def __del__(self):
+ if self._asyncioTestLoop is not None:
+ self._tearDownAsyncioLoop()
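In practice the class above is used like a regular TestCase with async methods; a minimal illustrative test:

import asyncio
import unittest

class Smoke(unittest.IsolatedAsyncioTestCase):
    async def asyncSetUp(self):
        self.started = True

    async def test_sleep(self):
        # setUp/asyncSetUp/test/tearDown all run inside the same task,
        # so context set up in asyncSetUp is visible here.
        self.assertTrue(self.started)
        await asyncio.sleep(0)

if __name__ == '__main__':
    unittest.main()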
diff --git a/contrib/tools/python3/src/Lib/unittest/case.py b/contrib/tools/python3/src/Lib/unittest/case.py
index 88f1a40865..157325c822 100644
--- a/contrib/tools/python3/src/Lib/unittest/case.py
+++ b/contrib/tools/python3/src/Lib/unittest/case.py
@@ -9,7 +9,7 @@ import warnings
import collections
import contextlib
import traceback
-import types
+import types
from . import result
from .util import (strclass, safe_repr, _count_diff_all_purpose,
@@ -84,30 +84,30 @@ class _Outcome(object):
def _id(obj):
return obj
-
-_module_cleanups = []
-def addModuleCleanup(function, /, *args, **kwargs):
- """Same as addCleanup, except the cleanup items are called even if
- setUpModule fails (unlike tearDownModule)."""
- _module_cleanups.append((function, args, kwargs))
-
-
-def doModuleCleanups():
- """Execute all module cleanup functions. Normally called for you after
- tearDownModule."""
- exceptions = []
- while _module_cleanups:
- function, args, kwargs = _module_cleanups.pop()
- try:
- function(*args, **kwargs)
- except Exception as exc:
- exceptions.append(exc)
- if exceptions:
- # Swallows all but first exception. If a multi-exception handler
- # gets written we should use that here instead.
- raise exceptions[0]
-
-
+
+_module_cleanups = []
+def addModuleCleanup(function, /, *args, **kwargs):
+ """Same as addCleanup, except the cleanup items are called even if
+ setUpModule fails (unlike tearDownModule)."""
+ _module_cleanups.append((function, args, kwargs))
+
+
+def doModuleCleanups():
+ """Execute all module cleanup functions. Normally called for you after
+ tearDownModule."""
+ exceptions = []
+ while _module_cleanups:
+ function, args, kwargs = _module_cleanups.pop()
+ try:
+ function(*args, **kwargs)
+ except Exception as exc:
+ exceptions.append(exc)
+ if exceptions:
+ # Swallows all but first exception. If a multi-exception handler
+ # gets written we should use that here instead.
+ raise exceptions[0]
+
+
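Typical use of the module-level cleanup hook added above (the temporary directory is an invented example):

import shutil
import tempfile
import unittest

def setUpModule():
    global workdir
    workdir = tempfile.mkdtemp()
    # called even if setUpModule fails later, unlike tearDownModule
    unittest.addModuleCleanup(shutil.rmtree, workdir)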
def skip(reason):
"""
Unconditionally skip a test.
@@ -122,10 +122,10 @@ def skip(reason):
test_item.__unittest_skip__ = True
test_item.__unittest_skip_why__ = reason
return test_item
- if isinstance(reason, types.FunctionType):
- test_item = reason
- reason = ''
- return decorator(test_item)
+ if isinstance(reason, types.FunctionType):
+ test_item = reason
+ reason = ''
+ return decorator(test_item)
return decorator
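The new types.FunctionType branch lets the decorator be applied bare, without a reason string; both forms below are now accepted:

import unittest

class Examples(unittest.TestCase):
    @unittest.skip            # bare form: reason defaults to ''
    def test_a(self):
        ...

    @unittest.skip("explicit reason")
    def test_b(self):
        ...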
def skipIf(condition, reason):
@@ -188,8 +188,8 @@ class _AssertRaisesBaseContext(_BaseTestCaseContext):
if not args:
self.msg = kwargs.pop('msg', None)
if kwargs:
- raise TypeError('%r is an invalid keyword argument for '
- 'this function' % (next(iter(kwargs)),))
+ raise TypeError('%r is an invalid keyword argument for '
+ 'this function' % (next(iter(kwargs)),))
return self
callable_obj, *args = args
@@ -240,9 +240,9 @@ class _AssertRaisesContext(_AssertRaisesBaseContext):
expected_regex.pattern, str(exc_value)))
return True
- __class_getitem__ = classmethod(types.GenericAlias)
-
+ __class_getitem__ = classmethod(types.GenericAlias)
+
class _AssertWarnsContext(_AssertRaisesBaseContext):
"""A context manager used to implement TestCase.assertWarns* methods."""
@@ -252,7 +252,7 @@ class _AssertWarnsContext(_AssertRaisesBaseContext):
def __enter__(self):
# The __warningregistry__'s need to be in a pristine state for tests
# to work properly.
- for v in list(sys.modules.values()):
+ for v in list(sys.modules.values()):
if getattr(v, '__warningregistry__', None):
v.__warningregistry__ = {}
self.warnings_manager = warnings.catch_warnings(record=True)
@@ -353,8 +353,8 @@ class TestCase(object):
_classSetupFailed = False
- _class_cleanups = []
-
+ _class_cleanups = []
+
def __init__(self, methodName='runTest'):
"""Create an instance of the class that will use the named test
method when executed. Raises a ValueError if the instance does
@@ -402,7 +402,7 @@ class TestCase(object):
"""
self._type_equality_funcs[typeobj] = function
- def addCleanup(self, function, /, *args, **kwargs):
+ def addCleanup(self, function, /, *args, **kwargs):
"""Add a function, with arguments, to be called when the test is
completed. Functions added are called on a LIFO basis and are
called after tearDown on test failure or success.
@@ -410,12 +410,12 @@ class TestCase(object):
Cleanup items are called even if setUp fails (unlike tearDown)."""
self._cleanups.append((function, args, kwargs))
- @classmethod
- def addClassCleanup(cls, function, /, *args, **kwargs):
- """Same as addCleanup, except the cleanup items are called even if
- setUpClass fails (unlike tearDownClass)."""
- cls._class_cleanups.append((function, args, kwargs))
-
+ @classmethod
+ def addClassCleanup(cls, function, /, *args, **kwargs):
+ """Same as addCleanup, except the cleanup items are called even if
+ setUpClass fails (unlike tearDownClass)."""
+ cls._class_cleanups.append((function, args, kwargs))
+
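The class-scoped counterpart works the same way (the file resource below is an invented example):

import tempfile
import unittest

class ResourceTests(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        cls.tmp = tempfile.TemporaryFile()
        # called even if setUpClass raises later, unlike tearDownClass
        cls.addClassCleanup(cls.tmp.close)

    def test_uses_resource(self):
        self.assertFalse(self.tmp.closed)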
def setUp(self):
"Hook method for setting up the test fixture before exercising it."
pass
@@ -446,7 +446,7 @@ class TestCase(object):
the specified test method's docstring.
"""
doc = self._testMethodDoc
- return doc.strip().split("\n")[0].strip() if doc else None
+ return doc.strip().split("\n")[0].strip() if doc else None
def id(self):
@@ -543,84 +543,84 @@ class TestCase(object):
else:
addUnexpectedSuccess(self)
- def _callSetUp(self):
- self.setUp()
-
- def _callTestMethod(self, method):
- method()
-
- def _callTearDown(self):
- self.tearDown()
-
- def _callCleanup(self, function, /, *args, **kwargs):
- function(*args, **kwargs)
-
+ def _callSetUp(self):
+ self.setUp()
+
+ def _callTestMethod(self, method):
+ method()
+
+ def _callTearDown(self):
+ self.tearDown()
+
+ def _callCleanup(self, function, /, *args, **kwargs):
+ function(*args, **kwargs)
+
def run(self, result=None):
if result is None:
result = self.defaultTestResult()
startTestRun = getattr(result, 'startTestRun', None)
- stopTestRun = getattr(result, 'stopTestRun', None)
+ stopTestRun = getattr(result, 'stopTestRun', None)
if startTestRun is not None:
startTestRun()
- else:
- stopTestRun = None
+ else:
+ stopTestRun = None
result.startTest(self)
- try:
- testMethod = getattr(self, self._testMethodName)
- if (getattr(self.__class__, "__unittest_skip__", False) or
- getattr(testMethod, "__unittest_skip__", False)):
- # If the class or method was skipped.
+ try:
+ testMethod = getattr(self, self._testMethodName)
+ if (getattr(self.__class__, "__unittest_skip__", False) or
+ getattr(testMethod, "__unittest_skip__", False)):
+ # If the class or method was skipped.
skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
or getattr(testMethod, '__unittest_skip_why__', ''))
self._addSkip(result, self, skip_why)
- return result
-
- expecting_failure = (
- getattr(self, "__unittest_expecting_failure__", False) or
- getattr(testMethod, "__unittest_expecting_failure__", False)
- )
- outcome = _Outcome(result)
- try:
- self._outcome = outcome
-
+ return result
+
+ expecting_failure = (
+ getattr(self, "__unittest_expecting_failure__", False) or
+ getattr(testMethod, "__unittest_expecting_failure__", False)
+ )
+ outcome = _Outcome(result)
+ try:
+ self._outcome = outcome
+
with outcome.testPartExecutor(self):
- self._callSetUp()
- if outcome.success:
- outcome.expecting_failure = expecting_failure
- with outcome.testPartExecutor(self, isTest=True):
- self._callTestMethod(testMethod)
- outcome.expecting_failure = False
- with outcome.testPartExecutor(self):
- self._callTearDown()
-
- self.doCleanups()
- for test, reason in outcome.skipped:
- self._addSkip(result, test, reason)
- self._feedErrorsToResult(result, outcome.errors)
- if outcome.success:
- if expecting_failure:
- if outcome.expectedFailure:
- self._addExpectedFailure(result, outcome.expectedFailure)
- else:
- self._addUnexpectedSuccess(result)
+ self._callSetUp()
+ if outcome.success:
+ outcome.expecting_failure = expecting_failure
+ with outcome.testPartExecutor(self, isTest=True):
+ self._callTestMethod(testMethod)
+ outcome.expecting_failure = False
+ with outcome.testPartExecutor(self):
+ self._callTearDown()
+
+ self.doCleanups()
+ for test, reason in outcome.skipped:
+ self._addSkip(result, test, reason)
+ self._feedErrorsToResult(result, outcome.errors)
+ if outcome.success:
+ if expecting_failure:
+ if outcome.expectedFailure:
+ self._addExpectedFailure(result, outcome.expectedFailure)
+ else:
+ self._addUnexpectedSuccess(result)
else:
- result.addSuccess(self)
- return result
- finally:
- # explicitly break reference cycles:
- # outcome.errors -> frame -> outcome -> outcome.errors
- # outcome.expectedFailure -> frame -> outcome -> outcome.expectedFailure
- outcome.errors.clear()
- outcome.expectedFailure = None
-
- # clear the outcome, no more needed
- self._outcome = None
-
+ result.addSuccess(self)
+ return result
+ finally:
+ # explicitly break reference cycles:
+ # outcome.errors -> frame -> outcome -> outcome.errors
+ # outcome.expectedFailure -> frame -> outcome -> outcome.expectedFailure
+ outcome.errors.clear()
+ outcome.expectedFailure = None
+
+ # clear the outcome, no more needed
+ self._outcome = None
+
finally:
result.stopTest(self)
- if stopTestRun is not None:
- stopTestRun()
+ if stopTestRun is not None:
+ stopTestRun()
def doCleanups(self):
"""Execute all cleanup functions. Normally called for you after
@@ -629,43 +629,43 @@ class TestCase(object):
while self._cleanups:
function, args, kwargs = self._cleanups.pop()
with outcome.testPartExecutor(self):
- self._callCleanup(function, *args, **kwargs)
+ self._callCleanup(function, *args, **kwargs)
# return this for backwards compatibility
- # even though we no longer use it internally
+ # even though we no longer use it internally
return outcome.success
- @classmethod
- def doClassCleanups(cls):
- """Execute all class cleanup functions. Normally called for you after
- tearDownClass."""
- cls.tearDown_exceptions = []
- while cls._class_cleanups:
- function, args, kwargs = cls._class_cleanups.pop()
- try:
- function(*args, **kwargs)
- except Exception:
- cls.tearDown_exceptions.append(sys.exc_info())
-
+ @classmethod
+ def doClassCleanups(cls):
+ """Execute all class cleanup functions. Normally called for you after
+ tearDownClass."""
+ cls.tearDown_exceptions = []
+ while cls._class_cleanups:
+ function, args, kwargs = cls._class_cleanups.pop()
+ try:
+ function(*args, **kwargs)
+ except Exception:
+ cls.tearDown_exceptions.append(sys.exc_info())
+
def __call__(self, *args, **kwds):
return self.run(*args, **kwds)
def debug(self):
"""Run the test without collecting errors in a TestResult"""
- testMethod = getattr(self, self._testMethodName)
- if (getattr(self.__class__, "__unittest_skip__", False) or
- getattr(testMethod, "__unittest_skip__", False)):
- # If the class or method was skipped.
- skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
- or getattr(testMethod, '__unittest_skip_why__', ''))
- raise SkipTest(skip_why)
-
- self._callSetUp()
- self._callTestMethod(testMethod)
- self._callTearDown()
+ testMethod = getattr(self, self._testMethodName)
+ if (getattr(self.__class__, "__unittest_skip__", False) or
+ getattr(testMethod, "__unittest_skip__", False)):
+ # If the class or method was skipped.
+ skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
+ or getattr(testMethod, '__unittest_skip_why__', ''))
+ raise SkipTest(skip_why)
+
+ self._callSetUp()
+ self._callTestMethod(testMethod)
+ self._callTearDown()
while self._cleanups:
- function, args, kwargs = self._cleanups.pop()
- self._callCleanup(function, *args, **kwargs)
+ function, args, kwargs = self._cleanups.pop()
+ self._callCleanup(function, *args, **kwargs)
def skipTest(self, reason):
"""Skip this test."""
@@ -792,8 +792,8 @@ class TestCase(object):
self.assertEqual(cm.output, ['INFO:foo:first message',
'ERROR:foo.bar:second message'])
"""
- # Lazy import to avoid importing logging if it is not needed.
- from ._log import _AssertLogsContext
+ # Lazy import to avoid importing logging if it is not needed.
+ from ._log import _AssertLogsContext
return _AssertLogsContext(self, logger, level)
def _getAssertEqualityFunc(self, first, second):
@@ -1165,8 +1165,8 @@ class TestCase(object):
def assertCountEqual(self, first, second, msg=None):
- """Asserts that two iterables have the same elements, the same number of
- times, without regard to order.
+ """Asserts that two iterables have the same elements, the same number of
+ times, without regard to order.
self.assertEqual(Counter(list(first)),
Counter(list(second)))
diff --git a/contrib/tools/python3/src/Lib/unittest/mock.py b/contrib/tools/python3/src/Lib/unittest/mock.py
index 5c78274d09..5c6141571e 100644
--- a/contrib/tools/python3/src/Lib/unittest/mock.py
+++ b/contrib/tools/python3/src/Lib/unittest/mock.py
@@ -13,7 +13,7 @@ __all__ = (
'ANY',
'call',
'create_autospec',
- 'AsyncMock',
+ 'AsyncMock',
'FILTER_DIR',
'NonCallableMock',
'NonCallableMagicMock',
@@ -23,16 +23,16 @@ __all__ = (
)
-import asyncio
-import contextlib
-import io
+import asyncio
+import contextlib
+import io
import inspect
import pprint
import sys
import builtins
-from asyncio import iscoroutinefunction
-from types import CodeType, ModuleType, MethodType
-from unittest.util import safe_repr
+from asyncio import iscoroutinefunction
+from types import CodeType, ModuleType, MethodType
+from unittest.util import safe_repr
from functools import wraps, partial
@@ -44,21 +44,21 @@ FILTER_DIR = True
# Without this, the __class__ properties wouldn't be set correctly
_safe_super = super
-def _is_async_obj(obj):
- if _is_instance_mock(obj) and not isinstance(obj, AsyncMock):
- return False
- if hasattr(obj, '__func__'):
- obj = getattr(obj, '__func__')
- return iscoroutinefunction(obj) or inspect.isawaitable(obj)
-
-
-def _is_async_func(func):
- if getattr(func, '__code__', None):
- return iscoroutinefunction(func)
- else:
- return False
-
-
+def _is_async_obj(obj):
+ if _is_instance_mock(obj) and not isinstance(obj, AsyncMock):
+ return False
+ if hasattr(obj, '__func__'):
+ obj = getattr(obj, '__func__')
+ return iscoroutinefunction(obj) or inspect.isawaitable(obj)
+
+
+def _is_async_func(func):
+ if getattr(func, '__code__', None):
+ return iscoroutinefunction(func)
+ else:
+ return False
+
+
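The two helpers above boil down to iscoroutinefunction() plus an awaitable check; an illustrative probe (fetch is a made-up coroutine):

    import inspect
    from asyncio import iscoroutinefunction

    async def fetch():
        return 42

    print(iscoroutinefunction(fetch))   # True: spec'ing this picks AsyncMock
    coro = fetch()
    print(inspect.isawaitable(coro))    # True: awaitables also qualify
    coro.close()                        # avoid the "never awaited" warning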
def _is_instance_mock(obj):
# can't use isinstance on Mock objects because they override __class__
# The base class for all mocks is NonCallableMock
@@ -67,20 +67,20 @@ def _is_instance_mock(obj):
def _is_exception(obj):
return (
- isinstance(obj, BaseException) or
- isinstance(obj, type) and issubclass(obj, BaseException)
+ isinstance(obj, BaseException) or
+ isinstance(obj, type) and issubclass(obj, BaseException)
)
-def _extract_mock(obj):
- # Autospecced functions will return a FunctionType with "mock" attribute
- # which is the actual mock object that needs to be used.
- if isinstance(obj, FunctionTypes) and hasattr(obj, 'mock'):
- return obj.mock
- else:
- return obj
-
-
+def _extract_mock(obj):
+ # Autospecced functions will return a FunctionType with "mock" attribute
+ # which is the actual mock object that needs to be used.
+ if isinstance(obj, FunctionTypes) and hasattr(obj, 'mock'):
+ return obj.mock
+ else:
+ return obj
+
+
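A sketch of the autospec case _extract_mock unwraps (add is an illustrative function):

    from unittest.mock import create_autospec

    def add(a, b):
        return a + b

    spec_fn = create_autospec(add)
    spec_fn(1, 2)
    # The wrapper is a real function; the recording mock hangs off its
    # "mock" attribute, which is exactly what _extract_mock returns.
    print(spec_fn.mock.call_args)   # call(1, 2)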
def _get_signature_object(func, as_instance, eat_self):
"""
Given an arbitrary, possibly callable object, try to create a suitable
@@ -89,7 +89,7 @@ def _get_signature_object(func, as_instance, eat_self):
"""
if isinstance(func, type) and not as_instance:
# If it's a type and should be modelled as a type, use __init__.
- func = func.__init__
+ func = func.__init__
# Skip the `self` argument in __init__
eat_self = True
elif not isinstance(func, FunctionTypes):
@@ -115,7 +115,7 @@ def _check_signature(func, mock, skipfirst, instance=False):
if sig is None:
return
func, sig = sig
- def checksig(self, /, *args, **kwargs):
+ def checksig(self, /, *args, **kwargs):
sig.bind(*args, **kwargs)
_copy_func_details(func, checksig)
type(mock)._mock_check_sig = checksig
@@ -138,8 +138,8 @@ def _copy_func_details(func, funcopy):
def _callable(obj):
if isinstance(obj, type):
return True
- if isinstance(obj, (staticmethod, classmethod, MethodType)):
- return _callable(obj.__func__)
+ if isinstance(obj, (staticmethod, classmethod, MethodType)):
+ return _callable(obj.__func__)
if getattr(obj, '__call__', None) is not None:
return True
return False
@@ -242,33 +242,33 @@ def _setup_func(funcopy, mock, sig):
mock._mock_delegate = funcopy
-def _setup_async_mock(mock):
- mock._is_coroutine = asyncio.coroutines._is_coroutine
- mock.await_count = 0
- mock.await_args = None
- mock.await_args_list = _CallList()
-
- # Mock is not configured yet so the attributes are set
- # to a function and then the corresponding mock helper function
- # is called when the helper is accessed similar to _setup_func.
- def wrapper(attr, /, *args, **kwargs):
- return getattr(mock.mock, attr)(*args, **kwargs)
-
- for attribute in ('assert_awaited',
- 'assert_awaited_once',
- 'assert_awaited_with',
- 'assert_awaited_once_with',
- 'assert_any_await',
- 'assert_has_awaits',
- 'assert_not_awaited'):
-
- # setattr(mock, attribute, wrapper) causes late binding
- # hence attribute will always be the last value in the loop
- # Use partial(wrapper, attribute) to ensure the attribute is bound
- # correctly.
- setattr(mock, attribute, partial(wrapper, attribute))
-
-
+def _setup_async_mock(mock):
+ mock._is_coroutine = asyncio.coroutines._is_coroutine
+ mock.await_count = 0
+ mock.await_args = None
+ mock.await_args_list = _CallList()
+
+ # Mock is not configured yet so the attributes are set
+ # to a function and then the corresponding mock helper function
+    # is called when the helper is accessed, similar to _setup_func.
+ def wrapper(attr, /, *args, **kwargs):
+ return getattr(mock.mock, attr)(*args, **kwargs)
+
+ for attribute in ('assert_awaited',
+ 'assert_awaited_once',
+ 'assert_awaited_with',
+ 'assert_awaited_once_with',
+ 'assert_any_await',
+ 'assert_has_awaits',
+ 'assert_not_awaited'):
+
+ # setattr(mock, attribute, wrapper) causes late binding
+ # hence attribute will always be the last value in the loop
+ # Use partial(wrapper, attribute) to ensure the attribute is bound
+ # correctly.
+ setattr(mock, attribute, partial(wrapper, attribute))
+
+
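The late-binding comment above is the classic closure-in-a-loop pitfall; a standalone illustration:

    from functools import partial

    names = ["alpha", "beta"]

    # Plain closures capture the variable, not its value at loop time...
    late = [lambda: name for name in names]
    print([f() for f in late])      # ['beta', 'beta']

    # ...while partial freezes the current value, as the loop above does.
    bound = [partial(lambda n: n, name) for name in names]
    print([f() for f in bound])     # ['alpha', 'beta']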
def _is_magic(name):
return '__%s__' % name[2:-2] == name
@@ -354,7 +354,7 @@ class _CallList(list):
def _check_and_set_parent(parent, value, name, new_name):
- value = _extract_mock(value)
+ value = _extract_mock(value)
if not _is_instance_mock(value):
return False
@@ -389,7 +389,7 @@ class _MockIter(object):
class Base(object):
_mock_return_value = DEFAULT
_mock_side_effect = None
- def __init__(self, /, *args, **kwargs):
+ def __init__(self, /, *args, **kwargs):
pass
@@ -397,19 +397,19 @@ class Base(object):
class NonCallableMock(Base):
"""A non-callable version of `Mock`"""
- def __new__(cls, /, *args, **kw):
+ def __new__(cls, /, *args, **kw):
# every instance has its own class
# so we can create magic methods on the
# class without stomping on other mocks
- bases = (cls,)
- if not issubclass(cls, AsyncMockMixin):
- # Check if spec is an async object or function
- bound_args = _MOCK_SIG.bind_partial(cls, *args, **kw).arguments
- spec_arg = bound_args.get('spec_set', bound_args.get('spec'))
- if spec_arg is not None and _is_async_obj(spec_arg):
- bases = (AsyncMockMixin, cls)
- new = type(cls.__name__, bases, {'__doc__': cls.__doc__})
- instance = _safe_super(NonCallableMock, cls).__new__(new)
+ bases = (cls,)
+ if not issubclass(cls, AsyncMockMixin):
+ # Check if spec is an async object or function
+ bound_args = _MOCK_SIG.bind_partial(cls, *args, **kw).arguments
+ spec_arg = bound_args.get('spec_set', bound_args.get('spec'))
+ if spec_arg is not None and _is_async_obj(spec_arg):
+ bases = (AsyncMockMixin, cls)
+ new = type(cls.__name__, bases, {'__doc__': cls.__doc__})
+ instance = _safe_super(NonCallableMock, cls).__new__(new)
return instance
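The effect of the restored __new__ branch, sketched with a throwaway coroutine:

    import asyncio
    from unittest.mock import Mock

    async def fetch():
        return 42

    m = Mock(spec=fetch)                   # async spec -> AsyncMockMixin base
    print(asyncio.iscoroutinefunction(m))  # True
    asyncio.run(m())                       # calling the mock yields an awaitable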
@@ -463,13 +463,13 @@ class NonCallableMock(Base):
Attach a mock as an attribute of this one, replacing its name and
parent. Calls to the attached mock will be recorded in the
`method_calls` and `mock_calls` attributes of this one."""
- inner_mock = _extract_mock(mock)
-
- inner_mock._mock_parent = None
- inner_mock._mock_new_parent = None
- inner_mock._mock_name = ''
- inner_mock._mock_new_name = None
+ inner_mock = _extract_mock(mock)
+ inner_mock._mock_parent = None
+ inner_mock._mock_new_parent = None
+ inner_mock._mock_name = ''
+ inner_mock._mock_new_name = None
+
setattr(self, attribute, mock)
@@ -486,17 +486,17 @@ class NonCallableMock(Base):
_eat_self=False):
_spec_class = None
_spec_signature = None
- _spec_asyncs = []
-
- for attr in dir(spec):
- if iscoroutinefunction(getattr(spec, attr, None)):
- _spec_asyncs.append(attr)
+ _spec_asyncs = []
+ for attr in dir(spec):
+ if iscoroutinefunction(getattr(spec, attr, None)):
+ _spec_asyncs.append(attr)
+
if spec is not None and not _is_list(spec):
if isinstance(spec, type):
_spec_class = spec
else:
- _spec_class = type(spec)
+ _spec_class = type(spec)
res = _get_signature_object(spec,
_spec_as_instance, _eat_self)
_spec_signature = res and res[1]
@@ -508,7 +508,7 @@ class NonCallableMock(Base):
__dict__['_spec_set'] = spec_set
__dict__['_spec_signature'] = _spec_signature
__dict__['_mock_methods'] = spec
- __dict__['_spec_asyncs'] = _spec_asyncs
+ __dict__['_spec_asyncs'] = _spec_asyncs
def __get_return_value(self):
ret = self._mock_return_value
@@ -593,14 +593,14 @@ class NonCallableMock(Base):
for child in self._mock_children.values():
if isinstance(child, _SpecState) or child is _deleted:
continue
- child.reset_mock(visited, return_value=return_value, side_effect=side_effect)
+ child.reset_mock(visited, return_value=return_value, side_effect=side_effect)
ret = self._mock_return_value
if _is_instance_mock(ret) and ret is not self:
ret.reset_mock(visited)
- def configure_mock(self, /, **kwargs):
+ def configure_mock(self, /, **kwargs):
"""Set attributes on the mock through keyword arguments.
Attributes plus return values and side effects can be set on child
@@ -632,8 +632,8 @@ class NonCallableMock(Base):
raise AttributeError(name)
if not self._mock_unsafe:
if name.startswith(('assert', 'assret')):
- raise AttributeError("Attributes cannot start with 'assert' "
- "or 'assret'")
+ raise AttributeError("Attributes cannot start with 'assert' "
+ "or 'assret'")
result = self._mock_children.get(name)
if result is _deleted:
@@ -669,7 +669,7 @@ class NonCallableMock(Base):
dot = '.'
if _name_list == ['()']:
dot = ''
-
+
while _parent is not None:
last = _parent
@@ -717,14 +717,14 @@ class NonCallableMock(Base):
extras = self._mock_methods or []
from_type = dir(type(self))
from_dict = list(self.__dict__)
- from_child_mocks = [
- m_name for m_name, m_value in self._mock_children.items()
- if m_value is not _deleted]
+ from_child_mocks = [
+ m_name for m_name, m_value in self._mock_children.items()
+ if m_value is not _deleted]
from_type = [e for e in from_type if not e.startswith('_')]
from_dict = [e for e in from_dict if not e.startswith('_') or
_is_magic(e)]
- return sorted(set(extras + from_type + from_dict + from_child_mocks))
+ return sorted(set(extras + from_type + from_dict + from_child_mocks))
def __setattr__(self, name, value):
@@ -776,7 +776,7 @@ class NonCallableMock(Base):
obj = self._mock_children.get(name, _missing)
if name in self.__dict__:
- _safe_super(NonCallableMock, self).__delattr__(name)
+ _safe_super(NonCallableMock, self).__delattr__(name)
elif obj is _deleted:
raise AttributeError(name)
if obj is not _missing:
@@ -789,47 +789,47 @@ class NonCallableMock(Base):
return _format_call_signature(name, args, kwargs)
- def _format_mock_failure_message(self, args, kwargs, action='call'):
- message = 'expected %s not found.\nExpected: %s\nActual: %s'
+ def _format_mock_failure_message(self, args, kwargs, action='call'):
+ message = 'expected %s not found.\nExpected: %s\nActual: %s'
expected_string = self._format_mock_call_signature(args, kwargs)
call_args = self.call_args
actual_string = self._format_mock_call_signature(*call_args)
- return message % (action, expected_string, actual_string)
-
-
- def _get_call_signature_from_name(self, name):
- """
- * If call objects are asserted against a method/function like obj.meth1
- then there could be no name for the call object to lookup. Hence just
- return the spec_signature of the method/function being asserted against.
- * If the name is not empty then remove () and split by '.' to get
- list of names to iterate through the children until a potential
- match is found. A child mock is created only during attribute access
- so if we get a _SpecState then no attributes of the spec were accessed
- and can be safely exited.
- """
- if not name:
- return self._spec_signature
-
- sig = None
- names = name.replace('()', '').split('.')
- children = self._mock_children
-
- for name in names:
- child = children.get(name)
- if child is None or isinstance(child, _SpecState):
- break
- else:
- # If an autospecced object is attached using attach_mock the
- # child would be a function with mock object as attribute from
- # which signature has to be derived.
- child = _extract_mock(child)
- children = child._mock_children
- sig = child._spec_signature
-
- return sig
-
-
+ return message % (action, expected_string, actual_string)
+
+
+ def _get_call_signature_from_name(self, name):
+ """
+ * If call objects are asserted against a method/function like obj.meth1
+          then there could be no name for the call object to look up. Hence just
+ return the spec_signature of the method/function being asserted against.
+ * If the name is not empty then remove () and split by '.' to get
+ list of names to iterate through the children until a potential
+ match is found. A child mock is created only during attribute access
+ so if we get a _SpecState then no attributes of the spec were accessed
+          and we can safely exit.
+ """
+ if not name:
+ return self._spec_signature
+
+ sig = None
+ names = name.replace('()', '').split('.')
+ children = self._mock_children
+
+ for name in names:
+ child = children.get(name)
+ if child is None or isinstance(child, _SpecState):
+ break
+ else:
+ # If an autospecced object is attached using attach_mock the
+ # child would be a function with mock object as attribute from
+ # which signature has to be derived.
+ child = _extract_mock(child)
+ children = child._mock_children
+ sig = child._spec_signature
+
+ return sig
+
+
def _call_matcher(self, _call):
"""
Given a call (or simply an (args, kwargs) tuple), return a
@@ -837,12 +837,12 @@ class NonCallableMock(Base):
This is a best effort method which relies on the spec's signature,
if available, or falls back on the arguments themselves.
"""
-
- if isinstance(_call, tuple) and len(_call) > 2:
- sig = self._get_call_signature_from_name(_call[0])
- else:
- sig = self._spec_signature
-
+
+ if isinstance(_call, tuple) and len(_call) > 2:
+ sig = self._get_call_signature_from_name(_call[0])
+ else:
+ sig = self._spec_signature
+
if sig is not None:
if len(_call) == 2:
name = ''
@@ -850,71 +850,71 @@ class NonCallableMock(Base):
else:
name, args, kwargs = _call
try:
- bound_call = sig.bind(*args, **kwargs)
- return call(name, bound_call.args, bound_call.kwargs)
+ bound_call = sig.bind(*args, **kwargs)
+ return call(name, bound_call.args, bound_call.kwargs)
except TypeError as e:
return e.with_traceback(None)
else:
return _call
- def assert_not_called(self):
+ def assert_not_called(self):
"""assert that the mock was never called.
"""
if self.call_count != 0:
- msg = ("Expected '%s' to not have been called. Called %s times.%s"
- % (self._mock_name or 'mock',
- self.call_count,
- self._calls_repr()))
+ msg = ("Expected '%s' to not have been called. Called %s times.%s"
+ % (self._mock_name or 'mock',
+ self.call_count,
+ self._calls_repr()))
raise AssertionError(msg)
- def assert_called(self):
+ def assert_called(self):
"""assert that the mock was called at least once
"""
if self.call_count == 0:
msg = ("Expected '%s' to have been called." %
- (self._mock_name or 'mock'))
+ (self._mock_name or 'mock'))
raise AssertionError(msg)
- def assert_called_once(self):
+ def assert_called_once(self):
"""assert that the mock was called only once.
"""
if not self.call_count == 1:
- msg = ("Expected '%s' to have been called once. Called %s times.%s"
- % (self._mock_name or 'mock',
- self.call_count,
- self._calls_repr()))
+ msg = ("Expected '%s' to have been called once. Called %s times.%s"
+ % (self._mock_name or 'mock',
+ self.call_count,
+ self._calls_repr()))
raise AssertionError(msg)
- def assert_called_with(self, /, *args, **kwargs):
- """assert that the last call was made with the specified arguments.
+ def assert_called_with(self, /, *args, **kwargs):
+ """assert that the last call was made with the specified arguments.
Raises an AssertionError if the args and keyword args passed in are
different to the last call to the mock."""
if self.call_args is None:
expected = self._format_mock_call_signature(args, kwargs)
- actual = 'not called.'
- error_message = ('expected call not found.\nExpected: %s\nActual: %s'
- % (expected, actual))
- raise AssertionError(error_message)
+ actual = 'not called.'
+ error_message = ('expected call not found.\nExpected: %s\nActual: %s'
+ % (expected, actual))
+ raise AssertionError(error_message)
def _error_message():
msg = self._format_mock_failure_message(args, kwargs)
return msg
- expected = self._call_matcher(_Call((args, kwargs), two=True))
+ expected = self._call_matcher(_Call((args, kwargs), two=True))
actual = self._call_matcher(self.call_args)
- if actual != expected:
+ if actual != expected:
cause = expected if isinstance(expected, Exception) else None
raise AssertionError(_error_message()) from cause
- def assert_called_once_with(self, /, *args, **kwargs):
+ def assert_called_once_with(self, /, *args, **kwargs):
"""assert that the mock was called exactly once and that that call was
with the specified arguments."""
if not self.call_count == 1:
- msg = ("Expected '%s' to be called once. Called %s times.%s"
- % (self._mock_name or 'mock',
- self.call_count,
- self._calls_repr()))
+ msg = ("Expected '%s' to be called once. Called %s times.%s"
+ % (self._mock_name or 'mock',
+ self.call_count,
+ self._calls_repr()))
raise AssertionError(msg)
return self.assert_called_with(*args, **kwargs)
@@ -930,21 +930,21 @@ class NonCallableMock(Base):
If `any_order` is True then the calls can be in any order, but
they must all appear in `mock_calls`."""
expected = [self._call_matcher(c) for c in calls]
- cause = next((e for e in expected if isinstance(e, Exception)), None)
+ cause = next((e for e in expected if isinstance(e, Exception)), None)
all_calls = _CallList(self._call_matcher(c) for c in self.mock_calls)
if not any_order:
if expected not in all_calls:
- if cause is None:
- problem = 'Calls not found.'
- else:
- problem = ('Error processing expected calls.\n'
- 'Errors: {}').format(
- [e if isinstance(e, Exception) else None
- for e in expected])
+ if cause is None:
+ problem = 'Calls not found.'
+ else:
+ problem = ('Error processing expected calls.\n'
+ 'Errors: {}').format(
+ [e if isinstance(e, Exception) else None
+ for e in expected])
raise AssertionError(
- f'{problem}\n'
- f'Expected: {_CallList(calls)}'
- f'{self._calls_repr(prefix="Actual").rstrip(".")}'
+ f'{problem}\n'
+ f'Expected: {_CallList(calls)}'
+ f'{self._calls_repr(prefix="Actual").rstrip(".")}'
) from cause
return
@@ -958,29 +958,29 @@ class NonCallableMock(Base):
not_found.append(kall)
if not_found:
raise AssertionError(
- '%r does not contain all of %r in its call list, '
- 'found %r instead' % (self._mock_name or 'mock',
- tuple(not_found), all_calls)
+ '%r does not contain all of %r in its call list, '
+ 'found %r instead' % (self._mock_name or 'mock',
+ tuple(not_found), all_calls)
) from cause
- def assert_any_call(self, /, *args, **kwargs):
+ def assert_any_call(self, /, *args, **kwargs):
"""assert the mock has been called with the specified arguments.
The assert passes if the mock has *ever* been called, unlike
`assert_called_with` and `assert_called_once_with` that only pass if
the call is the most recent one."""
- expected = self._call_matcher(_Call((args, kwargs), two=True))
- cause = expected if isinstance(expected, Exception) else None
+ expected = self._call_matcher(_Call((args, kwargs), two=True))
+ cause = expected if isinstance(expected, Exception) else None
actual = [self._call_matcher(c) for c in self.call_args_list]
- if cause or expected not in _AnyComparer(actual):
+ if cause or expected not in _AnyComparer(actual):
expected_string = self._format_mock_call_signature(args, kwargs)
raise AssertionError(
'%s call not found' % expected_string
) from cause
- def _get_child_mock(self, /, **kw):
+ def _get_child_mock(self, /, **kw):
"""Create the child mocks for attributes and return value.
By default child mocks will be the same type as the parent.
Subclasses of Mock may want to override this to customize the way
@@ -988,68 +988,68 @@ class NonCallableMock(Base):
For non-callable mocks the callable variant will be used (rather than
any custom subclass)."""
- _new_name = kw.get("_new_name")
- if _new_name in self.__dict__['_spec_asyncs']:
- return AsyncMock(**kw)
-
- if self._mock_sealed:
- attribute = f".{kw['name']}" if "name" in kw else "()"
- mock_name = self._extract_mock_name() + attribute
- raise AttributeError(mock_name)
-
+ _new_name = kw.get("_new_name")
+ if _new_name in self.__dict__['_spec_asyncs']:
+ return AsyncMock(**kw)
+
+ if self._mock_sealed:
+ attribute = f".{kw['name']}" if "name" in kw else "()"
+ mock_name = self._extract_mock_name() + attribute
+ raise AttributeError(mock_name)
+
_type = type(self)
- if issubclass(_type, MagicMock) and _new_name in _async_method_magics:
- # Any asynchronous magic becomes an AsyncMock
- klass = AsyncMock
- elif issubclass(_type, AsyncMockMixin):
- if (_new_name in _all_sync_magics or
- self._mock_methods and _new_name in self._mock_methods):
- # Any synchronous method on AsyncMock becomes a MagicMock
- klass = MagicMock
- else:
- klass = AsyncMock
- elif not issubclass(_type, CallableMixin):
+ if issubclass(_type, MagicMock) and _new_name in _async_method_magics:
+ # Any asynchronous magic becomes an AsyncMock
+ klass = AsyncMock
+ elif issubclass(_type, AsyncMockMixin):
+ if (_new_name in _all_sync_magics or
+ self._mock_methods and _new_name in self._mock_methods):
+ # Any synchronous method on AsyncMock becomes a MagicMock
+ klass = MagicMock
+ else:
+ klass = AsyncMock
+ elif not issubclass(_type, CallableMixin):
if issubclass(_type, NonCallableMagicMock):
klass = MagicMock
- elif issubclass(_type, NonCallableMock):
+ elif issubclass(_type, NonCallableMock):
klass = Mock
else:
klass = _type.__mro__[1]
return klass(**kw)
- def _calls_repr(self, prefix="Calls"):
- """Renders self.mock_calls as a string.
-
- Example: "\nCalls: [call(1), call(2)]."
-
- If self.mock_calls is empty, an empty string is returned. The
- output will be truncated if very long.
- """
- if not self.mock_calls:
- return ""
- return f"\n{prefix}: {safe_repr(self.mock_calls)}."
-
-
-_MOCK_SIG = inspect.signature(NonCallableMock.__init__)
-
-
-class _AnyComparer(list):
- """A list which checks if it contains a call which may have an
- argument of ANY, flipping the components of item and self from
- their traditional locations so that ANY is guaranteed to be on
- the left."""
- def __contains__(self, item):
- for _call in self:
- assert len(item) == len(_call)
- if all([
- expected == actual
- for expected, actual in zip(item, _call)
- ]):
- return True
- return False
-
-
+ def _calls_repr(self, prefix="Calls"):
+ """Renders self.mock_calls as a string.
+
+ Example: "\nCalls: [call(1), call(2)]."
+
+ If self.mock_calls is empty, an empty string is returned. The
+ output will be truncated if very long.
+ """
+ if not self.mock_calls:
+ return ""
+ return f"\n{prefix}: {safe_repr(self.mock_calls)}."
+
+
+_MOCK_SIG = inspect.signature(NonCallableMock.__init__)
+
+
+class _AnyComparer(list):
+ """A list which checks if it contains a call which may have an
+ argument of ANY, flipping the components of item and self from
+ their traditional locations so that ANY is guaranteed to be on
+ the left."""
+ def __contains__(self, item):
+ for _call in self:
+ assert len(item) == len(_call)
+ if all([
+ expected == actual
+ for expected, actual in zip(item, _call)
+ ]):
+ return True
+ return False
+
+
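_AnyComparer keeps ANY on the left of each comparison so its __eq__ always wins; the behaviour is visible from the public API:

    from unittest.mock import Mock, ANY, call

    m = Mock()
    m.send("payload", retries=3)
    m.send.assert_any_call(ANY, retries=3)                # ANY matches "payload"
    print(call(ANY, retries=3) in m.send.call_args_list)  # True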
def _try_iter(obj):
if obj is None:
return obj
@@ -1079,29 +1079,29 @@ class CallableMixin(Base):
self.side_effect = side_effect
- def _mock_check_sig(self, /, *args, **kwargs):
+ def _mock_check_sig(self, /, *args, **kwargs):
# stub method that can be replaced with one with a specific signature
pass
- def __call__(self, /, *args, **kwargs):
+ def __call__(self, /, *args, **kwargs):
# can't use self in-case a function / method we are mocking uses self
# in the signature
- self._mock_check_sig(*args, **kwargs)
- self._increment_mock_call(*args, **kwargs)
- return self._mock_call(*args, **kwargs)
-
+ self._mock_check_sig(*args, **kwargs)
+ self._increment_mock_call(*args, **kwargs)
+ return self._mock_call(*args, **kwargs)
- def _mock_call(self, /, *args, **kwargs):
- return self._execute_mock_call(*args, **kwargs)
- def _increment_mock_call(self, /, *args, **kwargs):
+ def _mock_call(self, /, *args, **kwargs):
+ return self._execute_mock_call(*args, **kwargs)
+
+ def _increment_mock_call(self, /, *args, **kwargs):
self.called = True
self.call_count += 1
# handle call_args
- # needs to be set here so assertions on call arguments pass before
- # execution in the case of awaited calls
+ # needs to be set here so assertions on call arguments pass before
+ # execution in the case of awaited calls
_call = _Call((args, kwargs), two=True)
self.call_args = _call
self.call_args_list.append(_call)
@@ -1141,9 +1141,9 @@ class CallableMixin(Base):
# follow the parental chain:
_new_parent = _new_parent._mock_new_parent
- def _execute_mock_call(self, /, *args, **kwargs):
- # separate from _increment_mock_call so that awaited functions are
- # executed separately from their call, also AsyncMock overrides this method
+ def _execute_mock_call(self, /, *args, **kwargs):
+ # separate from _increment_mock_call so that awaited functions are
+ # executed separately from their call, also AsyncMock overrides this method
effect = self.side_effect
if effect is not None:
@@ -1288,8 +1288,8 @@ class _patch(object):
def __call__(self, func):
if isinstance(func, type):
return self.decorate_class(func)
- if inspect.iscoroutinefunction(func):
- return self.decorate_async_callable(func)
+ if inspect.iscoroutinefunction(func):
+ return self.decorate_async_callable(func)
return self.decorate_callable(func)
@@ -1307,51 +1307,51 @@ class _patch(object):
return klass
- @contextlib.contextmanager
- def decoration_helper(self, patched, args, keywargs):
- extra_args = []
- with contextlib.ExitStack() as exit_stack:
- for patching in patched.patchings:
- arg = exit_stack.enter_context(patching)
- if patching.attribute_name is not None:
- keywargs.update(arg)
- elif patching.new is DEFAULT:
- extra_args.append(arg)
-
- args += tuple(extra_args)
- yield (args, keywargs)
-
-
+ @contextlib.contextmanager
+ def decoration_helper(self, patched, args, keywargs):
+ extra_args = []
+ with contextlib.ExitStack() as exit_stack:
+ for patching in patched.patchings:
+ arg = exit_stack.enter_context(patching)
+ if patching.attribute_name is not None:
+ keywargs.update(arg)
+ elif patching.new is DEFAULT:
+ extra_args.append(arg)
+
+ args += tuple(extra_args)
+ yield (args, keywargs)
+
+
def decorate_callable(self, func):
- # NB. Keep the method in sync with decorate_async_callable()
+ # NB. Keep the method in sync with decorate_async_callable()
if hasattr(func, 'patchings'):
func.patchings.append(self)
return func
@wraps(func)
def patched(*args, **keywargs):
- with self.decoration_helper(patched,
- args,
- keywargs) as (newargs, newkeywargs):
- return func(*newargs, **newkeywargs)
-
- patched.patchings = [self]
- return patched
-
-
- def decorate_async_callable(self, func):
- # NB. Keep the method in sync with decorate_callable()
- if hasattr(func, 'patchings'):
- func.patchings.append(self)
- return func
-
- @wraps(func)
- async def patched(*args, **keywargs):
- with self.decoration_helper(patched,
- args,
- keywargs) as (newargs, newkeywargs):
- return await func(*newargs, **newkeywargs)
-
+ with self.decoration_helper(patched,
+ args,
+ keywargs) as (newargs, newkeywargs):
+ return func(*newargs, **newkeywargs)
+
+ patched.patchings = [self]
+ return patched
+
+
+ def decorate_async_callable(self, func):
+ # NB. Keep the method in sync with decorate_callable()
+ if hasattr(func, 'patchings'):
+ func.patchings.append(self)
+ return func
+
+ @wraps(func)
+ async def patched(*args, **keywargs):
+ with self.decoration_helper(patched,
+ args,
+ keywargs) as (newargs, newkeywargs):
+ return await func(*newargs, **newkeywargs)
+
patched.patchings = [self]
return patched
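A sketch of the async decoration path added above (fetch is an illustrative module-level coroutine):

    import asyncio
    from unittest.mock import patch

    async def fetch():
        return "real"

    @patch(f"{__name__}.fetch")           # coroutine target -> async patched()
    async def demo(mock_fetch):
        mock_fetch.return_value = "fake"  # patch picked AsyncMock automatically
        return await fetch()

    print(asyncio.run(demo()))            # fake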
@@ -1424,10 +1424,10 @@ class _patch(object):
if isinstance(original, type):
# If we're patching out a class and there is a spec
inherit = True
- if spec is None and _is_async_obj(original):
- Klass = AsyncMock
- else:
- Klass = MagicMock
+ if spec is None and _is_async_obj(original):
+ Klass = AsyncMock
+ else:
+ Klass = MagicMock
_kwargs = {}
if new_callable is not None:
Klass = new_callable
@@ -1439,9 +1439,9 @@ class _patch(object):
not_callable = '__call__' not in this_spec
else:
not_callable = not callable(this_spec)
- if _is_async_obj(this_spec):
- Klass = AsyncMock
- elif not_callable:
+ if _is_async_obj(this_spec):
+ Klass = AsyncMock
+ elif not_callable:
Klass = NonCallableMagicMock
if spec is not None:
@@ -1496,23 +1496,23 @@ class _patch(object):
self.temp_original = original
self.is_local = local
- self._exit_stack = contextlib.ExitStack()
- try:
- setattr(self.target, self.attribute, new_attr)
- if self.attribute_name is not None:
- extra_args = {}
- if self.new is DEFAULT:
- extra_args[self.attribute_name] = new
- for patching in self.additional_patchers:
- arg = self._exit_stack.enter_context(patching)
- if patching.new is DEFAULT:
- extra_args.update(arg)
- return extra_args
-
- return new
- except:
- if not self.__exit__(*sys.exc_info()):
- raise
+ self._exit_stack = contextlib.ExitStack()
+ try:
+ setattr(self.target, self.attribute, new_attr)
+ if self.attribute_name is not None:
+ extra_args = {}
+ if self.new is DEFAULT:
+ extra_args[self.attribute_name] = new
+ for patching in self.additional_patchers:
+ arg = self._exit_stack.enter_context(patching)
+ if patching.new is DEFAULT:
+ extra_args.update(arg)
+ return extra_args
+
+ return new
+ except:
+ if not self.__exit__(*sys.exc_info()):
+ raise
def __exit__(self, *exc_info):
"""Undo the patch."""
@@ -1530,9 +1530,9 @@ class _patch(object):
del self.temp_original
del self.is_local
del self.target
- exit_stack = self._exit_stack
- del self._exit_stack
- return exit_stack.__exit__(*exc_info)
+ exit_stack = self._exit_stack
+ del self._exit_stack
+ return exit_stack.__exit__(*exc_info)
def start(self):
@@ -1548,9 +1548,9 @@ class _patch(object):
self._active_patches.remove(self)
except ValueError:
# If the patch hasn't been started this will fail
- return None
+ return None
- return self.__exit__(None, None, None)
+ return self.__exit__(None, None, None)
@@ -1582,10 +1582,10 @@ def _patch_object(
When used as a class decorator `patch.object` honours `patch.TEST_PREFIX`
for choosing which methods to wrap.
"""
- if type(target) is str:
- raise TypeError(
- f"{target!r} must be the actual object to be patched, not a str"
- )
+ if type(target) is str:
+ raise TypeError(
+ f"{target!r} must be the actual object to be patched, not a str"
+ )
getter = lambda: target
return _patch(
getter, attribute, new, spec, create,
@@ -1652,9 +1652,9 @@ def patch(
is patched with a `new` object. When the function/with statement exits
the patch is undone.
- If `new` is omitted, then the target is replaced with an
- `AsyncMock if the patched object is an async function or a
- `MagicMock` otherwise. If `patch` is used as a decorator and `new` is
+ If `new` is omitted, then the target is replaced with an
+    `AsyncMock` if the patched object is an async function or a
+ `MagicMock` otherwise. If `patch` is used as a decorator and `new` is
omitted, the created mock is passed in as an extra argument to the
decorated function. If `patch` is used as a context manager the created
mock is returned by the context manager.
@@ -1672,8 +1672,8 @@ def patch(
patch to pass in the object being mocked as the spec/spec_set object.
`new_callable` allows you to specify a different class, or callable object,
- that will be called to create the `new` object. By default `AsyncMock` is
- used for async functions and `MagicMock` for the rest.
+ that will be called to create the `new` object. By default `AsyncMock` is
+ used for async functions and `MagicMock` for the rest.
A more powerful form of `spec` is `autospec`. If you set `autospec=True`
then the mock will be created with a spec from the object being replaced.
@@ -1707,8 +1707,8 @@ def patch(
"as"; very useful if `patch` is creating a mock object for you.
`patch` takes arbitrary keyword arguments. These will be passed to
- `AsyncMock` if the patched object is asynchronous, to `MagicMock`
- otherwise or to `new_callable` if specified.
+ `AsyncMock` if the patched object is asynchronous, to `MagicMock`
+ otherwise or to `new_callable` if specified.
`patch.dict(...)`, `patch.multiple(...)` and `patch.object(...)` are
available for alternate use-cases.
@@ -1786,7 +1786,7 @@ class _patch_dict(object):
def __enter__(self):
"""Patch the dict."""
self._patch_dict()
- return self.in_dict
+ return self.in_dict
def _patch_dict(self):
@@ -1832,29 +1832,29 @@ class _patch_dict(object):
def __exit__(self, *args):
"""Unpatch the dict."""
- if self._original is not None:
- self._unpatch_dict()
+ if self._original is not None:
+ self._unpatch_dict()
return False
- def start(self):
- """Activate a patch, returning any created mock."""
- result = self.__enter__()
- _patch._active_patches.append(self)
- return result
-
-
- def stop(self):
- """Stop an active patch."""
- try:
- _patch._active_patches.remove(self)
- except ValueError:
- # If the patch hasn't been started this will fail
- return None
-
- return self.__exit__(None, None, None)
-
-
+ def start(self):
+ """Activate a patch, returning any created mock."""
+ result = self.__enter__()
+ _patch._active_patches.append(self)
+ return result
+
+
+ def stop(self):
+ """Stop an active patch."""
+ try:
+ _patch._active_patches.remove(self)
+ except ValueError:
+ # If the patch hasn't been started this will fail
+ return None
+
+ return self.__exit__(None, None, None)
+
+
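The start/stop pair mirrors the context-manager protocol; a minimal sketch against os.environ:

    import os
    from unittest.mock import patch

    patcher = patch.dict(os.environ, {"APP_MODE": "test"})
    patcher.start()                    # __enter__: patches and returns in_dict
    try:
        print(os.environ["APP_MODE"])  # test
    finally:
        patcher.stop()                 # __exit__: restores the original dict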
def _clear_dict(in_dict):
try:
in_dict.clear()
@@ -1886,10 +1886,10 @@ magic_methods = (
# because there is no idivmod
"divmod rdivmod neg pos abs invert "
"complex int float index "
- "round trunc floor ceil "
+ "round trunc floor ceil "
"bool next "
- "fspath "
- "aiter "
+ "fspath "
+ "aiter "
)
numerics = (
@@ -1913,7 +1913,7 @@ _non_defaults = {
def _get_method(name, func):
"Turns a callable object (like a mock) into a real function"
- def method(self, /, *args, **kw):
+ def method(self, /, *args, **kw):
return func(self, *args, **kw)
method.__name__ = name
return method
@@ -1924,15 +1924,15 @@ _magics = {
' '.join([magic_methods, numerics, inplace, right]).split()
}
-# Magic methods used for async `with` statements
-_async_method_magics = {"__aenter__", "__aexit__", "__anext__"}
-# Magic methods that are only used with async calls but are synchronous functions themselves
-_sync_async_magics = {"__aiter__"}
-_async_magics = _async_method_magics | _sync_async_magics
-
-_all_sync_magics = _magics | _non_defaults
-_all_magics = _all_sync_magics | _async_magics
+# Magic methods used for async `with` statements
+_async_method_magics = {"__aenter__", "__aexit__", "__anext__"}
+# Magic methods that are only used with async calls but are synchronous functions themselves
+_sync_async_magics = {"__aiter__"}
+_async_magics = _async_method_magics | _sync_async_magics
+_all_sync_magics = _magics | _non_defaults
+_all_magics = _all_sync_magics | _async_magics
+
_unsupported_magics = {
'__getattr__', '__setattr__',
'__init__', '__new__', '__prepare__',
@@ -1944,7 +1944,7 @@ _calculate_return_value = {
'__hash__': lambda self: object.__hash__(self),
'__str__': lambda self: object.__str__(self),
'__sizeof__': lambda self: object.__sizeof__(self),
- '__fspath__': lambda self: f"{type(self).__name__}/{self._extract_mock_name()}/{id(self)}",
+ '__fspath__': lambda self: f"{type(self).__name__}/{self._extract_mock_name()}/{id(self)}",
}
_return_values = {
@@ -1960,7 +1960,7 @@ _return_values = {
'__float__': 1.0,
'__bool__': True,
'__index__': 1,
- '__aexit__': False,
+ '__aexit__': False,
}
@@ -1993,19 +1993,19 @@ def _get_iter(self):
return iter(ret_val)
return __iter__
-def _get_async_iter(self):
- def __aiter__():
- ret_val = self.__aiter__._mock_return_value
- if ret_val is DEFAULT:
- return _AsyncIterator(iter([]))
- return _AsyncIterator(iter(ret_val))
- return __aiter__
-
+def _get_async_iter(self):
+ def __aiter__():
+ ret_val = self.__aiter__._mock_return_value
+ if ret_val is DEFAULT:
+ return _AsyncIterator(iter([]))
+ return _AsyncIterator(iter(ret_val))
+ return __aiter__
+
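How the __aiter__ side-effect helper surfaces to users, in brief:

    import asyncio
    from unittest.mock import MagicMock

    async def collect():
        m = MagicMock()                        # MagicMock grew async magics
        m.__aiter__.return_value = [1, 2, 3]   # wrapped by _get_async_iter
        return [item async for item in m]

    print(asyncio.run(collect()))   # [1, 2, 3]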
_side_effect_methods = {
'__eq__': _get_eq,
'__ne__': _get_ne,
'__iter__': _get_iter,
- '__aiter__': _get_async_iter
+ '__aiter__': _get_async_iter
}
@@ -2016,9 +2016,9 @@ def _set_return_value(mock, method, name):
method.return_value = fixed
return
- return_calculator = _calculate_return_value.get(name)
- if return_calculator is not None:
- return_value = return_calculator(mock)
+ return_calculator = _calculate_return_value.get(name)
+ if return_calculator is not None:
+ return_value = return_calculator(mock)
method.return_value = return_value
return
@@ -2028,22 +2028,22 @@ def _set_return_value(mock, method, name):
-class MagicMixin(Base):
- def __init__(self, /, *args, **kw):
+class MagicMixin(Base):
+ def __init__(self, /, *args, **kw):
self._mock_set_magics() # make magic work for kwargs in init
_safe_super(MagicMixin, self).__init__(*args, **kw)
self._mock_set_magics() # fix magic broken by upper level init
def _mock_set_magics(self):
- orig_magics = _magics | _async_method_magics
- these_magics = orig_magics
+ orig_magics = _magics | _async_method_magics
+ these_magics = orig_magics
if getattr(self, "_mock_methods", None) is not None:
- these_magics = orig_magics.intersection(self._mock_methods)
+ these_magics = orig_magics.intersection(self._mock_methods)
remove_magics = set()
- remove_magics = orig_magics - these_magics
+ remove_magics = orig_magics - these_magics
for entry in remove_magics:
if entry in type(self).__dict__:
@@ -2071,11 +2071,11 @@ class NonCallableMagicMock(MagicMixin, NonCallableMock):
self._mock_set_magics()
-class AsyncMagicMixin(MagicMixin):
- def __init__(self, /, *args, **kw):
- self._mock_set_magics() # make magic work for kwargs in init
- _safe_super(AsyncMagicMixin, self).__init__(*args, **kw)
- self._mock_set_magics() # fix magic broken by upper level init
+class AsyncMagicMixin(MagicMixin):
+ def __init__(self, /, *args, **kw):
+ self._mock_set_magics() # make magic work for kwargs in init
+ _safe_super(AsyncMagicMixin, self).__init__(*args, **kw)
+ self._mock_set_magics() # fix magic broken by upper level init
class MagicMock(MagicMixin, Mock):
"""
@@ -2099,7 +2099,7 @@ class MagicMock(MagicMixin, Mock):
-class MagicProxy(Base):
+class MagicProxy(Base):
def __init__(self, name, parent):
self.name = name
self.parent = parent
@@ -2117,232 +2117,232 @@ class MagicProxy(Base):
return self.create_mock()
-class AsyncMockMixin(Base):
- await_count = _delegating_property('await_count')
- await_args = _delegating_property('await_args')
- await_args_list = _delegating_property('await_args_list')
-
- def __init__(self, /, *args, **kwargs):
- super().__init__(*args, **kwargs)
- # iscoroutinefunction() checks _is_coroutine property to say if an
- # object is a coroutine. Without this check it looks to see if it is a
- # function/method, which in this case it is not (since it is an
- # AsyncMock).
- # It is set through __dict__ because when spec_set is True, this
- # attribute is likely undefined.
- self.__dict__['_is_coroutine'] = asyncio.coroutines._is_coroutine
- self.__dict__['_mock_await_count'] = 0
- self.__dict__['_mock_await_args'] = None
- self.__dict__['_mock_await_args_list'] = _CallList()
- code_mock = NonCallableMock(spec_set=CodeType)
- code_mock.co_flags = inspect.CO_COROUTINE
- self.__dict__['__code__'] = code_mock
-
- async def _execute_mock_call(self, /, *args, **kwargs):
- # This is nearly just like super(), except for special handling
- # of coroutines
-
- _call = _Call((args, kwargs), two=True)
- self.await_count += 1
- self.await_args = _call
- self.await_args_list.append(_call)
-
- effect = self.side_effect
- if effect is not None:
- if _is_exception(effect):
- raise effect
- elif not _callable(effect):
- try:
- result = next(effect)
- except StopIteration:
- # It is impossible to propogate a StopIteration
- # through coroutines because of PEP 479
- raise StopAsyncIteration
- if _is_exception(result):
- raise result
- elif iscoroutinefunction(effect):
- result = await effect(*args, **kwargs)
- else:
- result = effect(*args, **kwargs)
-
- if result is not DEFAULT:
- return result
-
- if self._mock_return_value is not DEFAULT:
- return self.return_value
-
- if self._mock_wraps is not None:
- if iscoroutinefunction(self._mock_wraps):
- return await self._mock_wraps(*args, **kwargs)
- return self._mock_wraps(*args, **kwargs)
-
- return self.return_value
-
- def assert_awaited(self):
- """
- Assert that the mock was awaited at least once.
- """
- if self.await_count == 0:
- msg = f"Expected {self._mock_name or 'mock'} to have been awaited."
- raise AssertionError(msg)
-
- def assert_awaited_once(self):
- """
- Assert that the mock was awaited exactly once.
- """
- if not self.await_count == 1:
- msg = (f"Expected {self._mock_name or 'mock'} to have been awaited once."
- f" Awaited {self.await_count} times.")
- raise AssertionError(msg)
-
- def assert_awaited_with(self, /, *args, **kwargs):
- """
- Assert that the last await was with the specified arguments.
- """
- if self.await_args is None:
- expected = self._format_mock_call_signature(args, kwargs)
- raise AssertionError(f'Expected await: {expected}\nNot awaited')
-
- def _error_message():
- msg = self._format_mock_failure_message(args, kwargs, action='await')
- return msg
-
- expected = self._call_matcher(_Call((args, kwargs), two=True))
- actual = self._call_matcher(self.await_args)
- if actual != expected:
- cause = expected if isinstance(expected, Exception) else None
- raise AssertionError(_error_message()) from cause
-
- def assert_awaited_once_with(self, /, *args, **kwargs):
- """
- Assert that the mock was awaited exactly once and with the specified
- arguments.
- """
- if not self.await_count == 1:
- msg = (f"Expected {self._mock_name or 'mock'} to have been awaited once."
- f" Awaited {self.await_count} times.")
- raise AssertionError(msg)
- return self.assert_awaited_with(*args, **kwargs)
-
- def assert_any_await(self, /, *args, **kwargs):
- """
- Assert the mock has ever been awaited with the specified arguments.
- """
- expected = self._call_matcher(_Call((args, kwargs), two=True))
- cause = expected if isinstance(expected, Exception) else None
- actual = [self._call_matcher(c) for c in self.await_args_list]
- if cause or expected not in _AnyComparer(actual):
- expected_string = self._format_mock_call_signature(args, kwargs)
- raise AssertionError(
- '%s await not found' % expected_string
- ) from cause
-
- def assert_has_awaits(self, calls, any_order=False):
- """
- Assert the mock has been awaited with the specified calls.
- The :attr:`await_args_list` list is checked for the awaits.
-
- If `any_order` is False (the default) then the awaits must be
- sequential. There can be extra calls before or after the
- specified awaits.
-
- If `any_order` is True then the awaits can be in any order, but
- they must all appear in :attr:`await_args_list`.
- """
- expected = [self._call_matcher(c) for c in calls]
- cause = next((e for e in expected if isinstance(e, Exception)), None)
- all_awaits = _CallList(self._call_matcher(c) for c in self.await_args_list)
- if not any_order:
- if expected not in all_awaits:
- if cause is None:
- problem = 'Awaits not found.'
- else:
- problem = ('Error processing expected awaits.\n'
- 'Errors: {}').format(
- [e if isinstance(e, Exception) else None
- for e in expected])
- raise AssertionError(
- f'{problem}\n'
- f'Expected: {_CallList(calls)}\n'
- f'Actual: {self.await_args_list}'
- ) from cause
- return
-
- all_awaits = list(all_awaits)
-
- not_found = []
- for kall in expected:
- try:
- all_awaits.remove(kall)
- except ValueError:
- not_found.append(kall)
- if not_found:
- raise AssertionError(
- '%r not all found in await list' % (tuple(not_found),)
- ) from cause
-
- def assert_not_awaited(self):
- """
- Assert that the mock was never awaited.
- """
- if self.await_count != 0:
- msg = (f"Expected {self._mock_name or 'mock'} to not have been awaited."
- f" Awaited {self.await_count} times.")
- raise AssertionError(msg)
-
- def reset_mock(self, /, *args, **kwargs):
- """
- See :func:`.Mock.reset_mock()`
- """
- super().reset_mock(*args, **kwargs)
- self.await_count = 0
- self.await_args = None
- self.await_args_list = _CallList()
-
-
-class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock):
- """
- Enhance :class:`Mock` with features allowing to mock
- an async function.
-
- The :class:`AsyncMock` object will behave so the object is
- recognized as an async function, and the result of a call is an awaitable:
-
- >>> mock = AsyncMock()
- >>> iscoroutinefunction(mock)
- True
- >>> inspect.isawaitable(mock())
- True
-
-
- The result of ``mock()`` is an async function which will have the outcome
- of ``side_effect`` or ``return_value``:
-
- - if ``side_effect`` is a function, the async function will return the
- result of that function,
- - if ``side_effect`` is an exception, the async function will raise the
- exception,
- - if ``side_effect`` is an iterable, the async function will return the
- next value of the iterable, however, if the sequence of result is
- exhausted, ``StopIteration`` is raised immediately,
- - if ``side_effect`` is not defined, the async function will return the
- value defined by ``return_value``, hence, by default, the async function
- returns a new :class:`AsyncMock` object.
-
- If the outcome of ``side_effect`` or ``return_value`` is an async function,
- the mock async function obtained when the mock object is called will be this
- async function itself (and not an async function returning an async
- function).
-
- The test author can also specify a wrapped object with ``wraps``. In this
- case, the :class:`Mock` object behavior is the same as with an
- :class:`.Mock` object: the wrapped object may have methods
- defined as async function functions.
-
- Based on Martin Richard's asynctest project.
- """
-
-
+class AsyncMockMixin(Base):
+ await_count = _delegating_property('await_count')
+ await_args = _delegating_property('await_args')
+ await_args_list = _delegating_property('await_args_list')
+
+ def __init__(self, /, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ # iscoroutinefunction() checks _is_coroutine property to say if an
+ # object is a coroutine. Without this check it looks to see if it is a
+ # function/method, which in this case it is not (since it is an
+ # AsyncMock).
+ # It is set through __dict__ because when spec_set is True, this
+ # attribute is likely undefined.
+ self.__dict__['_is_coroutine'] = asyncio.coroutines._is_coroutine
+ self.__dict__['_mock_await_count'] = 0
+ self.__dict__['_mock_await_args'] = None
+ self.__dict__['_mock_await_args_list'] = _CallList()
+ code_mock = NonCallableMock(spec_set=CodeType)
+ code_mock.co_flags = inspect.CO_COROUTINE
+ self.__dict__['__code__'] = code_mock
+
+ async def _execute_mock_call(self, /, *args, **kwargs):
+ # This is nearly just like super(), except for special handling
+ # of coroutines
+
+ _call = _Call((args, kwargs), two=True)
+ self.await_count += 1
+ self.await_args = _call
+ self.await_args_list.append(_call)
+
+ effect = self.side_effect
+ if effect is not None:
+ if _is_exception(effect):
+ raise effect
+ elif not _callable(effect):
+ try:
+ result = next(effect)
+ except StopIteration:
+            # It is impossible to propagate a StopIteration
+ # through coroutines because of PEP 479
+ raise StopAsyncIteration
+ if _is_exception(result):
+ raise result
+ elif iscoroutinefunction(effect):
+ result = await effect(*args, **kwargs)
+ else:
+ result = effect(*args, **kwargs)
+
+ if result is not DEFAULT:
+ return result
+
+ if self._mock_return_value is not DEFAULT:
+ return self.return_value
+
+ if self._mock_wraps is not None:
+ if iscoroutinefunction(self._mock_wraps):
+ return await self._mock_wraps(*args, **kwargs)
+ return self._mock_wraps(*args, **kwargs)
+
+ return self.return_value
+
+ def assert_awaited(self):
+ """
+ Assert that the mock was awaited at least once.
+ """
+ if self.await_count == 0:
+ msg = f"Expected {self._mock_name or 'mock'} to have been awaited."
+ raise AssertionError(msg)
+
+ def assert_awaited_once(self):
+ """
+ Assert that the mock was awaited exactly once.
+ """
+ if not self.await_count == 1:
+ msg = (f"Expected {self._mock_name or 'mock'} to have been awaited once."
+ f" Awaited {self.await_count} times.")
+ raise AssertionError(msg)
+
+ def assert_awaited_with(self, /, *args, **kwargs):
+ """
+ Assert that the last await was with the specified arguments.
+ """
+ if self.await_args is None:
+ expected = self._format_mock_call_signature(args, kwargs)
+ raise AssertionError(f'Expected await: {expected}\nNot awaited')
+
+ def _error_message():
+ msg = self._format_mock_failure_message(args, kwargs, action='await')
+ return msg
+
+ expected = self._call_matcher(_Call((args, kwargs), two=True))
+ actual = self._call_matcher(self.await_args)
+ if actual != expected:
+ cause = expected if isinstance(expected, Exception) else None
+ raise AssertionError(_error_message()) from cause
+
+ def assert_awaited_once_with(self, /, *args, **kwargs):
+ """
+ Assert that the mock was awaited exactly once and with the specified
+ arguments.
+ """
+ if not self.await_count == 1:
+ msg = (f"Expected {self._mock_name or 'mock'} to have been awaited once."
+ f" Awaited {self.await_count} times.")
+ raise AssertionError(msg)
+ return self.assert_awaited_with(*args, **kwargs)
+
+ def assert_any_await(self, /, *args, **kwargs):
+ """
+ Assert the mock has ever been awaited with the specified arguments.
+ """
+ expected = self._call_matcher(_Call((args, kwargs), two=True))
+ cause = expected if isinstance(expected, Exception) else None
+ actual = [self._call_matcher(c) for c in self.await_args_list]
+ if cause or expected not in _AnyComparer(actual):
+ expected_string = self._format_mock_call_signature(args, kwargs)
+ raise AssertionError(
+ '%s await not found' % expected_string
+ ) from cause
+
+ def assert_has_awaits(self, calls, any_order=False):
+ """
+ Assert the mock has been awaited with the specified calls.
+ The :attr:`await_args_list` list is checked for the awaits.
+
+ If `any_order` is False (the default) then the awaits must be
+ sequential. There can be extra calls before or after the
+ specified awaits.
+
+ If `any_order` is True then the awaits can be in any order, but
+ they must all appear in :attr:`await_args_list`.
+ """
+ expected = [self._call_matcher(c) for c in calls]
+ cause = next((e for e in expected if isinstance(e, Exception)), None)
+ all_awaits = _CallList(self._call_matcher(c) for c in self.await_args_list)
+ if not any_order:
+ if expected not in all_awaits:
+ if cause is None:
+ problem = 'Awaits not found.'
+ else:
+ problem = ('Error processing expected awaits.\n'
+ 'Errors: {}').format(
+ [e if isinstance(e, Exception) else None
+ for e in expected])
+ raise AssertionError(
+ f'{problem}\n'
+ f'Expected: {_CallList(calls)}\n'
+ f'Actual: {self.await_args_list}'
+ ) from cause
+ return
+
+ all_awaits = list(all_awaits)
+
+ not_found = []
+ for kall in expected:
+ try:
+ all_awaits.remove(kall)
+ except ValueError:
+ not_found.append(kall)
+ if not_found:
+ raise AssertionError(
+ '%r not all found in await list' % (tuple(not_found),)
+ ) from cause
+
+ def assert_not_awaited(self):
+ """
+ Assert that the mock was never awaited.
+ """
+ if self.await_count != 0:
+ msg = (f"Expected {self._mock_name or 'mock'} to not have been awaited."
+ f" Awaited {self.await_count} times.")
+ raise AssertionError(msg)
+
+ def reset_mock(self, /, *args, **kwargs):
+ """
+ See :func:`.Mock.reset_mock()`
+ """
+ super().reset_mock(*args, **kwargs)
+ self.await_count = 0
+ self.await_args = None
+ self.await_args_list = _CallList()
+
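The await-tracking attributes and assertions above in action:

    import asyncio
    from unittest.mock import AsyncMock

    m = AsyncMock(return_value=7)

    async def main():
        await m(1)
        await m(2)

    asyncio.run(main())
    m.assert_awaited()            # at least one await happened
    m.assert_awaited_with(2)      # checks only the most recent await
    print(m.await_count)          # 2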
+
+class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock):
+ """
+    Enhance :class:`Mock` with features allowing it to mock
+ an async function.
+
+    The :class:`AsyncMock` object will behave so that the object is
+ recognized as an async function, and the result of a call is an awaitable:
+
+ >>> mock = AsyncMock()
+ >>> iscoroutinefunction(mock)
+ True
+ >>> inspect.isawaitable(mock())
+ True
+
+
+ The result of ``mock()`` is an async function which will have the outcome
+ of ``side_effect`` or ``return_value``:
+
+ - if ``side_effect`` is a function, the async function will return the
+ result of that function,
+ - if ``side_effect`` is an exception, the async function will raise the
+ exception,
+ - if ``side_effect`` is an iterable, the async function will return the
+      next value of the iterable; however, if the sequence of results is
+      exhausted, ``StopAsyncIteration`` is raised immediately,
+ - if ``side_effect`` is not defined, the async function will return the
+ value defined by ``return_value``, hence, by default, the async function
+ returns a new :class:`AsyncMock` object.
+
+ If the outcome of ``side_effect`` or ``return_value`` is an async function,
+ the mock async function obtained when the mock object is called will be this
+ async function itself (and not an async function returning an async
+ function).
+
+ The test author can also specify a wrapped object with ``wraps``. In this
+ case, the :class:`Mock` object behavior is the same as with an
+ :class:`.Mock` object: the wrapped object may have methods
+    defined as async functions.
+
+ Based on Martin Richard's asynctest project.
+ """
+
+
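The side_effect rules from the docstring, demonstrated with an iterable mixing a value and an exception:

    import asyncio
    from unittest.mock import AsyncMock

    m = AsyncMock(side_effect=[1, ValueError("boom")])

    async def main():
        print(await m())          # 1: the next value of the iterable
        try:
            await m()             # an exception in the sequence is raised
        except ValueError as exc:
            print("raised:", exc)

    asyncio.run(main())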
class _ANY(object):
"A helper object that compares equal to everything."
@@ -2364,7 +2364,7 @@ def _format_call_signature(name, args, kwargs):
formatted_args = ''
args_string = ', '.join([repr(arg) for arg in args])
kwargs_string = ', '.join([
- '%s=%r' % (key, value) for key, value in kwargs.items()
+ '%s=%r' % (key, value) for key, value in kwargs.items()
])
if args_string:
formatted_args = args_string
@@ -2439,7 +2439,7 @@ class _Call(tuple):
try:
len_other = len(other)
except TypeError:
- return NotImplemented
+ return NotImplemented
self_name = ''
if len(self) == 2:
@@ -2491,7 +2491,7 @@ class _Call(tuple):
__ne__ = object.__ne__
- def __call__(self, /, *args, **kwargs):
+ def __call__(self, /, *args, **kwargs):
if self._mock_name is None:
return _Call(('', args, kwargs), name='()')
@@ -2506,28 +2506,28 @@ class _Call(tuple):
return _Call(name=name, parent=self, from_kall=False)
- def __getattribute__(self, attr):
- if attr in tuple.__dict__:
- raise AttributeError
- return tuple.__getattribute__(self, attr)
-
-
- def _get_call_arguments(self):
- if len(self) == 2:
- args, kwargs = self
- else:
- name, args, kwargs = self
-
- return args, kwargs
-
- @property
- def args(self):
- return self._get_call_arguments()[0]
-
- @property
- def kwargs(self):
- return self._get_call_arguments()[1]
-
+ def __getattribute__(self, attr):
+ if attr in tuple.__dict__:
+ raise AttributeError
+ return tuple.__getattribute__(self, attr)
+
+
+ def _get_call_arguments(self):
+ if len(self) == 2:
+ args, kwargs = self
+ else:
+ name, args, kwargs = self
+
+ return args, kwargs
+
+ @property
+ def args(self):
+ return self._get_call_arguments()[0]
+
+ @property
+ def kwargs(self):
+ return self._get_call_arguments()[1]
+
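The new args/kwargs properties make the recorded call easy to unpack:

    from unittest.mock import Mock

    m = Mock()
    m.connect("db", timeout=5)
    last = m.connect.call_args
    print(last.args)     # ('db',)
    print(last.kwargs)   # {'timeout': 5}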
def __repr__(self):
if not self._mock_from_kall:
name = self._mock_name or 'call'
@@ -2590,7 +2590,7 @@ def create_autospec(spec, spec_set=False, instance=False, _parent=None,
spec = type(spec)
is_type = isinstance(spec, type)
- is_async_func = _is_async_func(spec)
+ is_async_func = _is_async_func(spec)
_kwargs = {'spec': spec}
if spec_set:
_kwargs = {'spec_set': spec}
@@ -2607,11 +2607,11 @@ def create_autospec(spec, spec_set=False, instance=False, _parent=None,
# descriptors don't have a spec
# because we don't know what type they return
_kwargs = {}
- elif is_async_func:
- if instance:
- raise RuntimeError("Instance can not be True when create_autospec "
- "is mocking an async function")
- Klass = AsyncMock
+ elif is_async_func:
+ if instance:
+ raise RuntimeError("Instance can not be True when create_autospec "
+ "is mocking an async function")
+ Klass = AsyncMock
elif not _callable(spec):
Klass = NonCallableMagicMock
elif is_type and instance and not _instance_callable(spec):
@@ -2631,8 +2631,8 @@ def create_autospec(spec, spec_set=False, instance=False, _parent=None,
# should only happen at the top level because we don't
# recurse for functions
mock = _set_signature(mock, spec)
- if is_async_func:
- _setup_async_mock(mock)
+ if is_async_func:
+ _setup_async_mock(mock)
else:
_check_signature(spec, mock, is_type, instance)
@@ -2676,13 +2676,13 @@ def create_autospec(spec, spec_set=False, instance=False, _parent=None,
skipfirst = _must_skip(spec, entry, is_type)
kwargs['_eat_self'] = skipfirst
- if iscoroutinefunction(original):
- child_klass = AsyncMock
- else:
- child_klass = MagicMock
- new = child_klass(parent=parent, name=entry, _new_name=entry,
- _new_parent=parent,
- **kwargs)
+ if iscoroutinefunction(original):
+ child_klass = AsyncMock
+ else:
+ child_klass = MagicMock
+ new = child_klass(parent=parent, name=entry, _new_name=entry,
+ _new_parent=parent,
+ **kwargs)
mock._mock_children[entry] = new
_check_signature(original, new, skipfirst=skipfirst)
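
With the branches above, autospeccing an async function yields an awaitable mock that still enforces the original signature. A hedged sketch (Python 3.8+; ``fetch`` and the URL are placeholders):

    import asyncio
    from unittest.mock import create_autospec

    async def fetch(url, timeout=5):
        ...

    mock_fetch = create_autospec(fetch, return_value='body')

    # The result is recognized as an async callable and records awaits.
    assert asyncio.iscoroutinefunction(mock_fetch)
    assert asyncio.run(mock_fetch('https://example.com')) == 'body'
    mock_fetch.assert_awaited_once_with('https://example.com')
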
@@ -2713,14 +2713,14 @@ def _must_skip(spec, entry, is_type):
continue
if isinstance(result, (staticmethod, classmethod)):
return False
- elif isinstance(result, FunctionTypes):
+ elif isinstance(result, FunctionTypes):
# Normal method => skip if looked up on type
# (if looked up on instance, self is already skipped)
return is_type
else:
return False
- # function is a dynamically provided attribute
+ # function is a dynamically provided attribute
return is_type
@@ -2747,11 +2747,11 @@ FunctionTypes = (
file_spec = None
-def _to_stream(read_data):
- if isinstance(read_data, bytes):
- return io.BytesIO(read_data)
+def _to_stream(read_data):
+ if isinstance(read_data, bytes):
+ return io.BytesIO(read_data)
else:
- return io.StringIO(read_data)
+ return io.StringIO(read_data)
def mock_open(mock=None, read_data=''):
@@ -2766,23 +2766,23 @@ def mock_open(mock=None, read_data=''):
`read_data` is a string for the `read`, `readline` and `readlines` of the
file handle to return. This is an empty string by default.
"""
- _read_data = _to_stream(read_data)
- _state = [_read_data, None]
-
+ _read_data = _to_stream(read_data)
+ _state = [_read_data, None]
+
def _readlines_side_effect(*args, **kwargs):
if handle.readlines.return_value is not None:
return handle.readlines.return_value
- return _state[0].readlines(*args, **kwargs)
+ return _state[0].readlines(*args, **kwargs)
def _read_side_effect(*args, **kwargs):
if handle.read.return_value is not None:
return handle.read.return_value
- return _state[0].read(*args, **kwargs)
+ return _state[0].read(*args, **kwargs)
- def _readline_side_effect(*args, **kwargs):
+ def _readline_side_effect(*args, **kwargs):
yield from _iter_side_effect()
while True:
- yield _state[0].readline(*args, **kwargs)
+ yield _state[0].readline(*args, **kwargs)
def _iter_side_effect():
if handle.readline.return_value is not None:
@@ -2791,11 +2791,11 @@ def mock_open(mock=None, read_data=''):
for line in _state[0]:
yield line
- def _next_side_effect():
- if handle.readline.return_value is not None:
- return handle.readline.return_value
- return next(_state[0])
-
+ def _next_side_effect():
+ if handle.readline.return_value is not None:
+ return handle.readline.return_value
+ return next(_state[0])
+
global file_spec
if file_spec is None:
import _io
@@ -2817,10 +2817,10 @@ def mock_open(mock=None, read_data=''):
handle.readline.side_effect = _state[1]
handle.readlines.side_effect = _readlines_side_effect
handle.__iter__.side_effect = _iter_side_effect
- handle.__next__.side_effect = _next_side_effect
+ handle.__next__.side_effect = _next_side_effect
def reset_data(*args, **kwargs):
- _state[0] = _to_stream(read_data)
+ _state[0] = _to_stream(read_data)
if handle.readline.side_effect == _state[1]:
# Only reset the side effect if the user hasn't overridden it.
_state[1] = _readline_side_effect()
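
Because ``read_data`` is now backed by a real stream (``_to_stream`` above), ``read``, ``readline`` and iteration can be interleaved; a short sketch (the path is a placeholder):

    from unittest.mock import mock_open, patch

    m = mock_open(read_data='line1\nline2\n')
    with patch('builtins.open', m):
        with open('any/path.txt') as f:
            assert f.readline() == 'line1\n'
            assert f.read() == 'line2\n'
    m.assert_called_once_with('any/path.txt')
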
@@ -2841,10 +2841,10 @@ class PropertyMock(Mock):
Fetching a `PropertyMock` instance from an object calls the mock, with
no args. Setting it calls the mock with the value being set.
"""
- def _get_child_mock(self, /, **kwargs):
+ def _get_child_mock(self, /, **kwargs):
return MagicMock(**kwargs)
- def __get__(self, obj, obj_type=None):
+ def __get__(self, obj, obj_type=None):
return self()
def __set__(self, obj, val):
self(val)
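
A minimal PropertyMock sketch matching the ``__get__``/``__set__`` behavior above:

    from unittest.mock import PropertyMock, patch

    class Thing:
        @property
        def value(self):
            return 'real'

    with patch.object(Thing, 'value', new_callable=PropertyMock) as mock_value:
        mock_value.return_value = 'mocked'
        t = Thing()
        assert t.value == 'mocked'   # __get__ calls the mock with no args
        t.value = 7                  # __set__ calls the mock with the value
        mock_value.assert_called_with(7)
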
@@ -2868,25 +2868,25 @@ def seal(mock):
continue
if not isinstance(m, NonCallableMock):
continue
- if isinstance(m._mock_children.get(attr), _SpecState):
- continue
+ if isinstance(m._mock_children.get(attr), _SpecState):
+ continue
if m._mock_new_parent is mock:
seal(m)
-
-
-class _AsyncIterator:
- """
- Wraps an iterator in an asynchronous iterator.
- """
- def __init__(self, iterator):
- self.iterator = iterator
- code_mock = NonCallableMock(spec_set=CodeType)
- code_mock.co_flags = inspect.CO_ITERABLE_COROUTINE
- self.__dict__['__code__'] = code_mock
-
- async def __anext__(self):
- try:
- return next(self.iterator)
- except StopIteration:
- pass
- raise StopAsyncIteration
+
+
+class _AsyncIterator:
+ """
+ Wraps an iterator in an asynchronous iterator.
+ """
+ def __init__(self, iterator):
+ self.iterator = iterator
+ code_mock = NonCallableMock(spec_set=CodeType)
+ code_mock.co_flags = inspect.CO_ITERABLE_COROUTINE
+ self.__dict__['__code__'] = code_mock
+
+ async def __anext__(self):
+ try:
+ return next(self.iterator)
+ except StopIteration:
+ pass
+ raise StopAsyncIteration
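
``_AsyncIterator`` is what lets a MagicMock drive ``async for``; the observable behavior, sketched (Python 3.8+):

    import asyncio
    from unittest.mock import MagicMock

    m = MagicMock()
    m.__aiter__.return_value = [1, 2, 3]

    async def collect():
        return [item async for item in m]

    assert asyncio.run(collect()) == [1, 2, 3]
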
diff --git a/contrib/tools/python3/src/Lib/unittest/result.py b/contrib/tools/python3/src/Lib/unittest/result.py
index 111317b329..177bf08024 100644
--- a/contrib/tools/python3/src/Lib/unittest/result.py
+++ b/contrib/tools/python3/src/Lib/unittest/result.py
@@ -161,7 +161,7 @@ class TestResult(object):
"""Tells whether or not this result was a success."""
# The hasattr check is for test_result's OldResult test. That
# way this method works on objects that lack the attribute.
- # (where would such result instances come from? old stored pickles?)
+ # (where would such result instances come from? old stored pickles?)
return ((len(self.failures) == len(self.errors) == 0) and
(not hasattr(self, 'unexpectedSuccesses') or
len(self.unexpectedSuccesses) == 0))
diff --git a/contrib/tools/python3/src/Lib/unittest/runner.py b/contrib/tools/python3/src/Lib/unittest/runner.py
index caf159002d..dc65682650 100644
--- a/contrib/tools/python3/src/Lib/unittest/runner.py
+++ b/contrib/tools/python3/src/Lib/unittest/runner.py
@@ -59,7 +59,7 @@ class TextTestResult(result.TestResult):
super(TextTestResult, self).addSuccess(test)
if self.showAll:
self.stream.writeln("ok")
- self.stream.flush()
+ self.stream.flush()
elif self.dots:
self.stream.write('.')
self.stream.flush()
@@ -68,7 +68,7 @@ class TextTestResult(result.TestResult):
super(TextTestResult, self).addError(test, err)
if self.showAll:
self.stream.writeln("ERROR")
- self.stream.flush()
+ self.stream.flush()
elif self.dots:
self.stream.write('E')
self.stream.flush()
@@ -77,7 +77,7 @@ class TextTestResult(result.TestResult):
super(TextTestResult, self).addFailure(test, err)
if self.showAll:
self.stream.writeln("FAIL")
- self.stream.flush()
+ self.stream.flush()
elif self.dots:
self.stream.write('F')
self.stream.flush()
@@ -86,7 +86,7 @@ class TextTestResult(result.TestResult):
super(TextTestResult, self).addSkip(test, reason)
if self.showAll:
self.stream.writeln("skipped {0!r}".format(reason))
- self.stream.flush()
+ self.stream.flush()
elif self.dots:
self.stream.write("s")
self.stream.flush()
@@ -95,7 +95,7 @@ class TextTestResult(result.TestResult):
super(TextTestResult, self).addExpectedFailure(test, err)
if self.showAll:
self.stream.writeln("expected failure")
- self.stream.flush()
+ self.stream.flush()
elif self.dots:
self.stream.write("x")
self.stream.flush()
@@ -104,7 +104,7 @@ class TextTestResult(result.TestResult):
super(TextTestResult, self).addUnexpectedSuccess(test)
if self.showAll:
self.stream.writeln("unexpected success")
- self.stream.flush()
+ self.stream.flush()
elif self.dots:
self.stream.write("u")
self.stream.flush()
@@ -112,7 +112,7 @@ class TextTestResult(result.TestResult):
def printErrors(self):
if self.dots or self.showAll:
self.stream.writeln()
- self.stream.flush()
+ self.stream.flush()
self.printErrorList('ERROR', self.errors)
self.printErrorList('FAIL', self.failures)
@@ -122,7 +122,7 @@ class TextTestResult(result.TestResult):
self.stream.writeln("%s: %s" % (flavour,self.getDescription(test)))
self.stream.writeln(self.separator2)
self.stream.writeln("%s" % err)
- self.stream.flush()
+ self.stream.flush()
class TextTestRunner(object):
@@ -226,5 +226,5 @@ class TextTestRunner(object):
self.stream.writeln(" (%s)" % (", ".join(infos),))
else:
self.stream.write("\n")
- self.stream.flush()
+ self.stream.flush()
return result
diff --git a/contrib/tools/python3/src/Lib/unittest/suite.py b/contrib/tools/python3/src/Lib/unittest/suite.py
index 6f45b6fe5f..6df95fbc1b 100644
--- a/contrib/tools/python3/src/Lib/unittest/suite.py
+++ b/contrib/tools/python3/src/Lib/unittest/suite.py
@@ -149,7 +149,7 @@ class TestSuite(BaseTestSuite):
if getattr(currentClass, "__unittest_skip__", False):
return
- failed = False
+ failed = False
try:
currentClass._classSetupFailed = False
except TypeError:
@@ -158,30 +158,30 @@ class TestSuite(BaseTestSuite):
pass
setUpClass = getattr(currentClass, 'setUpClass', None)
- doClassCleanups = getattr(currentClass, 'doClassCleanups', None)
+ doClassCleanups = getattr(currentClass, 'doClassCleanups', None)
if setUpClass is not None:
_call_if_exists(result, '_setupStdout')
try:
- try:
- setUpClass()
- except Exception as e:
- if isinstance(result, _DebugResult):
- raise
- failed = True
- try:
- currentClass._classSetupFailed = True
- except TypeError:
- pass
- className = util.strclass(currentClass)
- self._createClassOrModuleLevelException(result, e,
- 'setUpClass',
- className)
- if failed and doClassCleanups is not None:
- doClassCleanups()
- for exc_info in currentClass.tearDown_exceptions:
- self._createClassOrModuleLevelException(
- result, exc_info[1], 'setUpClass', className,
- info=exc_info)
+ try:
+ setUpClass()
+ except Exception as e:
+ if isinstance(result, _DebugResult):
+ raise
+ failed = True
+ try:
+ currentClass._classSetupFailed = True
+ except TypeError:
+ pass
+ className = util.strclass(currentClass)
+ self._createClassOrModuleLevelException(result, e,
+ 'setUpClass',
+ className)
+ if failed and doClassCleanups is not None:
+ doClassCleanups()
+ for exc_info in currentClass.tearDown_exceptions:
+ self._createClassOrModuleLevelException(
+ result, exc_info[1], 'setUpClass', className,
+ info=exc_info)
finally:
_call_if_exists(result, '_restoreStdout')
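
The ``doClassCleanups`` handling above pairs with ``TestCase.addClassCleanup``: cleanups registered in ``setUpClass`` run even when ``setUpClass`` fails later. A minimal sketch (Python 3.8+; names are illustrative):

    import io
    import unittest

    class Example(unittest.TestCase):
        @classmethod
        def setUpClass(cls):
            cls.resource = io.StringIO('fixture data')
            # Runs via doClassCleanups, even if setUpClass fails below this line.
            cls.addClassCleanup(cls.resource.close)

        def test_uses_resource(self):
            self.assertEqual(self.resource.read(), 'fixture data')

    if __name__ == '__main__':
        unittest.main()
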
@@ -211,41 +211,41 @@ class TestSuite(BaseTestSuite):
if setUpModule is not None:
_call_if_exists(result, '_setupStdout')
try:
- try:
- setUpModule()
- except Exception as e:
- if isinstance(result, _DebugResult):
- raise
- result._moduleSetUpFailed = True
- self._createClassOrModuleLevelException(result, e,
- 'setUpModule',
- currentModule)
- if result._moduleSetUpFailed:
- try:
- case.doModuleCleanups()
- except Exception as e:
- self._createClassOrModuleLevelException(result, e,
- 'setUpModule',
- currentModule)
+ try:
+ setUpModule()
+ except Exception as e:
+ if isinstance(result, _DebugResult):
+ raise
+ result._moduleSetUpFailed = True
+ self._createClassOrModuleLevelException(result, e,
+ 'setUpModule',
+ currentModule)
+ if result._moduleSetUpFailed:
+ try:
+ case.doModuleCleanups()
+ except Exception as e:
+ self._createClassOrModuleLevelException(result, e,
+ 'setUpModule',
+ currentModule)
finally:
_call_if_exists(result, '_restoreStdout')
- def _createClassOrModuleLevelException(self, result, exc, method_name,
- parent, info=None):
- errorName = f'{method_name} ({parent})'
- self._addClassOrModuleLevelException(result, exc, errorName, info)
-
- def _addClassOrModuleLevelException(self, result, exception, errorName,
- info=None):
+ def _createClassOrModuleLevelException(self, result, exc, method_name,
+ parent, info=None):
+ errorName = f'{method_name} ({parent})'
+ self._addClassOrModuleLevelException(result, exc, errorName, info)
+
+ def _addClassOrModuleLevelException(self, result, exception, errorName,
+ info=None):
error = _ErrorHolder(errorName)
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None and isinstance(exception, case.SkipTest):
addSkip(error, str(exception))
else:
- if not info:
- result.addError(error, sys.exc_info())
- else:
- result.addError(error, info)
+ if not info:
+ result.addError(error, sys.exc_info())
+ else:
+ result.addError(error, info)
def _handleModuleTearDown(self, result):
previousModule = self._get_previous_module(result)
@@ -259,33 +259,33 @@ class TestSuite(BaseTestSuite):
except KeyError:
return
- _call_if_exists(result, '_setupStdout')
- try:
- tearDownModule = getattr(module, 'tearDownModule', None)
- if tearDownModule is not None:
- try:
- tearDownModule()
- except Exception as e:
- if isinstance(result, _DebugResult):
- raise
- self._createClassOrModuleLevelException(result, e,
- 'tearDownModule',
- previousModule)
+ _call_if_exists(result, '_setupStdout')
+ try:
+ tearDownModule = getattr(module, 'tearDownModule', None)
+ if tearDownModule is not None:
+ try:
+ tearDownModule()
+ except Exception as e:
+ if isinstance(result, _DebugResult):
+ raise
+ self._createClassOrModuleLevelException(result, e,
+ 'tearDownModule',
+ previousModule)
try:
- case.doModuleCleanups()
+ case.doModuleCleanups()
except Exception as e:
if isinstance(result, _DebugResult):
raise
- self._createClassOrModuleLevelException(result, e,
- 'tearDownModule',
- previousModule)
- finally:
- _call_if_exists(result, '_restoreStdout')
+ self._createClassOrModuleLevelException(result, e,
+ 'tearDownModule',
+ previousModule)
+ finally:
+ _call_if_exists(result, '_restoreStdout')
def _tearDownPreviousClass(self, test, result):
previousClass = getattr(result, '_previousTestClass', None)
currentClass = test.__class__
- if currentClass == previousClass or previousClass is None:
+ if currentClass == previousClass or previousClass is None:
return
if getattr(previousClass, '_classSetupFailed', False):
return
@@ -295,36 +295,36 @@ class TestSuite(BaseTestSuite):
return
tearDownClass = getattr(previousClass, 'tearDownClass', None)
- doClassCleanups = getattr(previousClass, 'doClassCleanups', None)
- if tearDownClass is None and doClassCleanups is None:
- return
-
- _call_if_exists(result, '_setupStdout')
- try:
- if tearDownClass is not None:
- try:
- tearDownClass()
- except Exception as e:
- if isinstance(result, _DebugResult):
- raise
- className = util.strclass(previousClass)
- self._createClassOrModuleLevelException(result, e,
- 'tearDownClass',
- className)
- if doClassCleanups is not None:
- doClassCleanups()
- for exc_info in previousClass.tearDown_exceptions:
- if isinstance(result, _DebugResult):
- raise exc_info[1]
- className = util.strclass(previousClass)
- self._createClassOrModuleLevelException(result, exc_info[1],
- 'tearDownClass',
- className,
- info=exc_info)
- finally:
- _call_if_exists(result, '_restoreStdout')
-
-
+ doClassCleanups = getattr(previousClass, 'doClassCleanups', None)
+ if tearDownClass is None and doClassCleanups is None:
+ return
+
+ _call_if_exists(result, '_setupStdout')
+ try:
+ if tearDownClass is not None:
+ try:
+ tearDownClass()
+ except Exception as e:
+ if isinstance(result, _DebugResult):
+ raise
+ className = util.strclass(previousClass)
+ self._createClassOrModuleLevelException(result, e,
+ 'tearDownClass',
+ className)
+ if doClassCleanups is not None:
+ doClassCleanups()
+ for exc_info in previousClass.tearDown_exceptions:
+ if isinstance(result, _DebugResult):
+ raise exc_info[1]
+ className = util.strclass(previousClass)
+ self._createClassOrModuleLevelException(result, exc_info[1],
+ 'tearDownClass',
+ className,
+ info=exc_info)
+ finally:
+ _call_if_exists(result, '_restoreStdout')
+
+
class _ErrorHolder(object):
"""
Placeholder for a TestCase inside a result. As far as a TestResult
diff --git a/contrib/tools/python3/src/Lib/urllib/parse.py b/contrib/tools/python3/src/Lib/urllib/parse.py
index b7965fe3d2..f6299398c9 100644
--- a/contrib/tools/python3/src/Lib/urllib/parse.py
+++ b/contrib/tools/python3/src/Lib/urllib/parse.py
@@ -29,9 +29,9 @@ test_urlparse.py provides a good indicator of parsing behavior.
import re
import sys
-import types
+import types
import collections
-import warnings
+import warnings
__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag",
"urlsplit", "urlunsplit", "urlencode", "parse_qs",
@@ -78,9 +78,9 @@ scheme_chars = ('abcdefghijklmnopqrstuvwxyz'
'0123456789'
'+-.')
-# Unsafe bytes to be removed per WHATWG spec
-_UNSAFE_URL_BYTES_TO_REMOVE = ['\t', '\r', '\n']
-
+# Unsafe bytes to be removed per WHATWG spec
+_UNSAFE_URL_BYTES_TO_REMOVE = ['\t', '\r', '\n']
+
# XXX: Consider replacing with functools.lru_cache
MAX_CACHE_SIZE = 20
_parse_cache = {}
@@ -171,18 +171,18 @@ class _NetlocResultMixinBase(object):
def port(self):
port = self._hostinfo[1]
if port is not None:
- try:
- port = int(port, 10)
- except ValueError:
- message = f'Port could not be cast to integer value as {port!r}'
- raise ValueError(message) from None
+ try:
+ port = int(port, 10)
+ except ValueError:
+ message = f'Port could not be cast to integer value as {port!r}'
+ raise ValueError(message) from None
if not ( 0 <= port <= 65535):
raise ValueError("Port out of range 0-65535")
return port
- __class_getitem__ = classmethod(types.GenericAlias)
-
+ __class_getitem__ = classmethod(types.GenericAlias)
+
class _NetlocResultMixinStr(_NetlocResultMixinBase, _ResultMixinStr):
__slots__ = ()
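
The try/except above turns a malformed port into a ValueError with a clear message instead of a bare ``int()`` failure; for example:

    from urllib.parse import urlsplit

    assert urlsplit('https://example.com:8080/').port == 8080
    try:
        urlsplit('https://example.com:80x/').port
    except ValueError as exc:
        assert 'could not be cast to integer' in str(exc)
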
@@ -295,7 +295,7 @@ by reference to a primary resource and additional identifying information.
"""
_ParseResultBase.__doc__ = """
-ParseResult(scheme, netloc, path, params, query, fragment)
+ParseResult(scheme, netloc, path, params, query, fragment)
A 6-tuple that contains components of a parsed URL.
"""
@@ -372,23 +372,23 @@ del _fix_result_transcoding
def urlparse(url, scheme='', allow_fragments=True):
"""Parse a URL into 6 components:
<scheme>://<netloc>/<path>;<params>?<query>#<fragment>
-
- The result is a named 6-tuple with fields corresponding to the
- above. It is either a ParseResult or ParseResultBytes object,
- depending on the type of the url parameter.
-
- The username, password, hostname, and port sub-components of netloc
- can also be accessed as attributes of the returned object.
-
- The scheme argument provides the default value of the scheme
- component when no scheme is found in url.
-
- If allow_fragments is False, no attempt is made to separate the
- fragment component from the previous component, which can be either
- path or query.
-
- Note that % escapes are not expanded.
- """
+
+ The result is a named 6-tuple with fields corresponding to the
+ above. It is either a ParseResult or ParseResultBytes object,
+ depending on the type of the url parameter.
+
+ The username, password, hostname, and port sub-components of netloc
+ can also be accessed as attributes of the returned object.
+
+ The scheme argument provides the default value of the scheme
+ component when no scheme is found in url.
+
+ If allow_fragments is False, no attempt is made to separate the
+ fragment component from the previous component, which can be either
+ path or query.
+
+ Note that % escapes are not expanded.
+ """
url, scheme, _coerce_result = _coerce_args(url, scheme)
splitresult = urlsplit(url, scheme, allow_fragments)
scheme, netloc, url, query, fragment = splitresult
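
The restored docstring maps one-to-one onto the named tuple; for example:

    from urllib.parse import urlparse

    p = urlparse('https://user:pw@example.com:8042/over/there;type=a?name=ferret#nose')
    assert (p.scheme, p.netloc) == ('https', 'user:pw@example.com:8042')
    assert (p.path, p.params) == ('/over/there', 'type=a')
    assert (p.query, p.fragment) == ('name=ferret', 'nose')
    # netloc sub-components are attributes too:
    assert p.hostname == 'example.com' and p.port == 8042
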
@@ -422,45 +422,45 @@ def _checknetloc(netloc):
# looking for characters like \u2100 that expand to 'a/c'
# IDNA uses NFKC equivalence, so normalize for this check
import unicodedata
- n = netloc.replace('@', '') # ignore characters already included
- n = n.replace(':', '') # but not the surrounding text
- n = n.replace('#', '')
- n = n.replace('?', '')
- netloc2 = unicodedata.normalize('NFKC', n)
- if n == netloc2:
+ n = netloc.replace('@', '') # ignore characters already included
+ n = n.replace(':', '') # but not the surrounding text
+ n = n.replace('#', '')
+ n = n.replace('?', '')
+ netloc2 = unicodedata.normalize('NFKC', n)
+ if n == netloc2:
return
for c in '/?#@:':
if c in netloc2:
- raise ValueError("netloc '" + netloc + "' contains invalid " +
+ raise ValueError("netloc '" + netloc + "' contains invalid " +
"characters under NFKC normalization")
def urlsplit(url, scheme='', allow_fragments=True):
"""Parse a URL into 5 components:
<scheme>://<netloc>/<path>?<query>#<fragment>
-
- The result is a named 5-tuple with fields corresponding to the
- above. It is either a SplitResult or SplitResultBytes object,
- depending on the type of the url parameter.
-
- The username, password, hostname, and port sub-components of netloc
- can also be accessed as attributes of the returned object.
-
- The scheme argument provides the default value of the scheme
- component when no scheme is found in url.
-
- If allow_fragments is False, no attempt is made to separate the
- fragment component from the previous component, which can be either
- path or query.
-
- Note that % escapes are not expanded.
- """
-
+
+ The result is a named 5-tuple with fields corresponding to the
+ above. It is either a SplitResult or SplitResultBytes object,
+ depending on the type of the url parameter.
+
+ The username, password, hostname, and port sub-components of netloc
+ can also be accessed as attributes of the returned object.
+
+ The scheme argument provides the default value of the scheme
+ component when no scheme is found in url.
+
+ If allow_fragments is False, no attempt is made to separate the
+ fragment component from the previous component, which can be either
+ path or query.
+
+ Note that % escapes are not expanded.
+ """
+
url, scheme, _coerce_result = _coerce_args(url, scheme)
-
- for b in _UNSAFE_URL_BYTES_TO_REMOVE:
- url = url.replace(b, "")
- scheme = scheme.replace(b, "")
-
+
+ for b in _UNSAFE_URL_BYTES_TO_REMOVE:
+ url = url.replace(b, "")
+ scheme = scheme.replace(b, "")
+
allow_fragments = bool(allow_fragments)
key = url, scheme, allow_fragments, type(url), type(scheme)
cached = _parse_cache.get(key, None)
@@ -475,7 +475,7 @@ def urlsplit(url, scheme='', allow_fragments=True):
if c not in scheme_chars:
break
else:
- scheme, url = url[:i].lower(), url[i+1:]
+ scheme, url = url[:i].lower(), url[i+1:]
if url[:2] == '//':
netloc, url = _splitnetloc(url, 2)
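
The ``_UNSAFE_URL_BYTES_TO_REMOVE`` loop above strips tab, CR and LF from the URL before parsing, per the WHATWG spec; observable as:

    from urllib.parse import urlsplit

    parts = urlsplit('https://exam\tple.com/pa\nth')
    assert parts.netloc == 'example.com'
    assert parts.path == '/path'
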
@@ -651,8 +651,8 @@ def unquote(string, encoding='utf-8', errors='replace'):
unquote('abc%20def') -> 'abc def'.
"""
- if isinstance(string, bytes):
- return unquote_to_bytes(string).decode(encoding, errors)
+ if isinstance(string, bytes):
+ return unquote_to_bytes(string).decode(encoding, errors)
if '%' not in string:
string.split
return string
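
With the isinstance check above, ``unquote`` now accepts bytes as well as str:

    from urllib.parse import unquote

    assert unquote('abc%20def') == 'abc def'
    # bytes input is percent-decoded, then decoded with the given encoding:
    assert unquote(b'abc%20def') == 'abc def'
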
@@ -670,7 +670,7 @@ def unquote(string, encoding='utf-8', errors='replace'):
def parse_qs(qs, keep_blank_values=False, strict_parsing=False,
- encoding='utf-8', errors='replace', max_num_fields=None, separator='&'):
+ encoding='utf-8', errors='replace', max_num_fields=None, separator='&'):
"""Parse a query given as a string argument.
Arguments:
@@ -694,15 +694,15 @@ def parse_qs(qs, keep_blank_values=False, strict_parsing=False,
max_num_fields: int. If set, then throws a ValueError if there
are more than n fields read by parse_qsl().
- separator: str. The symbol to use for separating the query arguments.
- Defaults to &.
-
+ separator: str. The symbol to use for separating the query arguments.
+ Defaults to &.
+
Returns a dictionary.
"""
parsed_result = {}
pairs = parse_qsl(qs, keep_blank_values, strict_parsing,
encoding=encoding, errors=errors,
- max_num_fields=max_num_fields, separator=separator)
+ max_num_fields=max_num_fields, separator=separator)
for name, value in pairs:
if name in parsed_result:
parsed_result[name].append(value)
@@ -712,7 +712,7 @@ def parse_qs(qs, keep_blank_values=False, strict_parsing=False,
def parse_qsl(qs, keep_blank_values=False, strict_parsing=False,
- encoding='utf-8', errors='replace', max_num_fields=None, separator='&'):
+ encoding='utf-8', errors='replace', max_num_fields=None, separator='&'):
"""Parse a query given as a string argument.
Arguments:
@@ -735,26 +735,26 @@ def parse_qsl(qs, keep_blank_values=False, strict_parsing=False,
max_num_fields: int. If set, then throws a ValueError
if there are more than n fields read by parse_qsl().
- separator: str. The symbol to use for separating the query arguments.
- Defaults to &.
-
+ separator: str. The symbol to use for separating the query arguments.
+ Defaults to &.
+
Returns a list, as G-d intended.
"""
qs, _coerce_result = _coerce_args(qs)
- separator, _ = _coerce_args(separator)
-
- if not separator or (not isinstance(separator, (str, bytes))):
- raise ValueError("Separator must be of type string or bytes.")
+ separator, _ = _coerce_args(separator)
+ if not separator or (not isinstance(separator, (str, bytes))):
+ raise ValueError("Separator must be of type string or bytes.")
+
# If max_num_fields is defined then check that the number of fields
# is less than max_num_fields. This prevents a memory exhaustion DOS
# attack via post bodies with many fields.
if max_num_fields is not None:
- num_fields = 1 + qs.count(separator)
+ num_fields = 1 + qs.count(separator)
if max_num_fields < num_fields:
raise ValueError('Max number of fields exceeded')
- pairs = [s1 for s1 in qs.split(separator)]
+ pairs = [s1 for s1 in qs.split(separator)]
r = []
for name_value in pairs:
if not name_value and not strict_parsing:
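
The ``separator`` parameter documented above replaces the old behavior of splitting on both '&' and ';'; only the one given separator is honored per call:

    from urllib.parse import parse_qs, parse_qsl

    assert parse_qsl('a=1&b=2') == [('a', '1'), ('b', '2')]
    assert parse_qsl('a=1;b=2', separator=';') == [('a', '1'), ('b', '2')]
    assert parse_qs('a=1&a=2') == {'a': ['1', '2']}
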
@@ -820,32 +820,32 @@ def quote(string, safe='/', encoding=None, errors=None):
"""quote('abc def') -> 'abc%20def'
Each part of a URL, e.g. the path info, the query, etc., has a
- different set of reserved characters that must be quoted. The
- quote function offers a cautious (not minimal) way to quote a
- string for most of these parts.
+ different set of reserved characters that must be quoted. The
+ quote function offers a cautious (not minimal) way to quote a
+ string for most of these parts.
- RFC 3986 Uniform Resource Identifier (URI): Generic Syntax lists
- the following (un)reserved characters.
+ RFC 3986 Uniform Resource Identifier (URI): Generic Syntax lists
+ the following (un)reserved characters.
- unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
- reserved = gen-delims / sub-delims
- gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@"
- sub-delims = "!" / "$" / "&" / "'" / "(" / ")"
- / "*" / "+" / "," / ";" / "="
+ unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
+ reserved = gen-delims / sub-delims
+ gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@"
+ sub-delims = "!" / "$" / "&" / "'" / "(" / ")"
+ / "*" / "+" / "," / ";" / "="
- Each of the reserved characters is reserved in some component of a URL,
+ Each of the reserved characters is reserved in some component of a URL,
but not necessarily in all of them.
- The quote function %-escapes all characters that are neither in the
- unreserved chars ("always safe") nor the additional chars set via the
- safe arg.
-
- The default for the safe arg is '/'. The character is reserved, but in
- typical usage the quote function is being called on a path where the
- existing slash characters are to be preserved.
-
+ The quote function %-escapes all characters that are neither in the
+ unreserved chars ("always safe") nor the additional chars set via the
+ safe arg.
+
+ The default for the safe arg is '/'. The character is reserved, but in
+ typical usage the quote function is being called on a path where the
+ existing slash characters are to be preserved.
+
Python 3.7 updates from using RFC 2396 to RFC 3986 to quote URL strings.
- Now, "~" is included in the set of unreserved characters.
+ Now, "~" is included in the set of unreserved characters.
string and safe may be either str or bytes objects. encoding and errors
must not be specified if string is a bytes object.
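
The restored docstring is easiest to read alongside a few concrete calls:

    from urllib.parse import quote

    assert quote('abc def') == 'abc%20def'
    # '/' is safe by default; pass safe='' to escape it too.
    assert quote('/a b/', safe='') == '%2Fa%20b%2F'
    # Since the move to RFC 3986, '~' is always unreserved.
    assert quote('~user') == '~user'
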
@@ -989,14 +989,14 @@ def urlencode(query, doseq=False, safe='', encoding=None, errors=None,
l.append(k + '=' + elt)
return '&'.join(l)
-
+
def to_bytes(url):
- warnings.warn("urllib.parse.to_bytes() is deprecated as of 3.8",
- DeprecationWarning, stacklevel=2)
- return _to_bytes(url)
-
-
-def _to_bytes(url):
+ warnings.warn("urllib.parse.to_bytes() is deprecated as of 3.8",
+ DeprecationWarning, stacklevel=2)
+ return _to_bytes(url)
+
+
+def _to_bytes(url):
"""to_bytes(u"URL") --> 'URL'."""
# Most URL schemes require ASCII. If that changes, the conversion
# can be relaxed.
@@ -1009,29 +1009,29 @@ def _to_bytes(url):
" contains non-ASCII characters")
return url
-
+
def unwrap(url):
- """Transform a string like '<URL:scheme://host/path>' into 'scheme://host/path'.
-
- The string is returned unchanged if it's not a wrapped URL.
- """
+ """Transform a string like '<URL:scheme://host/path>' into 'scheme://host/path'.
+
+ The string is returned unchanged if it's not a wrapped URL.
+ """
url = str(url).strip()
if url[:1] == '<' and url[-1:] == '>':
url = url[1:-1].strip()
- if url[:4] == 'URL:':
- url = url[4:].strip()
+ if url[:4] == 'URL:':
+ url = url[4:].strip()
return url
-
-def splittype(url):
- warnings.warn("urllib.parse.splittype() is deprecated as of 3.8, "
- "use urllib.parse.urlparse() instead",
- DeprecationWarning, stacklevel=2)
- return _splittype(url)
-
-
+
+def splittype(url):
+ warnings.warn("urllib.parse.splittype() is deprecated as of 3.8, "
+ "use urllib.parse.urlparse() instead",
+ DeprecationWarning, stacklevel=2)
+ return _splittype(url)
+
+
_typeprog = None
-def _splittype(url):
+def _splittype(url):
"""splittype('type:opaquestring') --> 'type', 'opaquestring'."""
global _typeprog
if _typeprog is None:
@@ -1043,16 +1043,16 @@ def _splittype(url):
return scheme.lower(), data
return None, url
-
-def splithost(url):
- warnings.warn("urllib.parse.splithost() is deprecated as of 3.8, "
- "use urllib.parse.urlparse() instead",
- DeprecationWarning, stacklevel=2)
- return _splithost(url)
-
-
+
+def splithost(url):
+ warnings.warn("urllib.parse.splithost() is deprecated as of 3.8, "
+ "use urllib.parse.urlparse() instead",
+ DeprecationWarning, stacklevel=2)
+ return _splithost(url)
+
+
_hostprog = None
-def _splithost(url):
+def _splithost(url):
"""splithost('//host[:port]/path') --> 'host[:port]', '/path'."""
global _hostprog
if _hostprog is None:
@@ -1066,64 +1066,64 @@ def _splithost(url):
return host_port, path
return None, url
-
+
def splituser(host):
- warnings.warn("urllib.parse.splituser() is deprecated as of 3.8, "
- "use urllib.parse.urlparse() instead",
- DeprecationWarning, stacklevel=2)
- return _splituser(host)
-
-
-def _splituser(host):
+ warnings.warn("urllib.parse.splituser() is deprecated as of 3.8, "
+ "use urllib.parse.urlparse() instead",
+ DeprecationWarning, stacklevel=2)
+ return _splituser(host)
+
+
+def _splituser(host):
"""splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
user, delim, host = host.rpartition('@')
return (user if delim else None), host
-
+
def splitpasswd(user):
- warnings.warn("urllib.parse.splitpasswd() is deprecated as of 3.8, "
- "use urllib.parse.urlparse() instead",
- DeprecationWarning, stacklevel=2)
- return _splitpasswd(user)
-
-
-def _splitpasswd(user):
+ warnings.warn("urllib.parse.splitpasswd() is deprecated as of 3.8, "
+ "use urllib.parse.urlparse() instead",
+ DeprecationWarning, stacklevel=2)
+ return _splitpasswd(user)
+
+
+def _splitpasswd(user):
"""splitpasswd('user:passwd') -> 'user', 'passwd'."""
user, delim, passwd = user.partition(':')
return user, (passwd if delim else None)
-
-def splitport(host):
- warnings.warn("urllib.parse.splitport() is deprecated as of 3.8, "
- "use urllib.parse.urlparse() instead",
- DeprecationWarning, stacklevel=2)
- return _splitport(host)
-
-
+
+def splitport(host):
+ warnings.warn("urllib.parse.splitport() is deprecated as of 3.8, "
+ "use urllib.parse.urlparse() instead",
+ DeprecationWarning, stacklevel=2)
+ return _splitport(host)
+
+
# splittag('/path#tag') --> '/path', 'tag'
_portprog = None
-def _splitport(host):
+def _splitport(host):
"""splitport('host:port') --> 'host', 'port'."""
global _portprog
if _portprog is None:
- _portprog = re.compile('(.*):([0-9]*)', re.DOTALL)
+ _portprog = re.compile('(.*):([0-9]*)', re.DOTALL)
- match = _portprog.fullmatch(host)
+ match = _portprog.fullmatch(host)
if match:
host, port = match.groups()
if port:
return host, port
return host, None
-
+
def splitnport(host, defport=-1):
- warnings.warn("urllib.parse.splitnport() is deprecated as of 3.8, "
- "use urllib.parse.urlparse() instead",
- DeprecationWarning, stacklevel=2)
- return _splitnport(host, defport)
-
-
-def _splitnport(host, defport=-1):
+ warnings.warn("urllib.parse.splitnport() is deprecated as of 3.8, "
+ "use urllib.parse.urlparse() instead",
+ DeprecationWarning, stacklevel=2)
+ return _splitnport(host, defport)
+
+
+def _splitnport(host, defport=-1):
"""Split host and port, returning numeric port.
Return given default port if no ':' found; defaults to -1.
Return numerical port if a valid number is found after ':'.
@@ -1139,59 +1139,59 @@ def _splitnport(host, defport=-1):
return host, nport
return host, defport
-
+
def splitquery(url):
- warnings.warn("urllib.parse.splitquery() is deprecated as of 3.8, "
- "use urllib.parse.urlparse() instead",
- DeprecationWarning, stacklevel=2)
- return _splitquery(url)
-
-
-def _splitquery(url):
+ warnings.warn("urllib.parse.splitquery() is deprecated as of 3.8, "
+ "use urllib.parse.urlparse() instead",
+ DeprecationWarning, stacklevel=2)
+ return _splitquery(url)
+
+
+def _splitquery(url):
"""splitquery('/path?query') --> '/path', 'query'."""
path, delim, query = url.rpartition('?')
if delim:
return path, query
return url, None
-
+
def splittag(url):
- warnings.warn("urllib.parse.splittag() is deprecated as of 3.8, "
- "use urllib.parse.urlparse() instead",
- DeprecationWarning, stacklevel=2)
- return _splittag(url)
-
-
-def _splittag(url):
+ warnings.warn("urllib.parse.splittag() is deprecated as of 3.8, "
+ "use urllib.parse.urlparse() instead",
+ DeprecationWarning, stacklevel=2)
+ return _splittag(url)
+
+
+def _splittag(url):
"""splittag('/path#tag') --> '/path', 'tag'."""
path, delim, tag = url.rpartition('#')
if delim:
return path, tag
return url, None
-
+
def splitattr(url):
- warnings.warn("urllib.parse.splitattr() is deprecated as of 3.8, "
- "use urllib.parse.urlparse() instead",
- DeprecationWarning, stacklevel=2)
- return _splitattr(url)
-
-
-def _splitattr(url):
+ warnings.warn("urllib.parse.splitattr() is deprecated as of 3.8, "
+ "use urllib.parse.urlparse() instead",
+ DeprecationWarning, stacklevel=2)
+ return _splitattr(url)
+
+
+def _splitattr(url):
"""splitattr('/path;attr1=value1;attr2=value2;...') ->
'/path', ['attr1=value1', 'attr2=value2', ...]."""
words = url.split(';')
return words[0], words[1:]
-
+
def splitvalue(attr):
- warnings.warn("urllib.parse.splitvalue() is deprecated as of 3.8, "
- "use urllib.parse.parse_qsl() instead",
- DeprecationWarning, stacklevel=2)
- return _splitvalue(attr)
-
-
-def _splitvalue(attr):
+ warnings.warn("urllib.parse.splitvalue() is deprecated as of 3.8, "
+ "use urllib.parse.parse_qsl() instead",
+ DeprecationWarning, stacklevel=2)
+ return _splitvalue(attr)
+
+
+def _splitvalue(attr):
"""splitvalue('attr=value') --> 'attr', 'value'."""
attr, delim, value = attr.partition('=')
return attr, (value if delim else None)
diff --git a/contrib/tools/python3/src/Lib/urllib/request.py b/contrib/tools/python3/src/Lib/urllib/request.py
index bbdc2254e3..6d4053afd0 100644
--- a/contrib/tools/python3/src/Lib/urllib/request.py
+++ b/contrib/tools/python3/src/Lib/urllib/request.py
@@ -64,7 +64,7 @@ opener = urllib.request.build_opener(proxy_support, authinfo,
# install it
urllib.request.install_opener(opener)
-f = urllib.request.urlopen('https://www.python.org/')
+f = urllib.request.urlopen('https://www.python.org/')
"""
# XXX issues:
@@ -102,8 +102,8 @@ import warnings
from urllib.error import URLError, HTTPError, ContentTooShortError
from urllib.parse import (
urlparse, urlsplit, urljoin, unwrap, quote, unquote,
- _splittype, _splithost, _splitport, _splituser, _splitpasswd,
- _splitattr, _splitquery, _splitvalue, _splittag, _to_bytes,
+ _splittype, _splithost, _splitport, _splituser, _splitpasswd,
+ _splitattr, _splitquery, _splitvalue, _splittag, _to_bytes,
unquote_to_bytes, urlunparse)
from urllib.response import addinfourl, addclosehook
@@ -164,9 +164,9 @@ def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
The *cadefault* parameter is ignored.
- This function always returns an object which can work as a
- context manager and has the properties url, headers, and status.
- See urllib.response.addinfourl for more detail on these properties.
+ This function always returns an object which can work as a
+ context manager and has the properties url, headers, and status.
+ See urllib.response.addinfourl for more detail on these properties.
For HTTP and HTTPS URLs, this function returns a http.client.HTTPResponse
object slightly modified. In addition to the three new methods above, the
@@ -234,7 +234,7 @@ def urlretrieve(url, filename=None, reporthook=None, data=None):
Returns a tuple containing the path to the newly created
data file as well as the resulting HTTPMessage object.
"""
- url_type, path = _splittype(url)
+ url_type, path = _splittype(url)
with contextlib.closing(urlopen(url, data)) as fp:
headers = fp.info()
@@ -342,7 +342,7 @@ class Request:
def full_url(self, url):
# unwrap('<URL:type://host/path>') --> 'type://host/path'
self._full_url = unwrap(url)
- self._full_url, self.fragment = _splittag(self._full_url)
+ self._full_url, self.fragment = _splittag(self._full_url)
self._parse()
@full_url.deleter
@@ -370,10 +370,10 @@ class Request:
self.data = None
def _parse(self):
- self.type, rest = _splittype(self._full_url)
+ self.type, rest = _splittype(self._full_url)
if self.type is None:
raise ValueError("unknown url type: %r" % self.full_url)
- self.host, self.selector = _splithost(rest)
+ self.host, self.selector = _splithost(rest)
if self.host:
self.host = unquote(self.host)
@@ -418,7 +418,7 @@ class Request:
self.unredirected_hdrs.pop(header_name, None)
def header_items(self):
- hdrs = {**self.unredirected_hdrs, **self.headers}
+ hdrs = {**self.unredirected_hdrs, **self.headers}
return list(hdrs.items())
class OpenerDirector:
@@ -513,7 +513,7 @@ class OpenerDirector:
meth = getattr(processor, meth_name)
req = meth(req)
- sys.audit('urllib.Request', req.full_url, req.data, req.headers, req.get_method())
+ sys.audit('urllib.Request', req.full_url, req.data, req.headers, req.get_method())
response = self._open(req, data)
# post-process response
@@ -760,7 +760,7 @@ def _parse_proxy(proxy):
According to RFC 3986, having an authority component means the URL must
have two slashes after the scheme.
"""
- scheme, r_scheme = _splittype(proxy)
+ scheme, r_scheme = _splittype(proxy)
if not r_scheme.startswith("/"):
# authority
scheme = None
@@ -771,17 +771,17 @@ def _parse_proxy(proxy):
raise ValueError("proxy URL with no authority: %r" % proxy)
# We have an authority, so for RFC 3986-compliant URLs (by ss 3.
# and 3.3.), path is empty or starts with '/'
- if '@' in r_scheme:
- host_separator = r_scheme.find('@')
- end = r_scheme.find("/", host_separator)
- else:
- end = r_scheme.find("/", 2)
+ if '@' in r_scheme:
+ host_separator = r_scheme.find('@')
+ end = r_scheme.find("/", host_separator)
+ else:
+ end = r_scheme.find("/", 2)
if end == -1:
end = None
authority = r_scheme[2:end]
- userinfo, hostport = _splituser(authority)
+ userinfo, hostport = _splituser(authority)
if userinfo is not None:
- user, password = _splitpasswd(userinfo)
+ user, password = _splitpasswd(userinfo)
else:
user = password = None
return scheme, user, password, hostport
@@ -796,7 +796,7 @@ class ProxyHandler(BaseHandler):
assert hasattr(proxies, 'keys'), "proxies must be a mapping"
self.proxies = proxies
for type, url in proxies.items():
- type = type.lower()
+ type = type.lower()
setattr(self, '%s_open' % type,
lambda r, proxy=url, type=type, meth=self.proxy_open:
meth(r, proxy, type))
@@ -869,7 +869,7 @@ class HTTPPasswordMgr:
scheme = None
authority = uri
path = '/'
- host, port = _splitport(authority)
+ host, port = _splitport(authority)
if default_port and port is None and scheme is not None:
dport = {"http": 80,
"https": 443,
@@ -941,15 +941,15 @@ class AbstractBasicAuthHandler:
# allow for double- and single-quoted realm values
# (single quotes are a violation of the RFC, but appear in the wild)
- rx = re.compile('(?:^|,)' # start of the string or ','
- '[ \t]*' # optional whitespaces
- '([^ \t,]+)' # scheme like "Basic"
- '[ \t]+' # mandatory whitespaces
- # realm=xxx
- # realm='xxx'
- # realm="xxx"
- 'realm=(["\']?)([^"\']*)\\2',
- re.I)
+ rx = re.compile('(?:^|,)' # start of the string or ','
+ '[ \t]*' # optional whitespaces
+ '([^ \t,]+)' # scheme like "Basic"
+ '[ \t]+' # mandatory whitespaces
+ # realm=xxx
+ # realm='xxx'
+ # realm="xxx"
+ 'realm=(["\']?)([^"\']*)\\2',
+ re.I)
# XXX could pre-emptively send auth info already accepted (RFC 2617,
# end of section 2, and section 1.2 immediately after "credentials"
@@ -961,52 +961,52 @@ class AbstractBasicAuthHandler:
self.passwd = password_mgr
self.add_password = self.passwd.add_password
- def _parse_realm(self, header):
- # parse WWW-Authenticate header: accept multiple challenges per header
- found_challenge = False
- for mo in AbstractBasicAuthHandler.rx.finditer(header):
- scheme, quote, realm = mo.groups()
- if quote not in ['"', "'"]:
- warnings.warn("Basic Auth Realm was unquoted",
- UserWarning, 3)
-
- yield (scheme, realm)
-
- found_challenge = True
-
- if not found_challenge:
- if header:
- scheme = header.split()[0]
- else:
- scheme = ''
- yield (scheme, None)
-
+ def _parse_realm(self, header):
+ # parse WWW-Authenticate header: accept multiple challenges per header
+ found_challenge = False
+ for mo in AbstractBasicAuthHandler.rx.finditer(header):
+ scheme, quote, realm = mo.groups()
+ if quote not in ['"', "'"]:
+ warnings.warn("Basic Auth Realm was unquoted",
+ UserWarning, 3)
+
+ yield (scheme, realm)
+
+ found_challenge = True
+
+ if not found_challenge:
+ if header:
+ scheme = header.split()[0]
+ else:
+ scheme = ''
+ yield (scheme, None)
+
def http_error_auth_reqed(self, authreq, host, req, headers):
# host may be an authority (without userinfo) or a URL with an
# authority
- headers = headers.get_all(authreq)
- if not headers:
- # no header found
- return
-
- unsupported = None
- for header in headers:
- for scheme, realm in self._parse_realm(header):
- if scheme.lower() != 'basic':
- unsupported = scheme
- continue
-
- if realm is not None:
- # Use the first matching Basic challenge.
- # Ignore following challenges even if they use the Basic
- # scheme.
- return self.retry_http_basic_auth(host, req, realm)
-
- if unsupported is not None:
- raise ValueError("AbstractBasicAuthHandler does not "
- "support the following scheme: %r"
- % (scheme,))
-
+ headers = headers.get_all(authreq)
+ if not headers:
+ # no header found
+ return
+
+ unsupported = None
+ for header in headers:
+ for scheme, realm in self._parse_realm(header):
+ if scheme.lower() != 'basic':
+ unsupported = scheme
+ continue
+
+ if realm is not None:
+ # Use the first matching Basic challenge.
+ # Ignore following challenges even if they use the Basic
+ # scheme.
+ return self.retry_http_basic_auth(host, req, realm)
+
+ if unsupported is not None:
+ raise ValueError("AbstractBasicAuthHandler does not "
+ "support the following scheme: %r"
+ % (scheme,))
+
def retry_http_basic_auth(self, host, req, realm):
user, pw = self.passwd.find_user_password(realm, host)
if pw is not None:
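
The regex and ``_parse_realm`` generator above accept several challenges in one WWW-Authenticate header; a reduced standalone sketch of the same pattern:

    import re

    rx = re.compile('(?:^|,)'            # start of string or ','
                    '[ \t]*'             # optional whitespace
                    '([^ \t,]+)'         # scheme, e.g. "Basic"
                    '[ \t]+'             # mandatory whitespace
                    'realm=(["\']?)([^"\']*)\\2',
                    re.I)

    header = 'Basic realm="files", Digest realm="api"'
    challenges = [(m.group(1), m.group(3)) for m in rx.finditer(header)]
    assert challenges == [('Basic', 'files'), ('Digest', 'api')]
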
@@ -1171,11 +1171,11 @@ class AbstractDigestAuthHandler:
A2 = "%s:%s" % (req.get_method(),
# XXX selector: what about proxies and full urls
req.selector)
- # NOTE: As per RFC 2617, when server sends "auth,auth-int", the client could use either `auth`
- # or `auth-int` to the response back. we use `auth` to send the response back.
- if qop is None:
- respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
- elif 'auth' in qop.split(','):
+ # NOTE: As per RFC 2617, when the server sends "auth,auth-int", the client
+ # may respond with either `auth` or `auth-int`; we send the response back with `auth`.
+ if qop is None:
+ respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
+ elif 'auth' in qop.split(','):
if nonce == self.last_nonce:
self.nonce_count += 1
else:
@@ -1183,7 +1183,7 @@ class AbstractDigestAuthHandler:
self.last_nonce = nonce
ncvalue = '%08x' % self.nonce_count
cnonce = self.get_cnonce(nonce)
- noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, 'auth', H(A2))
+ noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, 'auth', H(A2))
respdig = KD(H(A1), noncebit)
else:
# XXX handle auth-int.
@@ -1291,8 +1291,8 @@ class AbstractHTTPHandler(BaseHandler):
sel_host = host
if request.has_proxy():
- scheme, sel = _splittype(request.selector)
- sel_host, sel_path = _splithost(sel)
+ scheme, sel = _splittype(request.selector)
+ sel_host, sel_path = _splithost(sel)
if not request.has_header('Host'):
request.add_unredirected_header('Host', sel_host)
for name, value in self.parent.addheaders:
@@ -1508,7 +1508,7 @@ class FileHandler(BaseHandler):
'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
(mtype or 'text/plain', size, modified))
if host:
- host, port = _splitport(host)
+ host, port = _splitport(host)
if not host or \
(not port and _safe_gethostbyname(host) in self.get_names()):
if host:
@@ -1533,16 +1533,16 @@ class FTPHandler(BaseHandler):
host = req.host
if not host:
raise URLError('ftp error: no host given')
- host, port = _splitport(host)
+ host, port = _splitport(host)
if port is None:
port = ftplib.FTP_PORT
else:
port = int(port)
# username/password handling
- user, host = _splituser(host)
+ user, host = _splituser(host)
if user:
- user, passwd = _splitpasswd(user)
+ user, passwd = _splitpasswd(user)
else:
passwd = None
host = unquote(host)
@@ -1553,7 +1553,7 @@ class FTPHandler(BaseHandler):
host = socket.gethostbyname(host)
except OSError as msg:
raise URLError(msg)
- path, attrs = _splitattr(req.selector)
+ path, attrs = _splitattr(req.selector)
dirs = path.split('/')
dirs = list(map(unquote, dirs))
dirs, file = dirs[:-1], dirs[-1]
@@ -1563,7 +1563,7 @@ class FTPHandler(BaseHandler):
fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout)
type = file and 'I' or 'D'
for attr in attrs:
- attr, value = _splitvalue(attr)
+ attr, value = _splitvalue(attr)
if attr.lower() == 'type' and \
value in ('a', 'A', 'i', 'I', 'd', 'D'):
type = value.upper()
@@ -1757,26 +1757,26 @@ class URLopener:
# External interface
def open(self, fullurl, data=None):
"""Use URLopener().open(file) instead of open(file, 'r')."""
- fullurl = unwrap(_to_bytes(fullurl))
+ fullurl = unwrap(_to_bytes(fullurl))
fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|")
if self.tempcache and fullurl in self.tempcache:
filename, headers = self.tempcache[fullurl]
fp = open(filename, 'rb')
return addinfourl(fp, headers, fullurl)
- urltype, url = _splittype(fullurl)
+ urltype, url = _splittype(fullurl)
if not urltype:
urltype = 'file'
if urltype in self.proxies:
proxy = self.proxies[urltype]
- urltype, proxyhost = _splittype(proxy)
- host, selector = _splithost(proxyhost)
+ urltype, proxyhost = _splittype(proxy)
+ host, selector = _splithost(proxyhost)
url = (host, fullurl) # Signal special case to open_*()
else:
proxy = None
name = 'open_' + urltype
self.type = urltype
name = name.replace('-', '_')
- if not hasattr(self, name) or name == 'open_local_file':
+ if not hasattr(self, name) or name == 'open_local_file':
if proxy:
return self.open_unknown_proxy(proxy, fullurl, data)
else:
@@ -1793,29 +1793,29 @@ class URLopener:
def open_unknown(self, fullurl, data=None):
"""Overridable interface to open unknown URL type."""
- type, url = _splittype(fullurl)
+ type, url = _splittype(fullurl)
raise OSError('url error', 'unknown url type', type)
def open_unknown_proxy(self, proxy, fullurl, data=None):
"""Overridable interface to open unknown URL type."""
- type, url = _splittype(fullurl)
+ type, url = _splittype(fullurl)
raise OSError('url error', 'invalid proxy for %s' % type, proxy)
# External interface
def retrieve(self, url, filename=None, reporthook=None, data=None):
"""retrieve(url) returns (filename, headers) for a local object
or (tempfilename, headers) for a remote object."""
- url = unwrap(_to_bytes(url))
+ url = unwrap(_to_bytes(url))
if self.tempcache and url in self.tempcache:
return self.tempcache[url]
- type, url1 = _splittype(url)
+ type, url1 = _splittype(url)
if filename is None and (not type or type == 'file'):
try:
fp = self.open_local_file(url1)
hdrs = fp.info()
fp.close()
- return url2pathname(_splithost(url1)[1]), hdrs
- except OSError:
+ return url2pathname(_splithost(url1)[1]), hdrs
+ except OSError:
pass
fp = self.open(url, data)
try:
@@ -1823,10 +1823,10 @@ class URLopener:
if filename:
tfp = open(filename, 'wb')
else:
- garbage, path = _splittype(url)
- garbage, path = _splithost(path or "")
- path, garbage = _splitquery(path or "")
- path, garbage = _splitattr(path or "")
+ garbage, path = _splittype(url)
+ garbage, path = _splithost(path or "")
+ path, garbage = _splitquery(path or "")
+ path, garbage = _splitattr(path or "")
suffix = os.path.splitext(path)[1]
(fd, filename) = tempfile.mkstemp(suffix)
self.__tempfiles.append(filename)
@@ -1883,25 +1883,25 @@ class URLopener:
user_passwd = None
proxy_passwd= None
if isinstance(url, str):
- host, selector = _splithost(url)
+ host, selector = _splithost(url)
if host:
- user_passwd, host = _splituser(host)
+ user_passwd, host = _splituser(host)
host = unquote(host)
realhost = host
else:
host, selector = url
# check whether the proxy contains authorization information
- proxy_passwd, host = _splituser(host)
+ proxy_passwd, host = _splituser(host)
# now we proceed with the url we want to obtain
- urltype, rest = _splittype(selector)
+ urltype, rest = _splittype(selector)
url = rest
user_passwd = None
if urltype.lower() != 'http':
realhost = None
else:
- realhost, rest = _splithost(rest)
+ realhost, rest = _splithost(rest)
if realhost:
- user_passwd, realhost = _splituser(realhost)
+ user_passwd, realhost = _splituser(realhost)
if user_passwd:
selector = "%s://%s%s" % (urltype, realhost, rest)
if proxy_bypass(realhost):
@@ -2007,7 +2007,7 @@ class URLopener:
"""Use local file."""
import email.utils
import mimetypes
- host, file = _splithost(url)
+ host, file = _splithost(url)
localname = url2pathname(file)
try:
stats = os.stat(localname)
@@ -2024,7 +2024,7 @@ class URLopener:
if file[:1] == '/':
urlfile = 'file://' + file
return addinfourl(open(localname, 'rb'), headers, urlfile)
- host, port = _splitport(host)
+ host, port = _splitport(host)
if (not port
and socket.gethostbyname(host) in ((localhost(),) + thishost())):
urlfile = file
@@ -2040,11 +2040,11 @@ class URLopener:
if not isinstance(url, str):
raise URLError('ftp error: proxy support for ftp protocol currently not implemented')
import mimetypes
- host, path = _splithost(url)
+ host, path = _splithost(url)
if not host: raise URLError('ftp error: no host given')
- host, port = _splitport(host)
- user, host = _splituser(host)
- if user: user, passwd = _splitpasswd(user)
+ host, port = _splitport(host)
+ user, host = _splituser(host)
+ if user: user, passwd = _splitpasswd(user)
else: passwd = None
host = unquote(host)
user = unquote(user or '')
@@ -2055,7 +2055,7 @@ class URLopener:
port = ftplib.FTP_PORT
else:
port = int(port)
- path, attrs = _splitattr(path)
+ path, attrs = _splitattr(path)
path = unquote(path)
dirs = path.split('/')
dirs, file = dirs[:-1], dirs[-1]
@@ -2077,7 +2077,7 @@ class URLopener:
if not file: type = 'D'
else: type = 'I'
for attr in attrs:
- attr, value = _splitvalue(attr)
+ attr, value = _splitvalue(attr)
if attr.lower() == 'type' and \
value in ('a', 'A', 'i', 'I', 'd', 'D'):
type = value.upper()
@@ -2260,11 +2260,11 @@ class FancyURLopener(URLopener):
return getattr(self,name)(url, realm, data)
def retry_proxy_http_basic_auth(self, url, realm, data=None):
- host, selector = _splithost(url)
+ host, selector = _splithost(url)
newurl = 'http://' + host + selector
proxy = self.proxies['http']
- urltype, proxyhost = _splittype(proxy)
- proxyhost, proxyselector = _splithost(proxyhost)
+ urltype, proxyhost = _splittype(proxy)
+ proxyhost, proxyselector = _splithost(proxyhost)
i = proxyhost.find('@') + 1
proxyhost = proxyhost[i:]
user, passwd = self.get_user_passwd(proxyhost, realm, i)
@@ -2278,11 +2278,11 @@ class FancyURLopener(URLopener):
return self.open(newurl, data)
def retry_proxy_https_basic_auth(self, url, realm, data=None):
- host, selector = _splithost(url)
+ host, selector = _splithost(url)
newurl = 'https://' + host + selector
proxy = self.proxies['https']
- urltype, proxyhost = _splittype(proxy)
- proxyhost, proxyselector = _splithost(proxyhost)
+ urltype, proxyhost = _splittype(proxy)
+ proxyhost, proxyselector = _splithost(proxyhost)
i = proxyhost.find('@') + 1
proxyhost = proxyhost[i:]
user, passwd = self.get_user_passwd(proxyhost, realm, i)
@@ -2296,7 +2296,7 @@ class FancyURLopener(URLopener):
return self.open(newurl, data)
def retry_http_basic_auth(self, url, realm, data=None):
- host, selector = _splithost(url)
+ host, selector = _splithost(url)
i = host.find('@') + 1
host = host[i:]
user, passwd = self.get_user_passwd(host, realm, i)
@@ -2310,7 +2310,7 @@ class FancyURLopener(URLopener):
return self.open(newurl, data)
def retry_https_basic_auth(self, url, realm, data=None):
- host, selector = _splithost(url)
+ host, selector = _splithost(url)
i = host.find('@') + 1
host = host[i:]
user, passwd = self.get_user_passwd(host, realm, i)
@@ -2527,26 +2527,26 @@ def proxy_bypass_environment(host, proxies=None):
try:
no_proxy = proxies['no']
except KeyError:
- return False
+ return False
# '*' is special case for always bypass
if no_proxy == '*':
- return True
- host = host.lower()
+ return True
+ host = host.lower()
# strip port off host
- hostonly, port = _splitport(host)
+ hostonly, port = _splitport(host)
# check if the host ends with any of the DNS suffixes
- for name in no_proxy.split(','):
- name = name.strip()
+ for name in no_proxy.split(','):
+ name = name.strip()
if name:
name = name.lstrip('.') # ignore leading dots
- name = name.lower()
- if hostonly == name or host == name:
- return True
- name = '.' + name
- if hostonly.endswith(name) or host.endswith(name):
- return True
+ name = name.lower()
+ if hostonly == name or host == name:
+ return True
+ name = '.' + name
+ if hostonly.endswith(name) or host.endswith(name):
+ return True
# otherwise, don't bypass
- return False
+ return False
# This code tests an OSX specific data structure but is testable on all
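
Stepping back to ``proxy_bypass_environment`` above: the restored ``no_proxy`` handling matches exact hosts and dot-suffixes, ignoring ports; for example:

    from urllib.request import proxy_bypass_environment

    proxies = {'no': 'localhost, .internal.example.com'}
    assert proxy_bypass_environment('localhost:8080', proxies)   # port stripped
    assert proxy_bypass_environment('svc.internal.example.com', proxies)
    assert not proxy_bypass_environment('example.org', proxies)
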
@@ -2565,7 +2565,7 @@ def _proxy_bypass_macosx_sysconf(host, proxy_settings):
"""
from fnmatch import fnmatch
- hostonly, port = _splitport(host)
+ hostonly, port = _splitport(host)
def ip2num(ipAddr):
parts = ipAddr.split('.')
@@ -2600,11 +2600,11 @@ def _proxy_bypass_macosx_sysconf(host, proxy_settings):
mask = 8 * (m.group(1).count('.') + 1)
else:
mask = int(mask[1:])
-
- if mask < 0 or mask > 32:
- # System libraries ignore invalid prefix lengths
- continue
-
+
+ if mask < 0 or mask > 32:
+ # System libraries ignore invalid prefix lengths
+ continue
+
mask = 32 - mask
if (hostIP >> mask) == (base >> mask):
@@ -2677,7 +2677,7 @@ elif os.name == 'nt':
for p in proxyServer.split(';'):
protocol, address = p.split('=', 1)
# See if address has a type:// prefix
- if not re.match('(?:[^/:]+)://', address):
+ if not re.match('(?:[^/:]+)://', address):
address = '%s://%s' % (protocol, address)
proxies[protocol] = address
else:
@@ -2724,7 +2724,7 @@ elif os.name == 'nt':
if not proxyEnable or not proxyOverride:
return 0
# try to make a host list from name and IP address.
- rawHost, port = _splitport(host)
+ rawHost, port = _splitport(host)
host = [rawHost]
try:
addr = socket.gethostbyname(rawHost)
diff --git a/contrib/tools/python3/src/Lib/urllib/response.py b/contrib/tools/python3/src/Lib/urllib/response.py
index 5a2c3cc78c..aab657304c 100644
--- a/contrib/tools/python3/src/Lib/urllib/response.py
+++ b/contrib/tools/python3/src/Lib/urllib/response.py
@@ -73,10 +73,10 @@ class addinfourl(addinfo):
self.url = url
self.code = code
- @property
- def status(self):
- return self.code
-
+ @property
+ def status(self):
+ return self.code
+
def getcode(self):
return self.code
diff --git a/contrib/tools/python3/src/Lib/urllib/robotparser.py b/contrib/tools/python3/src/Lib/urllib/robotparser.py
index c58565e394..71f74f4dc0 100644
--- a/contrib/tools/python3/src/Lib/urllib/robotparser.py
+++ b/contrib/tools/python3/src/Lib/urllib/robotparser.py
@@ -27,7 +27,7 @@ class RobotFileParser:
def __init__(self, url=''):
self.entries = []
- self.sitemaps = []
+ self.sitemaps = []
self.default_entry = None
self.disallow_all = False
self.allow_all = False
@@ -142,12 +142,12 @@ class RobotFileParser:
and numbers[1].strip().isdigit()):
entry.req_rate = RequestRate(int(numbers[0]), int(numbers[1]))
state = 2
- elif line[0] == "sitemap":
- # According to http://www.sitemaps.org/protocol.html
- # "This directive is independent of the user-agent line,
- # so it doesn't matter where you place it in your file."
- # Therefore we do not change the state of the parser.
- self.sitemaps.append(line[1])
+ elif line[0] == "sitemap":
+ # According to http://www.sitemaps.org/protocol.html
+ # "This directive is independent of the user-agent line,
+ # so it doesn't matter where you place it in your file."
+ # Therefore we do not change the state of the parser.
+ self.sitemaps.append(line[1])
if state == 2:
self._add_entry(entry)
@@ -186,9 +186,9 @@ class RobotFileParser:
for entry in self.entries:
if entry.applies_to(useragent):
return entry.delay
- if self.default_entry:
- return self.default_entry.delay
- return None
+ if self.default_entry:
+ return self.default_entry.delay
+ return None
def request_rate(self, useragent):
if not self.mtime():
@@ -196,20 +196,20 @@ class RobotFileParser:
for entry in self.entries:
if entry.applies_to(useragent):
return entry.req_rate
- if self.default_entry:
- return self.default_entry.req_rate
- return None
-
- def site_maps(self):
- if not self.sitemaps:
- return None
- return self.sitemaps
-
+ if self.default_entry:
+ return self.default_entry.req_rate
+ return None
+
+ def site_maps(self):
+ if not self.sitemaps:
+ return None
+ return self.sitemaps
+
def __str__(self):
entries = self.entries
if self.default_entry is not None:
entries = entries + [self.default_entry]
- return '\n\n'.join(map(str, entries))
+ return '\n\n'.join(map(str, entries))
class RuleLine:
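Editor's note: the restored sitemaps list, site_maps() accessor, and default-entry fallbacks can be exercised without any network access. A small sketch against an in-memory robots.txt; rp.modified() stamps a parse time so crawl_delay() can answer:

    from urllib.robotparser import RobotFileParser

    robots_txt = """\
    User-agent: *
    Crawl-delay: 2
    Sitemap: https://example.com/sitemap.xml
    """
    rp = RobotFileParser()
    rp.modified()
    rp.parse(robots_txt.splitlines())
    print(rp.site_maps())       # ['https://example.com/sitemap.xml']
    print(rp.crawl_delay('*'))  # 2, via the default '*' entry fallback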
diff --git a/contrib/tools/python3/src/Lib/uu.py b/contrib/tools/python3/src/Lib/uu.py
index 9f1f37f1a6..b7874a13da 100644
--- a/contrib/tools/python3/src/Lib/uu.py
+++ b/contrib/tools/python3/src/Lib/uu.py
@@ -73,14 +73,14 @@ def encode(in_file, out_file, name=None, mode=None, *, backtick=False):
name = '-'
if mode is None:
mode = 0o666
-
- #
- # Remove newline chars from name
- #
- name = name.replace('\n','\\n')
- name = name.replace('\r','\\r')
-
+
#
+ # Remove newline chars from name
+ #
+ name = name.replace('\n','\\n')
+ name = name.replace('\r','\\r')
+
+ #
# Write the data
#
out_file.write(('begin %o %s\n' % ((mode & 0o777), name)).encode("ascii"))
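Editor's note: the re-indented block above is a small hardening detail worth calling out: the file name is embedded verbatim in the 'begin' header line, so unescaped newlines would let a caller inject extra header lines. Worked once:

    name = 'evil\nname'
    name = name.replace('\n', '\\n').replace('\r', '\\r')
    header = 'begin %o %s\n' % (0o644, name)
    assert header == 'begin 644 evil\\nname\n'   # one header line, not two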
diff --git a/contrib/tools/python3/src/Lib/uuid.py b/contrib/tools/python3/src/Lib/uuid.py
index 5ae0a3e5fa..1ef891ce6f 100644
--- a/contrib/tools/python3/src/Lib/uuid.py
+++ b/contrib/tools/python3/src/Lib/uuid.py
@@ -52,21 +52,21 @@ from enum import Enum
__author__ = 'Ka-Ping Yee <ping@zesty.ca>'
-# The recognized platforms - known behaviors
-if sys.platform in ('win32', 'darwin'):
- _AIX = _LINUX = False
-else:
- import platform
- _platform_system = platform.system()
- _AIX = _platform_system == 'AIX'
- _LINUX = _platform_system == 'Linux'
-
-_MAC_DELIM = b':'
-_MAC_OMITS_LEADING_ZEROES = False
-if _AIX:
- _MAC_DELIM = b'.'
- _MAC_OMITS_LEADING_ZEROES = True
-
+# The recognized platforms - known behaviors
+if sys.platform in ('win32', 'darwin'):
+ _AIX = _LINUX = False
+else:
+ import platform
+ _platform_system = platform.system()
+ _AIX = _platform_system == 'AIX'
+ _LINUX = _platform_system == 'Linux'
+
+_MAC_DELIM = b':'
+_MAC_OMITS_LEADING_ZEROES = False
+if _AIX:
+ _MAC_DELIM = b'.'
+ _MAC_OMITS_LEADING_ZEROES = True
+
RESERVED_NCS, RFC_4122, RESERVED_MICROSOFT, RESERVED_FUTURE = [
'reserved for NCS compatibility', 'specified in RFC 4122',
'reserved for Microsoft compatibility', 'reserved for future definition']
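Editor's note: the platform table above feeds the MAC parsing further down. AIX prints dot-delimited octets with leading zeroes stripped, which is what _MAC_OMITS_LEADING_ZEROES compensates for; a sketch using the sample address quoted in the comments below:

    parts = b'fa.bc.de.f7.62.4'.split(b'.')
    hexstr = b''.join(p.rjust(2, b'0') for p in parts)   # b'04', not b'4'
    assert hexstr == b'fabcdef76204'
    print(hex(int(hexstr, 16)))                          # 0xfabcdef76204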
@@ -133,8 +133,8 @@ class UUID:
uuid_generate_time_safe(3).
"""
- __slots__ = ('int', 'is_safe', '__weakref__')
-
+ __slots__ = ('int', 'is_safe', '__weakref__')
+
def __init__(self, hex=None, bytes=None, bytes_le=None, fields=None,
int=None, version=None,
*, is_safe=SafeUUID.unknown):
@@ -218,23 +218,23 @@ class UUID:
# Set the version number.
int &= ~(0xf000 << 64)
int |= version << 76
- object.__setattr__(self, 'int', int)
- object.__setattr__(self, 'is_safe', is_safe)
+ object.__setattr__(self, 'int', int)
+ object.__setattr__(self, 'is_safe', is_safe)
def __getstate__(self):
- d = {'int': self.int}
+ d = {'int': self.int}
if self.is_safe != SafeUUID.unknown:
# is_safe is a SafeUUID instance. Return just its value, so that
# it can be un-pickled in older Python versions without SafeUUID.
- d['is_safe'] = self.is_safe.value
- return d
+ d['is_safe'] = self.is_safe.value
+ return d
def __setstate__(self, state):
- object.__setattr__(self, 'int', state['int'])
+ object.__setattr__(self, 'int', state['int'])
# is_safe was added in 3.7; it is also omitted when it is "unknown"
- object.__setattr__(self, 'is_safe',
- SafeUUID(state['is_safe'])
- if 'is_safe' in state else SafeUUID.unknown)
+ object.__setattr__(self, 'is_safe',
+ SafeUUID(state['is_safe'])
+ if 'is_safe' in state else SafeUUID.unknown)
def __eq__(self, other):
if isinstance(other, UUID):
@@ -355,33 +355,33 @@ class UUID:
if self.variant == RFC_4122:
return int((self.int >> 76) & 0xf)
-
-def _get_command_stdout(command, *args):
- import io, os, shutil, subprocess
-
- try:
- path_dirs = os.environ.get('PATH', os.defpath).split(os.pathsep)
- path_dirs.extend(['/sbin', '/usr/sbin'])
- executable = shutil.which(command, path=os.pathsep.join(path_dirs))
+
+def _get_command_stdout(command, *args):
+ import io, os, shutil, subprocess
+
+ try:
+ path_dirs = os.environ.get('PATH', os.defpath).split(os.pathsep)
+ path_dirs.extend(['/sbin', '/usr/sbin'])
+ executable = shutil.which(command, path=os.pathsep.join(path_dirs))
if executable is None:
return None
- # LC_ALL=C to ensure English output, stderr=DEVNULL to prevent output
- # on stderr (Note: we don't have an example where the words we search
- # for are actually localized, but in theory some system could do so.)
- env = dict(os.environ)
- env['LC_ALL'] = 'C'
- proc = subprocess.Popen((executable,) + args,
- stdout=subprocess.PIPE,
- stderr=subprocess.DEVNULL,
- env=env)
- if not proc:
- return None
- stdout, stderr = proc.communicate()
- return io.BytesIO(stdout)
- except (OSError, subprocess.SubprocessError):
- return None
-
-
+ # LC_ALL=C to ensure English output, stderr=DEVNULL to prevent output
+ # on stderr (Note: we don't have an example where the words we search
+ # for are actually localized, but in theory some system could do so.)
+ env = dict(os.environ)
+ env['LC_ALL'] = 'C'
+ proc = subprocess.Popen((executable,) + args,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.DEVNULL,
+ env=env)
+ if not proc:
+ return None
+ stdout, stderr = proc.communicate()
+ return io.BytesIO(stdout)
+ except (OSError, subprocess.SubprocessError):
+ return None
+
+
# For MAC (a.k.a. IEEE 802, or EUI-48) addresses, the second least significant
# bit of the first octet signifies whether the MAC address is universally (0)
# or locally (1) administered. Network cards from hardware manufacturers will
@@ -400,114 +400,114 @@ def _get_command_stdout(command, *args):
def _is_universal(mac):
return not (mac & (1 << 41))
-
-def _find_mac_near_keyword(command, args, keywords, get_word_index):
- """Searches a command's output for a MAC address near a keyword.
-
- Each line of words in the output is case-insensitively searched for
- any of the given keywords. Upon a match, get_word_index is invoked
- to pick a word from the line, given the index of the match. For
- example, lambda i: 0 would get the first word on the line, while
- lambda i: i - 1 would get the word preceding the keyword.
- """
- stdout = _get_command_stdout(command, args)
- if stdout is None:
- return None
-
+
+def _find_mac_near_keyword(command, args, keywords, get_word_index):
+ """Searches a command's output for a MAC address near a keyword.
+
+ Each line of words in the output is case-insensitively searched for
+ any of the given keywords. Upon a match, get_word_index is invoked
+ to pick a word from the line, given the index of the match. For
+ example, lambda i: 0 would get the first word on the line, while
+ lambda i: i - 1 would get the word preceding the keyword.
+ """
+ stdout = _get_command_stdout(command, args)
+ if stdout is None:
+ return None
+
first_local_mac = None
- for line in stdout:
- words = line.lower().rstrip().split()
- for i in range(len(words)):
- if words[i] in keywords:
- try:
- word = words[get_word_index(i)]
- mac = int(word.replace(_MAC_DELIM, b''), 16)
- except (ValueError, IndexError):
- # Virtual interfaces, such as those provided by
- # VPNs, do not have a colon-delimited MAC address
- # as expected, but a 16-byte HWAddr separated by
- # dashes. These should be ignored in favor of a
- # real MAC address
- pass
- else:
- if _is_universal(mac):
- return mac
- first_local_mac = first_local_mac or mac
+ for line in stdout:
+ words = line.lower().rstrip().split()
+ for i in range(len(words)):
+ if words[i] in keywords:
+ try:
+ word = words[get_word_index(i)]
+ mac = int(word.replace(_MAC_DELIM, b''), 16)
+ except (ValueError, IndexError):
+ # Virtual interfaces, such as those provided by
+ # VPNs, do not have a colon-delimited MAC address
+ # as expected, but a 16-byte HWAddr separated by
+ # dashes. These should be ignored in favor of a
+ # real MAC address
+ pass
+ else:
+ if _is_universal(mac):
+ return mac
+ first_local_mac = first_local_mac or mac
return first_local_mac or None
-
-def _parse_mac(word):
- # Accept 'HH:HH:HH:HH:HH:HH' MAC address (ex: '52:54:00:9d:0e:67'),
- # but reject IPv6 address (ex: 'fe80::5054:ff:fe9' or '123:2:3:4:5:6:7:8').
- #
- # Virtual interfaces, such as those provided by VPNs, do not have a
- # colon-delimited MAC address as expected, but a 16-byte HWAddr separated
- # by dashes. These should be ignored in favor of a real MAC address
- parts = word.split(_MAC_DELIM)
- if len(parts) != 6:
- return
- if _MAC_OMITS_LEADING_ZEROES:
- # (Only) on AIX the macaddr value given is not prefixed by 0, e.g.
- # en0 1500 link#2 fa.bc.de.f7.62.4 110854824 0 160133733 0 0
- # not
- # en0 1500 link#2 fa.bc.de.f7.62.04 110854824 0 160133733 0 0
- if not all(1 <= len(part) <= 2 for part in parts):
- return
- hexstr = b''.join(part.rjust(2, b'0') for part in parts)
- else:
- if not all(len(part) == 2 for part in parts):
- return
- hexstr = b''.join(parts)
- try:
- return int(hexstr, 16)
- except ValueError:
- return
-
-
-def _find_mac_under_heading(command, args, heading):
- """Looks for a MAC address under a heading in a command's output.
-
- The first line of words in the output is searched for the given
- heading. Words at the same word index as the heading in subsequent
- lines are then examined to see if they look like MAC addresses.
- """
- stdout = _get_command_stdout(command, args)
- if stdout is None:
- return None
-
- keywords = stdout.readline().rstrip().split()
- try:
- column_index = keywords.index(heading)
- except ValueError:
- return None
-
- first_local_mac = None
- for line in stdout:
- words = line.rstrip().split()
- try:
- word = words[column_index]
- except IndexError:
- continue
-
- mac = _parse_mac(word)
- if mac is None:
- continue
- if _is_universal(mac):
- return mac
- if first_local_mac is None:
- first_local_mac = mac
-
- return first_local_mac
-
-
-# The following functions call external programs to 'get' a macaddr value to
-# be used as basis for an uuid
+
+def _parse_mac(word):
+ # Accept 'HH:HH:HH:HH:HH:HH' MAC address (ex: '52:54:00:9d:0e:67'),
+ # but reject IPv6 address (ex: 'fe80::5054:ff:fe9' or '123:2:3:4:5:6:7:8').
+ #
+ # Virtual interfaces, such as those provided by VPNs, do not have a
+ # colon-delimited MAC address as expected, but a 16-byte HWAddr separated
+ # by dashes. These should be ignored in favor of a real MAC address
+ parts = word.split(_MAC_DELIM)
+ if len(parts) != 6:
+ return
+ if _MAC_OMITS_LEADING_ZEROES:
+ # (Only) on AIX the macaddr value given is not prefixed by 0, e.g.
+ # en0 1500 link#2 fa.bc.de.f7.62.4 110854824 0 160133733 0 0
+ # not
+ # en0 1500 link#2 fa.bc.de.f7.62.04 110854824 0 160133733 0 0
+ if not all(1 <= len(part) <= 2 for part in parts):
+ return
+ hexstr = b''.join(part.rjust(2, b'0') for part in parts)
+ else:
+ if not all(len(part) == 2 for part in parts):
+ return
+ hexstr = b''.join(parts)
+ try:
+ return int(hexstr, 16)
+ except ValueError:
+ return
+
+
+def _find_mac_under_heading(command, args, heading):
+ """Looks for a MAC address under a heading in a command's output.
+
+ The first line of words in the output is searched for the given
+ heading. Words at the same word index as the heading in subsequent
+ lines are then examined to see if they look like MAC addresses.
+ """
+ stdout = _get_command_stdout(command, args)
+ if stdout is None:
+ return None
+
+ keywords = stdout.readline().rstrip().split()
+ try:
+ column_index = keywords.index(heading)
+ except ValueError:
+ return None
+
+ first_local_mac = None
+ for line in stdout:
+ words = line.rstrip().split()
+ try:
+ word = words[column_index]
+ except IndexError:
+ continue
+
+ mac = _parse_mac(word)
+ if mac is None:
+ continue
+ if _is_universal(mac):
+ return mac
+ if first_local_mac is None:
+ first_local_mac = mac
+
+ return first_local_mac
+
+
+# The following functions call external programs to 'get' a macaddr value to
+# be used as basis for an uuid
def _ifconfig_getnode():
"""Get the hardware address on Unix by running ifconfig."""
# This works on Linux ('' or '-a'), Tru64 ('-av'), but not all Unixes.
keywords = (b'hwaddr', b'ether', b'address:', b'lladdr')
for args in ('', '-a', '-av'):
- mac = _find_mac_near_keyword('ifconfig', args, keywords, lambda i: i+1)
+ mac = _find_mac_near_keyword('ifconfig', args, keywords, lambda i: i+1)
if mac:
return mac
return None
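Editor's note: the bit test in _is_universal above deserves a worked example. Bit 41 of the 48-bit address is the second-least-significant bit of the first octet, the IEEE "locally administered" flag, so a clear bit marks a burned-in address:

    def is_universal(mac):
        return not (mac & (1 << 41))

    assert is_universal(0x001122334455)        # first octet 0x00 -> universal
    assert not is_universal(0x021122334455)    # first octet 0x02 -> locally administered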
@@ -515,7 +515,7 @@ def _ifconfig_getnode():
def _ip_getnode():
"""Get the hardware address on Unix by running ip."""
# This works on Linux with iproute2.
- mac = _find_mac_near_keyword('ip', 'link', [b'link/ether'], lambda i: i+1)
+ mac = _find_mac_near_keyword('ip', 'link', [b'link/ether'], lambda i: i+1)
if mac:
return mac
return None
@@ -529,17 +529,17 @@ def _arp_getnode():
return None
# Try getting the MAC addr from arp based on our IP address (Solaris).
- mac = _find_mac_near_keyword('arp', '-an', [os.fsencode(ip_addr)], lambda i: -1)
+ mac = _find_mac_near_keyword('arp', '-an', [os.fsencode(ip_addr)], lambda i: -1)
if mac:
return mac
# This works on OpenBSD
- mac = _find_mac_near_keyword('arp', '-an', [os.fsencode(ip_addr)], lambda i: i+1)
+ mac = _find_mac_near_keyword('arp', '-an', [os.fsencode(ip_addr)], lambda i: i+1)
if mac:
return mac
# This works on Linux, FreeBSD and NetBSD
- mac = _find_mac_near_keyword('arp', '-an', [os.fsencode('(%s)' % ip_addr)],
+ mac = _find_mac_near_keyword('arp', '-an', [os.fsencode('(%s)' % ip_addr)],
lambda i: i+2)
# Return None instead of 0.
if mac:
@@ -549,52 +549,52 @@ def _arp_getnode():
def _lanscan_getnode():
"""Get the hardware address on Unix by running lanscan."""
# This might work on HP-UX.
- return _find_mac_near_keyword('lanscan', '-ai', [b'lan0'], lambda i: 0)
+ return _find_mac_near_keyword('lanscan', '-ai', [b'lan0'], lambda i: 0)
def _netstat_getnode():
"""Get the hardware address on Unix by running netstat."""
- # This works on AIX and might work on Tru64 UNIX.
- return _find_mac_under_heading('netstat', '-ian', b'Address')
+ # This works on AIX and might work on Tru64 UNIX.
+ return _find_mac_under_heading('netstat', '-ian', b'Address')
def _ipconfig_getnode():
- """[DEPRECATED] Get the hardware address on Windows."""
- # bpo-40501: UuidCreateSequential() is now the only supported approach
- return _windll_getnode()
+ """[DEPRECATED] Get the hardware address on Windows."""
+ # bpo-40501: UuidCreateSequential() is now the only supported approach
+ return _windll_getnode()
def _netbios_getnode():
- """[DEPRECATED] Get the hardware address on Windows."""
- # bpo-40501: UuidCreateSequential() is now the only supported approach
- return _windll_getnode()
+ """[DEPRECATED] Get the hardware address on Windows."""
+ # bpo-40501: UuidCreateSequential() is now the only supported approach
+ return _windll_getnode()
# Import optional C extension at toplevel, to help disabling it when testing
try:
import _uuid
- _generate_time_safe = getattr(_uuid, "generate_time_safe", None)
- _UuidCreate = getattr(_uuid, "UuidCreate", None)
- _has_uuid_generate_time_safe = _uuid.has_uuid_generate_time_safe
+ _generate_time_safe = getattr(_uuid, "generate_time_safe", None)
+ _UuidCreate = getattr(_uuid, "UuidCreate", None)
+ _has_uuid_generate_time_safe = _uuid.has_uuid_generate_time_safe
except ImportError:
_uuid = None
- _generate_time_safe = None
- _UuidCreate = None
- _has_uuid_generate_time_safe = None
+ _generate_time_safe = None
+ _UuidCreate = None
+ _has_uuid_generate_time_safe = None
def _load_system_functions():
- """[DEPRECATED] Platform-specific functions loaded at import time"""
+ """[DEPRECATED] Platform-specific functions loaded at import time"""
def _unix_getnode():
- """Get the hardware address on Unix using the _uuid extension module."""
- if _generate_time_safe:
- uuid_time, _ = _generate_time_safe()
- return UUID(bytes=uuid_time).node
+ """Get the hardware address on Unix using the _uuid extension module."""
+ if _generate_time_safe:
+ uuid_time, _ = _generate_time_safe()
+ return UUID(bytes=uuid_time).node
def _windll_getnode():
- """Get the hardware address on Windows using the _uuid extension module."""
- if _UuidCreate:
- uuid_bytes = _UuidCreate()
- return UUID(bytes_le=uuid_bytes).node
+ """Get the hardware address on Windows using the _uuid extension module."""
+ if _UuidCreate:
+ uuid_bytes = _UuidCreate()
+ return UUID(bytes_le=uuid_bytes).node
def _random_getnode():
"""Get a random node ID."""
@@ -612,34 +612,34 @@ def _random_getnode():
return random.getrandbits(48) | (1 << 40)
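Editor's note: the fallback above sets bit 40, the multicast bit of the first octet, which no real interface uses as a unicast source address; that guarantees the random node can never collide with a genuine MAC. Worked once:

    import random

    node = random.getrandbits(48) | (1 << 40)
    assert (node >> 40) & 1 == 1               # multicast bit forced on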
-# _OS_GETTERS, when known, are targeted for a specific OS or platform.
-# The order is by 'common practice' on the specified platform.
-# Note: 'posix' and 'windows' _OS_GETTERS are prefixed by a dll/dlload() method
-# which, when successful, means none of these "external" methods are called.
-# _GETTERS is (also) used by test_uuid.py to SkipUnless(), e.g.,
-# @unittest.skipUnless(_uuid._ifconfig_getnode in _uuid._GETTERS, ...)
-if _LINUX:
- _OS_GETTERS = [_ip_getnode, _ifconfig_getnode]
-elif sys.platform == 'darwin':
- _OS_GETTERS = [_ifconfig_getnode, _arp_getnode, _netstat_getnode]
-elif sys.platform == 'win32':
- # bpo-40201: _windll_getnode will always succeed, so these are not needed
- _OS_GETTERS = []
-elif _AIX:
- _OS_GETTERS = [_netstat_getnode]
-else:
- _OS_GETTERS = [_ifconfig_getnode, _ip_getnode, _arp_getnode,
- _netstat_getnode, _lanscan_getnode]
-if os.name == 'posix':
- _GETTERS = [_unix_getnode] + _OS_GETTERS
-elif os.name == 'nt':
- _GETTERS = [_windll_getnode] + _OS_GETTERS
-else:
- _GETTERS = _OS_GETTERS
-
+# _OS_GETTERS, when known, are targeted for a specific OS or platform.
+# The order is by 'common practice' on the specified platform.
+# Note: 'posix' and 'windows' _OS_GETTERS are prefixed by a dll/dlload() method
+# which, when successful, means none of these "external" methods are called.
+# _GETTERS is (also) used by test_uuid.py to SkipUnless(), e.g.,
+# @unittest.skipUnless(_uuid._ifconfig_getnode in _uuid._GETTERS, ...)
+if _LINUX:
+ _OS_GETTERS = [_ip_getnode, _ifconfig_getnode]
+elif sys.platform == 'darwin':
+ _OS_GETTERS = [_ifconfig_getnode, _arp_getnode, _netstat_getnode]
+elif sys.platform == 'win32':
+ # bpo-40201: _windll_getnode will always succeed, so these are not needed
+ _OS_GETTERS = []
+elif _AIX:
+ _OS_GETTERS = [_netstat_getnode]
+else:
+ _OS_GETTERS = [_ifconfig_getnode, _ip_getnode, _arp_getnode,
+ _netstat_getnode, _lanscan_getnode]
+if os.name == 'posix':
+ _GETTERS = [_unix_getnode] + _OS_GETTERS
+elif os.name == 'nt':
+ _GETTERS = [_windll_getnode] + _OS_GETTERS
+else:
+ _GETTERS = _OS_GETTERS
+
_node = None
-def getnode():
+def getnode():
"""Get the hardware address as a 48-bit positive integer.
The first time this runs, it may launch a separate program, which could
@@ -651,7 +651,7 @@ def getnode():
if _node is not None:
return _node
- for getter in _GETTERS + [_random_getnode]:
+ for getter in _GETTERS + [_random_getnode]:
try:
_node = getter()
except:
@@ -681,10 +681,10 @@ def uuid1(node=None, clock_seq=None):
global _last_timestamp
import time
- nanoseconds = time.time_ns()
+ nanoseconds = time.time_ns()
# 0x01b21dd213814000 is the number of 100-ns intervals between the
# UUID epoch 1582-10-15 00:00:00 and the Unix epoch 1970-01-01 00:00:00.
- timestamp = nanoseconds // 100 + 0x01b21dd213814000
+ timestamp = nanoseconds // 100 + 0x01b21dd213814000
if _last_timestamp is not None and timestamp <= _last_timestamp:
timestamp = _last_timestamp + 1
_last_timestamp = timestamp
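Editor's note: the arithmetic restored above converts nanoseconds since the Unix epoch into the 60-bit UUID timestamp: divide by 100 for 100-ns ticks, then add 0x01b21dd213814000 ticks to rebase onto the UUID epoch of 1582-10-15.

    import time

    timestamp = time.time_ns() // 100 + 0x01b21dd213814000
    assert timestamp.bit_length() <= 60        # fits the UUID time field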
@@ -704,11 +704,11 @@ def uuid1(node=None, clock_seq=None):
def uuid3(namespace, name):
"""Generate a UUID from the MD5 hash of a namespace UUID and a name."""
from hashlib import md5
- digest = md5(
- namespace.bytes + bytes(name, "utf-8"),
- usedforsecurity=False
- ).digest()
- return UUID(bytes=digest[:16], version=3)
+ digest = md5(
+ namespace.bytes + bytes(name, "utf-8"),
+ usedforsecurity=False
+ ).digest()
+ return UUID(bytes=digest[:16], version=3)
def uuid4():
"""Generate a random UUID."""
diff --git a/contrib/tools/python3/src/Lib/venv/__init__.py b/contrib/tools/python3/src/Lib/venv/__init__.py
index 6f1af294ae..2e8e7d4341 100644
--- a/contrib/tools/python3/src/Lib/venv/__init__.py
+++ b/contrib/tools/python3/src/Lib/venv/__init__.py
@@ -12,8 +12,8 @@ import sys
import sysconfig
import types
-
-CORE_VENV_DEPS = ('pip', 'setuptools')
+
+CORE_VENV_DEPS = ('pip', 'setuptools')
logger = logging.getLogger(__name__)
@@ -40,21 +40,21 @@ class EnvBuilder:
:param with_pip: If True, ensure pip is installed in the virtual
environment
:param prompt: Alternative terminal prefix for the environment.
- :param upgrade_deps: Update the base venv modules to the latest on PyPI
+ :param upgrade_deps: Update the base venv modules to the latest on PyPI
"""
def __init__(self, system_site_packages=False, clear=False,
- symlinks=False, upgrade=False, with_pip=False, prompt=None,
- upgrade_deps=False):
+ symlinks=False, upgrade=False, with_pip=False, prompt=None,
+ upgrade_deps=False):
self.system_site_packages = system_site_packages
self.clear = clear
self.symlinks = symlinks
self.upgrade = upgrade
self.with_pip = with_pip
- if prompt == '.': # see bpo-38901
- prompt = os.path.basename(os.getcwd())
+ if prompt == '.': # see bpo-38901
+ prompt = os.path.basename(os.getcwd())
self.prompt = prompt
- self.upgrade_deps = upgrade_deps
+ self.upgrade_deps = upgrade_deps
def create(self, env_dir):
"""
@@ -81,8 +81,8 @@ class EnvBuilder:
# restore it and rewrite the configuration
self.system_site_packages = True
self.create_configuration(context)
- if self.upgrade_deps:
- self.upgrade_dependencies(context)
+ if self.upgrade_deps:
+ self.upgrade_dependencies(context)
def clear_directory(self, path):
for fn in os.listdir(path):
@@ -114,7 +114,7 @@ class EnvBuilder:
prompt = self.prompt if self.prompt is not None else context.env_name
context.prompt = '(%s) ' % prompt
create_if_needed(env_dir)
- executable = sys._base_executable
+ executable = sys._base_executable
dirname, exename = os.path.split(os.path.abspath(executable))
context.executable = executable
context.python_dir = dirname
@@ -142,20 +142,20 @@ class EnvBuilder:
context.bin_name = binname
context.env_exe = os.path.join(binpath, exename)
create_if_needed(binpath)
- # Assign and update the command to use when launching the newly created
- # environment, in case it isn't simply the executable script (e.g. bpo-45337)
- context.env_exec_cmd = context.env_exe
- if sys.platform == 'win32':
- # bpo-45337: Fix up env_exec_cmd to account for file system redirections.
- # Some redirects only apply to CreateFile and not CreateProcess
- real_env_exe = os.path.realpath(context.env_exe)
- if os.path.normcase(real_env_exe) != os.path.normcase(context.env_exe):
- logger.warning('Actual environment location may have moved due to '
- 'redirects, links or junctions.\n'
- ' Requested location: "%s"\n'
- ' Actual location: "%s"',
- context.env_exe, real_env_exe)
- context.env_exec_cmd = real_env_exe
+ # Assign and update the command to use when launching the newly created
+ # environment, in case it isn't simply the executable script (e.g. bpo-45337)
+ context.env_exec_cmd = context.env_exe
+ if sys.platform == 'win32':
+ # bpo-45337: Fix up env_exec_cmd to account for file system redirections.
+ # Some redirects only apply to CreateFile and not CreateProcess
+ real_env_exe = os.path.realpath(context.env_exe)
+ if os.path.normcase(real_env_exe) != os.path.normcase(context.env_exe):
+ logger.warning('Actual environment location may have moved due to '
+ 'redirects, links or junctions.\n'
+ ' Requested location: "%s"\n'
+ ' Actual location: "%s"',
+ context.env_exe, real_env_exe)
+ context.env_exec_cmd = real_env_exe
return context
def create_configuration(self, context):
@@ -176,69 +176,69 @@ class EnvBuilder:
incl = 'false'
f.write('include-system-site-packages = %s\n' % incl)
f.write('version = %d.%d.%d\n' % sys.version_info[:3])
- if self.prompt is not None:
- f.write(f'prompt = {self.prompt!r}\n')
-
- if os.name != 'nt':
- def symlink_or_copy(self, src, dst, relative_symlinks_ok=False):
- """
- Try symlinking a file, and if that fails, fall back to copying.
- """
- force_copy = not self.symlinks
- if not force_copy:
- try:
- if not os.path.islink(dst): # can't link to itself!
- if relative_symlinks_ok:
- assert os.path.dirname(src) == os.path.dirname(dst)
- os.symlink(os.path.basename(src), dst)
- else:
- os.symlink(src, dst)
- except Exception: # may need to use a more specific exception
- logger.warning('Unable to symlink %r to %r', src, dst)
- force_copy = True
- if force_copy:
- shutil.copyfile(src, dst)
- else:
- def symlink_or_copy(self, src, dst, relative_symlinks_ok=False):
- """
- Try symlinking a file, and if that fails, fall back to copying.
- """
- bad_src = os.path.lexists(src) and not os.path.exists(src)
- if self.symlinks and not bad_src and not os.path.islink(dst):
- try:
+ if self.prompt is not None:
+ f.write(f'prompt = {self.prompt!r}\n')
+
+ if os.name != 'nt':
+ def symlink_or_copy(self, src, dst, relative_symlinks_ok=False):
+ """
+ Try symlinking a file, and if that fails, fall back to copying.
+ """
+ force_copy = not self.symlinks
+ if not force_copy:
+ try:
+ if not os.path.islink(dst): # can't link to itself!
+ if relative_symlinks_ok:
+ assert os.path.dirname(src) == os.path.dirname(dst)
+ os.symlink(os.path.basename(src), dst)
+ else:
+ os.symlink(src, dst)
+ except Exception: # may need to use a more specific exception
+ logger.warning('Unable to symlink %r to %r', src, dst)
+ force_copy = True
+ if force_copy:
+ shutil.copyfile(src, dst)
+ else:
+ def symlink_or_copy(self, src, dst, relative_symlinks_ok=False):
+ """
+ Try symlinking a file, and if that fails, fall back to copying.
+ """
+ bad_src = os.path.lexists(src) and not os.path.exists(src)
+ if self.symlinks and not bad_src and not os.path.islink(dst):
+ try:
if relative_symlinks_ok:
assert os.path.dirname(src) == os.path.dirname(dst)
os.symlink(os.path.basename(src), dst)
else:
os.symlink(src, dst)
- return
- except Exception: # may need to use a more specific exception
- logger.warning('Unable to symlink %r to %r', src, dst)
-
- # On Windows, we rewrite symlinks to our base python.exe into
- # copies of venvlauncher.exe
- basename, ext = os.path.splitext(os.path.basename(src))
- srcfn = os.path.join(os.path.dirname(__file__),
- "scripts",
- "nt",
- basename + ext)
- # Builds or venv's from builds need to remap source file
- # locations, as we do not put them into Lib/venv/scripts
- if sysconfig.is_python_build(True) or not os.path.isfile(srcfn):
+ return
+ except Exception: # may need to use a more specific exception
+ logger.warning('Unable to symlink %r to %r', src, dst)
+
+ # On Windows, we rewrite symlinks to our base python.exe into
+ # copies of venvlauncher.exe
+ basename, ext = os.path.splitext(os.path.basename(src))
+ srcfn = os.path.join(os.path.dirname(__file__),
+ "scripts",
+ "nt",
+ basename + ext)
+ # Builds or venv's from builds need to remap source file
+ # locations, as we do not put them into Lib/venv/scripts
+ if sysconfig.is_python_build(True) or not os.path.isfile(srcfn):
if basename.endswith('_d'):
ext = '_d' + ext
basename = basename[:-2]
- if basename == 'python':
- basename = 'venvlauncher'
- elif basename == 'pythonw':
- basename = 'venvwlauncher'
- src = os.path.join(os.path.dirname(src), basename + ext)
- else:
- src = srcfn
- if not os.path.exists(src):
- if not bad_src:
- logger.warning('Unable to copy %r', src)
- return
+ if basename == 'python':
+ basename = 'venvlauncher'
+ elif basename == 'pythonw':
+ basename = 'venvwlauncher'
+ src = os.path.join(os.path.dirname(src), basename + ext)
+ else:
+ src = srcfn
+ if not os.path.exists(src):
+ if not bad_src:
+ logger.warning('Unable to copy %r', src)
+ return
shutil.copyfile(src, dst)
@@ -257,7 +257,7 @@ class EnvBuilder:
copier(context.executable, path)
if not os.path.islink(path):
os.chmod(path, 0o755)
- for suffix in ('python', 'python3', f'python3.{sys.version_info[1]}'):
+ for suffix in ('python', 'python3', f'python3.{sys.version_info[1]}'):
path = os.path.join(binpath, suffix)
if not os.path.exists(path):
# Issue 18807: make copies if
@@ -281,13 +281,13 @@ class EnvBuilder:
os.path.normcase(f).startswith(('python', 'vcruntime'))
]
else:
- suffixes = {'python.exe', 'python_d.exe', 'pythonw.exe', 'pythonw_d.exe'}
- base_exe = os.path.basename(context.env_exe)
- suffixes.add(base_exe)
+ suffixes = {'python.exe', 'python_d.exe', 'pythonw.exe', 'pythonw_d.exe'}
+ base_exe = os.path.basename(context.env_exe)
+ suffixes.add(base_exe)
for suffix in suffixes:
src = os.path.join(dirname, suffix)
- if os.path.lexists(src):
+ if os.path.lexists(src):
copier(src, os.path.join(binpath, suffix))
if sysconfig.is_python_build(True):
@@ -308,8 +308,8 @@ class EnvBuilder:
# We run ensurepip in isolated mode to avoid side effects from
# environment vars, the current directory and anything else
# intended for the global Python environment
- cmd = [context.env_exec_cmd, '-Im', 'ensurepip', '--upgrade',
- '--default-pip']
+ cmd = [context.env_exec_cmd, '-Im', 'ensurepip', '--upgrade',
+ '--default-pip']
subprocess.check_output(cmd, stderr=subprocess.STDOUT)
def setup_scripts(self, context):
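Editor's note: the restored command line bootstraps pip with the interpreter's -I isolated mode, so PYTHONPATH, user site-packages, and the current directory cannot leak into the new environment. A sketch; the call itself is commented out because it would actually install pip:

    import subprocess, sys

    cmd = [sys.executable, '-Im', 'ensurepip', '--upgrade', '--default-pip']
    # subprocess.check_output(cmd, stderr=subprocess.STDOUT)
    print(' '.join(cmd))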
@@ -405,21 +405,21 @@ class EnvBuilder:
f.write(data)
shutil.copymode(srcfile, dstfile)
- def upgrade_dependencies(self, context):
- logger.debug(
- f'Upgrading {CORE_VENV_DEPS} packages in {context.bin_path}'
- )
- cmd = [context.env_exec_cmd, '-m', 'pip', 'install', '--upgrade']
- cmd.extend(CORE_VENV_DEPS)
- subprocess.check_call(cmd)
-
+ def upgrade_dependencies(self, context):
+ logger.debug(
+ f'Upgrading {CORE_VENV_DEPS} packages in {context.bin_path}'
+ )
+ cmd = [context.env_exec_cmd, '-m', 'pip', 'install', '--upgrade']
+ cmd.extend(CORE_VENV_DEPS)
+ subprocess.check_call(cmd)
+
def create(env_dir, system_site_packages=False, clear=False,
- symlinks=False, with_pip=False, prompt=None, upgrade_deps=False):
+ symlinks=False, with_pip=False, prompt=None, upgrade_deps=False):
"""Create a virtual environment in a directory."""
builder = EnvBuilder(system_site_packages=system_site_packages,
clear=clear, symlinks=symlinks, with_pip=with_pip,
- prompt=prompt, upgrade_deps=upgrade_deps)
+ prompt=prompt, upgrade_deps=upgrade_deps)
builder.create(env_dir)
def main(args=None):
@@ -482,11 +482,11 @@ def main(args=None):
parser.add_argument('--prompt',
help='Provides an alternative prompt prefix for '
'this environment.')
- parser.add_argument('--upgrade-deps', default=False, action='store_true',
- dest='upgrade_deps',
- help='Upgrade core dependencies: {} to the latest '
- 'version in PyPI'.format(
- ' '.join(CORE_VENV_DEPS)))
+ parser.add_argument('--upgrade-deps', default=False, action='store_true',
+ dest='upgrade_deps',
+ help='Upgrade core dependencies: {} to the latest '
+ 'version in PyPI'.format(
+ ' '.join(CORE_VENV_DEPS)))
options = parser.parse_args(args)
if options.upgrade and options.clear:
raise ValueError('you cannot supply --upgrade and --clear together.')
@@ -495,8 +495,8 @@ def main(args=None):
symlinks=options.symlinks,
upgrade=options.upgrade,
with_pip=options.with_pip,
- prompt=options.prompt,
- upgrade_deps=options.upgrade_deps)
+ prompt=options.prompt,
+ upgrade_deps=options.upgrade_deps)
for d in options.dirs:
builder.create(d)
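Editor's note: an end-to-end sketch of the upgrade_deps plumbing restored in this file. Constructing the builder is side-effect free, while create() would build the tree and, with upgrade_deps=True, reach out to PyPI ('demo-env' is a hypothetical target directory):

    import venv

    builder = venv.EnvBuilder(with_pip=True, upgrade_deps=True)
    # builder.create('demo-env')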
diff --git a/contrib/tools/python3/src/Lib/warnings.py b/contrib/tools/python3/src/Lib/warnings.py
index 691ccddfa4..d00730209a 100644
--- a/contrib/tools/python3/src/Lib/warnings.py
+++ b/contrib/tools/python3/src/Lib/warnings.py
@@ -220,12 +220,12 @@ def _setoption(arg):
for s in parts]
action = _getaction(action)
category = _getcategory(category)
- if message or module:
- import re
- if message:
- message = re.escape(message)
+ if message or module:
+ import re
+ if message:
+ message = re.escape(message)
if module:
- module = re.escape(module) + r'\Z'
+ module = re.escape(module) + r'\Z'
if lineno:
try:
lineno = int(lineno)
@@ -251,19 +251,19 @@ def _getaction(action):
def _getcategory(category):
if not category:
return Warning
- if '.' not in category:
- import builtins as m
- klass = category
+ if '.' not in category:
+ import builtins as m
+ klass = category
else:
- module, _, klass = category.rpartition('.')
+ module, _, klass = category.rpartition('.')
try:
m = __import__(module, None, None, [klass])
except ImportError:
raise _OptionError("invalid module name: %r" % (module,)) from None
- try:
- cat = getattr(m, klass)
- except AttributeError:
- raise _OptionError("unknown warning category: %r" % (category,)) from None
+ try:
+ cat = getattr(m, klass)
+ except AttributeError:
+ raise _OptionError("unknown warning category: %r" % (category,)) from None
if not issubclass(cat, Warning):
raise _OptionError("invalid warning category: %r" % (category,))
return cat
@@ -310,11 +310,11 @@ def warn(message, category=None, stacklevel=1, source=None):
raise ValueError
except ValueError:
globals = sys.__dict__
- filename = "sys"
+ filename = "sys"
lineno = 1
else:
globals = frame.f_globals
- filename = frame.f_code.co_filename
+ filename = frame.f_code.co_filename
lineno = frame.f_lineno
if '__name__' in globals:
module = globals['__name__']
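Editor's note: the parsing restored above is what makes -W 'error::DeprecationWarning:mymodule' work: the message and module parts are re.escape()d and the module pattern is anchored with \Z. The equivalent call through the public API, with a hypothetical module name:

    import re, warnings

    warnings.filterwarnings('error', category=DeprecationWarning,
                            module=re.escape('mymodule') + r'\Z')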
diff --git a/contrib/tools/python3/src/Lib/wave.py b/contrib/tools/python3/src/Lib/wave.py
index b7071198e6..95247afeda 100644
--- a/contrib/tools/python3/src/Lib/wave.py
+++ b/contrib/tools/python3/src/Lib/wave.py
@@ -53,7 +53,7 @@ This returns an instance of a class with the following public methods:
-- set all parameters at once
tell() -- return current position in output file
writeframesraw(data)
- -- write audio frames without patching up the
+ -- write audio frames without patching up the
file header
writeframes(data)
-- write audio frames and patch up the file header
@@ -71,16 +71,16 @@ The close() method is called automatically when the class instance
is destroyed.
"""
-from chunk import Chunk
-from collections import namedtuple
-import audioop
+from chunk import Chunk
+from collections import namedtuple
+import audioop
import builtins
-import struct
-import sys
+import struct
+import sys
-__all__ = ["open", "Error", "Wave_read", "Wave_write"]
-
+__all__ = ["open", "Error", "Wave_read", "Wave_write"]
+
class Error(Exception):
pass
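Editor's note: a minimal round trip through the wave API whose imports are re-flowed above, assuming a writable ./tone.wav: one second of 8 kHz, 16-bit mono silence.

    import wave

    with wave.open('tone.wav', 'wb') as w:
        w.setnchannels(1)
        w.setsampwidth(2)
        w.setframerate(8000)
        w.writeframes(b'\x00\x00' * 8000)
    with wave.open('tone.wav', 'rb') as r:
        print(r.getnframes(), r.getframerate())   # 8000 8000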
diff --git a/contrib/tools/python3/src/Lib/weakref.py b/contrib/tools/python3/src/Lib/weakref.py
index 994ea8aa37..9ebb0a184f 100644
--- a/contrib/tools/python3/src/Lib/weakref.py
+++ b/contrib/tools/python3/src/Lib/weakref.py
@@ -2,7 +2,7 @@
This module is an implementation of PEP 205:
-https://www.python.org/dev/peps/pep-0205/
+https://www.python.org/dev/peps/pep-0205/
"""
# Naming convention: Variables named "wr" are weak reference objects;
@@ -33,9 +33,9 @@ __all__ = ["ref", "proxy", "getweakrefcount", "getweakrefs",
"WeakSet", "WeakMethod", "finalize"]
-_collections_abc.Set.register(WeakSet)
-_collections_abc.MutableSet.register(WeakSet)
-
+_collections_abc.Set.register(WeakSet)
+_collections_abc.MutableSet.register(WeakSet)
+
class WeakMethod(ref):
"""
A custom `weakref.ref` subclass which simulates a weak reference to
@@ -78,14 +78,14 @@ class WeakMethod(ref):
if not self._alive or not other._alive:
return self is other
return ref.__eq__(self, other) and self._func_ref == other._func_ref
- return NotImplemented
+ return NotImplemented
def __ne__(self, other):
if isinstance(other, WeakMethod):
if not self._alive or not other._alive:
return self is not other
return ref.__ne__(self, other) or self._func_ref != other._func_ref
- return NotImplemented
+ return NotImplemented
__hash__ = ref.__hash__
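Editor's note: the NotImplemented returns restored above make comparisons with foreign types defer to the other operand instead of answering wrongly. A sketch:

    import weakref

    class C:
        def m(self):
            pass

    c = C()
    wm = weakref.WeakMethod(c.m)
    assert wm() == c.m            # live: resolves to an equal bound method
    assert (wm == 42) is False    # foreign type: NotImplemented, identity fallback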
@@ -102,7 +102,7 @@ class WeakValueDictionary(_collections_abc.MutableMapping):
# objects are unwrapped on the way out, and we always wrap on the
# way in).
- def __init__(self, other=(), /, **kw):
+ def __init__(self, other=(), /, **kw):
def remove(wr, selfref=ref(self), _atomic_removal=_remove_dead_weakref):
self = selfref()
if self is not None:
@@ -111,25 +111,25 @@ class WeakValueDictionary(_collections_abc.MutableMapping):
else:
# Atomic removal is necessary since this function
# can be called asynchronously by the GC
- _atomic_removal(self.data, wr.key)
+ _atomic_removal(self.data, wr.key)
self._remove = remove
# A list of keys to be removed
self._pending_removals = []
self._iterating = set()
- self.data = {}
- self.update(other, **kw)
+ self.data = {}
+ self.update(other, **kw)
- def _commit_removals(self, _atomic_removal=_remove_dead_weakref):
- pop = self._pending_removals.pop
+ def _commit_removals(self, _atomic_removal=_remove_dead_weakref):
+ pop = self._pending_removals.pop
d = self.data
# We shouldn't encounter any KeyError, because this method should
# always be called *before* mutating the dict.
- while True:
- try:
- key = pop()
- except IndexError:
- return
- _atomic_removal(d, key)
+ while True:
+ try:
+ key = pop()
+ except IndexError:
+ return
+ _atomic_removal(d, key)
def __getitem__(self, key):
if self._pending_removals:
@@ -287,17 +287,17 @@ class WeakValueDictionary(_collections_abc.MutableMapping):
else:
return o
- def update(self, other=None, /, **kwargs):
+ def update(self, other=None, /, **kwargs):
if self._pending_removals:
self._commit_removals()
d = self.data
- if other is not None:
- if not hasattr(other, "items"):
- other = dict(other)
- for key, o in other.items():
+ if other is not None:
+ if not hasattr(other, "items"):
+ other = dict(other)
+ for key, o in other.items():
d[key] = KeyedRef(o, self._remove, key)
- for key, o in kwargs.items():
- d[key] = KeyedRef(o, self._remove, key)
+ for key, o in kwargs.items():
+ d[key] = KeyedRef(o, self._remove, key)
def valuerefs(self):
"""Return a list of weak references to the values.
@@ -313,26 +313,26 @@ class WeakValueDictionary(_collections_abc.MutableMapping):
self._commit_removals()
return list(self.data.values())
- def __ior__(self, other):
- self.update(other)
- return self
-
- def __or__(self, other):
- if isinstance(other, _collections_abc.Mapping):
- c = self.copy()
- c.update(other)
- return c
- return NotImplemented
-
- def __ror__(self, other):
- if isinstance(other, _collections_abc.Mapping):
- c = self.__class__()
- c.update(other)
- c.update(self)
- return c
- return NotImplemented
-
-
+ def __ior__(self, other):
+ self.update(other)
+ return self
+
+ def __or__(self, other):
+ if isinstance(other, _collections_abc.Mapping):
+ c = self.copy()
+ c.update(other)
+ return c
+ return NotImplemented
+
+ def __ror__(self, other):
+ if isinstance(other, _collections_abc.Mapping):
+ c = self.__class__()
+ c.update(other)
+ c.update(self)
+ return c
+ return NotImplemented
+
+
class KeyedRef(ref):
"""Specialized reference that includes a key corresponding to the value.
@@ -373,10 +373,10 @@ class WeakKeyDictionary(_collections_abc.MutableMapping):
if self._iterating:
self._pending_removals.append(k)
else:
- try:
- del self.data[k]
- except KeyError:
- pass
+ try:
+ del self.data[k]
+ except KeyError:
+ pass
self._remove = remove
# A list of dead weakrefs (keys to be removed)
self._pending_removals = []
@@ -390,16 +390,16 @@ class WeakKeyDictionary(_collections_abc.MutableMapping):
# because a dead weakref never compares equal to a live weakref,
# even if they happened to refer to equal objects.
# However, it means keys may already have been removed.
- pop = self._pending_removals.pop
+ pop = self._pending_removals.pop
d = self.data
- while True:
+ while True:
try:
- key = pop()
- except IndexError:
- return
-
- try:
- del d[key]
+ key = pop()
+ except IndexError:
+ return
+
+ try:
+ del d[key]
except KeyError:
pass
@@ -508,7 +508,7 @@ class WeakKeyDictionary(_collections_abc.MutableMapping):
def setdefault(self, key, default=None):
return self.data.setdefault(ref(key, self._remove),default)
- def update(self, dict=None, /, **kwargs):
+ def update(self, dict=None, /, **kwargs):
d = self.data
if dict is not None:
if not hasattr(dict, "items"):
@@ -518,26 +518,26 @@ class WeakKeyDictionary(_collections_abc.MutableMapping):
if len(kwargs):
self.update(kwargs)
- def __ior__(self, other):
- self.update(other)
- return self
-
- def __or__(self, other):
- if isinstance(other, _collections_abc.Mapping):
- c = self.copy()
- c.update(other)
- return c
- return NotImplemented
-
- def __ror__(self, other):
- if isinstance(other, _collections_abc.Mapping):
- c = self.__class__()
- c.update(other)
- c.update(self)
- return c
- return NotImplemented
-
-
+ def __ior__(self, other):
+ self.update(other)
+ return self
+
+ def __or__(self, other):
+ if isinstance(other, _collections_abc.Mapping):
+ c = self.copy()
+ c.update(other)
+ return c
+ return NotImplemented
+
+ def __ror__(self, other):
+ if isinstance(other, _collections_abc.Mapping):
+ c = self.__class__()
+ c.update(other)
+ c.update(self)
+ return c
+ return NotImplemented
+
+
class finalize:
"""Class for finalization of weakrefable objects
@@ -566,7 +566,7 @@ class finalize:
class _Info:
__slots__ = ("weakref", "func", "args", "kwargs", "atexit", "index")
- def __init__(self, obj, func, /, *args, **kwargs):
+ def __init__(self, obj, func, /, *args, **kwargs):
if not self._registered_with_atexit:
# We may register the exit function more than once because
# of a thread race, but that is harmless
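Editor's note: the / in the restored signature makes obj and func positional-only, so keyword arguments with those names pass through to the callback untouched. A sketch, relying on CPython's refcounting to run the finalizer at del:

    import weakref

    class Obj:
        pass

    o = Obj()
    weakref.finalize(o, lambda **kw: print(kw), obj='passes through')
    del o          # prints {'obj': 'passes through'}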
diff --git a/contrib/tools/python3/src/Lib/webbrowser.py b/contrib/tools/python3/src/Lib/webbrowser.py
index 6023c1e138..b31608ac28 100644
--- a/contrib/tools/python3/src/Lib/webbrowser.py
+++ b/contrib/tools/python3/src/Lib/webbrowser.py
@@ -69,14 +69,14 @@ def get(using=None):
# instead of "from webbrowser import *".
def open(url, new=0, autoraise=True):
- """Display url using the default browser.
-
- If possible, open url in a location determined by new.
- - 0: the same browser window (the default).
- - 1: a new browser window.
- - 2: a new browser page ("tab").
- If possible, autoraise raises the window (the default) or not.
- """
+ """Display url using the default browser.
+
+ If possible, open url in a location determined by new.
+ - 0: the same browser window (the default).
+ - 1: a new browser window.
+ - 2: a new browser page ("tab").
+ If possible, autoraise raises the window (the default) or not.
+ """
if _tryorder is None:
with _lock:
if _tryorder is None:
@@ -88,22 +88,22 @@ def open(url, new=0, autoraise=True):
return False
def open_new(url):
- """Open url in a new window of the default browser.
-
- If not possible, then open url in the only browser window.
- """
+ """Open url in a new window of the default browser.
+
+ If not possible, then open url in the only browser window.
+ """
return open(url, 1)
def open_new_tab(url):
- """Open url in a new page ("tab") of the default browser.
-
- If not possible, then the behavior becomes equivalent to open_new().
- """
+ """Open url in a new page ("tab") of the default browser.
+
+ If not possible, then the behavior becomes equivalent to open_new().
+ """
return open(url, 2)
def _synthesize(browser, *, preferred=False):
- """Attempt to synthesize a controller based on existing controllers.
+ """Attempt to synthesize a controller based on existing controllers.
This is useful to create a controller when a user specifies a path to
an entry in the BROWSER environment variable -- we can copy a general
@@ -170,7 +170,7 @@ class GenericBrowser(BaseBrowser):
self.basename = os.path.basename(self.name)
def open(self, url, new=0, autoraise=True):
- sys.audit("webbrowser.open", url)
+ sys.audit("webbrowser.open", url)
cmdline = [self.name] + [arg.replace("%s", url)
for arg in self.args]
try:
@@ -190,7 +190,7 @@ class BackgroundBrowser(GenericBrowser):
def open(self, url, new=0, autoraise=True):
cmdline = [self.name] + [arg.replace("%s", url)
for arg in self.args]
- sys.audit("webbrowser.open", url)
+ sys.audit("webbrowser.open", url)
try:
if sys.platform[:3] == 'win':
p = subprocess.Popen(cmdline)
@@ -219,7 +219,7 @@ class UnixBrowser(BaseBrowser):
remote_action_newwin = None
remote_action_newtab = None
- def _invoke(self, args, remote, autoraise, url=None):
+ def _invoke(self, args, remote, autoraise, url=None):
raise_opt = []
if remote and self.raise_opts:
# use autoraise argument only for remote invocation
@@ -255,7 +255,7 @@ class UnixBrowser(BaseBrowser):
return not p.wait()
def open(self, url, new=0, autoraise=True):
- sys.audit("webbrowser.open", url)
+ sys.audit("webbrowser.open", url)
if new == 0:
action = self.remote_action
elif new == 1:
@@ -272,7 +272,7 @@ class UnixBrowser(BaseBrowser):
args = [arg.replace("%s", url).replace("%action", action)
for arg in self.remote_args]
args = [arg for arg in args if arg]
- success = self._invoke(args, True, autoraise, url)
+ success = self._invoke(args, True, autoraise, url)
if not success:
# remote invocation failed, try straight way
args = [arg.replace("%s", url) for arg in self.args]
@@ -356,7 +356,7 @@ class Konqueror(BaseBrowser):
"""
def open(self, url, new=0, autoraise=True):
- sys.audit("webbrowser.open", url)
+ sys.audit("webbrowser.open", url)
# XXX Currently I know no way to prevent KFM from opening a new win.
if new == 2:
action = "newTab"
@@ -413,7 +413,7 @@ class Grail(BaseBrowser):
tempdir = os.path.join(tempfile.gettempdir(),
".grail-unix")
user = pwd.getpwuid(os.getuid())[0]
- filename = os.path.join(glob.escape(tempdir), glob.escape(user) + "-*")
+ filename = os.path.join(glob.escape(tempdir), glob.escape(user) + "-*")
maybes = glob.glob(filename)
if not maybes:
return None
@@ -440,7 +440,7 @@ class Grail(BaseBrowser):
return 1
def open(self, url, new=0, autoraise=True):
- sys.audit("webbrowser.open", url)
+ sys.audit("webbrowser.open", url)
if new:
ok = self._remote("LOADNEW " + url)
else:
@@ -545,12 +545,12 @@ def register_standard_browsers():
register(browser, None, BackgroundBrowser(browser))
else:
# Prefer X browsers if present
- if os.environ.get("DISPLAY") or os.environ.get("WAYLAND_DISPLAY"):
+ if os.environ.get("DISPLAY") or os.environ.get("WAYLAND_DISPLAY"):
try:
cmd = "xdg-settings get default-web-browser".split()
raw_result = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
result = raw_result.decode().strip()
- except (FileNotFoundError, subprocess.CalledProcessError, PermissionError, NotADirectoryError) :
+ except (FileNotFoundError, subprocess.CalledProcessError, PermissionError, NotADirectoryError) :
pass
else:
global _os_preferred_browser
@@ -598,7 +598,7 @@ def register_standard_browsers():
if sys.platform[:3] == "win":
class WindowsDefault(BaseBrowser):
def open(self, url, new=0, autoraise=True):
- sys.audit("webbrowser.open", url)
+ sys.audit("webbrowser.open", url)
try:
os.startfile(url)
except OSError:
@@ -628,7 +628,7 @@ if sys.platform == 'darwin':
self.name = name
def open(self, url, new=0, autoraise=True):
- sys.audit("webbrowser.open", url)
+ sys.audit("webbrowser.open", url)
assert "'" not in url
# hack for local urls
if not ':' in url:
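Editor's note: every open() variant above now raises a webbrowser.open audit event, so a process-wide hook can observe (or veto, by raising) browser launches. A sketch; the actual open() call stays commented out so nothing is launched:

    import sys

    def hook(event, args):
        if event == 'webbrowser.open':
            print('about to open', args[0])

    sys.addaudithook(hook)     # hooks cannot be removed once added
    # import webbrowser; webbrowser.open('https://example.com')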
diff --git a/contrib/tools/python3/src/Lib/wsgiref/handlers.py b/contrib/tools/python3/src/Lib/wsgiref/handlers.py
index 31360e5878..0837493c52 100644
--- a/contrib/tools/python3/src/Lib/wsgiref/handlers.py
+++ b/contrib/tools/python3/src/Lib/wsgiref/handlers.py
@@ -136,10 +136,10 @@ class BaseHandler:
self.setup_environ()
self.result = application(self.environ, self.start_response)
self.finish_response()
- except (ConnectionAbortedError, BrokenPipeError, ConnectionResetError):
- # We expect the client to close the connection abruptly from time
- # to time.
- return
+ except (ConnectionAbortedError, BrokenPipeError, ConnectionResetError):
+ # We expect the client to close the connection abruptly from time
+ # to time.
+ return
except:
try:
self.handle_error()
@@ -183,16 +183,16 @@ class BaseHandler:
for data in self.result:
self.write(data)
self.finish_content()
- except:
- # Call close() on the iterable returned by the WSGI application
- # in case of an exception.
- if hasattr(self.result, 'close'):
- self.result.close()
- raise
- else:
- # We only call close() when no exception is raised, because it
- # will set status, result, headers, and environ fields to None.
- # See bpo-29183 for more details.
+ except:
+ # Call close() on the iterable returned by the WSGI application
+ # in case of an exception.
+ if hasattr(self.result, 'close'):
+ self.result.close()
+ raise
+ else:
+ # We only call close() when no exception is raised, because it
+ # will set status, result, headers, and environ fields to None.
+ # See bpo-29183 for more details.
self.close()
@@ -246,8 +246,8 @@ class BaseHandler:
for name, val in headers:
name = self._convert_string_type(name, "Header name")
val = self._convert_string_type(val, "Header value")
- assert not is_hop_by_hop(name),\
- f"Hop-by-hop header, '{name}: {val}', not allowed"
+ assert not is_hop_by_hop(name),\
+ f"Hop-by-hop header, '{name}: {val}', not allowed"
return self.write
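Editor's note: the first hunk above quietly swallows abrupt client disconnects in run(). All three exceptions are ConnectionError subclasses raised on routine hangups, which is why returning is safer than logging a traceback:

    quiet = (ConnectionAbortedError, BrokenPipeError, ConnectionResetError)
    assert all(issubclass(exc, ConnectionError) for exc in quiet)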
diff --git a/contrib/tools/python3/src/Lib/wsgiref/simple_server.py b/contrib/tools/python3/src/Lib/wsgiref/simple_server.py
index 93d01a863a..aff668b662 100644
--- a/contrib/tools/python3/src/Lib/wsgiref/simple_server.py
+++ b/contrib/tools/python3/src/Lib/wsgiref/simple_server.py
@@ -127,8 +127,8 @@ class WSGIRequestHandler(BaseHTTPRequestHandler):
return
handler = ServerHandler(
- self.rfile, self.wfile, self.get_stderr(), self.get_environ(),
- multithread=False,
+ self.rfile, self.wfile, self.get_stderr(), self.get_environ(),
+ multithread=False,
)
handler.request_handler = self # backpointer for logging
handler.run(self.server.get_app())
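Editor's note: multithread=False above is accurate metadata, not a switch: wsgiref's WSGIServer serves one request at a time unless ThreadingMixIn is added by hand. A minimal sketch; port 0 binds any free port, and handle_request() stays commented out because it blocks:

    from wsgiref.simple_server import make_server, demo_app

    with make_server('127.0.0.1', 0, demo_app) as httpd:
        print('serving on port', httpd.server_port)
        # httpd.handle_request()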
diff --git a/contrib/tools/python3/src/Lib/wsgiref/util.py b/contrib/tools/python3/src/Lib/wsgiref/util.py
index cac52eb5a5..53faf6a213 100644
--- a/contrib/tools/python3/src/Lib/wsgiref/util.py
+++ b/contrib/tools/python3/src/Lib/wsgiref/util.py
@@ -18,13 +18,13 @@ class FileWrapper:
self.close = filelike.close
def __getitem__(self,key):
- import warnings
- warnings.warn(
- "FileWrapper's __getitem__ method ignores 'key' parameter. "
- "Use iterator protocol instead.",
- DeprecationWarning,
- stacklevel=2
- )
+ import warnings
+ warnings.warn(
+ "FileWrapper's __getitem__ method ignores 'key' parameter. "
+ "Use iterator protocol instead.",
+ DeprecationWarning,
+ stacklevel=2
+ )
data = self.filelike.read(self.blksize)
if data:
return data
@@ -162,9 +162,9 @@ def setup_testing_defaults(environ):
_hoppish = {
- 'connection', 'keep-alive', 'proxy-authenticate',
- 'proxy-authorization', 'te', 'trailers', 'transfer-encoding',
- 'upgrade'
+ 'connection', 'keep-alive', 'proxy-authenticate',
+ 'proxy-authorization', 'te', 'trailers', 'transfer-encoding',
+ 'upgrade'
}.__contains__
def is_hop_by_hop(header_name):
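Editor's note: the _hoppish set above backs the public helper. Hop-by-hop headers are connection-level (RFC 2616 section 13.5.1), and WSGI applications must not emit them:

    from wsgiref.util import is_hop_by_hop

    assert is_hop_by_hop('Transfer-Encoding')      # connection-level
    assert not is_hop_by_hop('Content-Length')     # end-to-end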
diff --git a/contrib/tools/python3/src/Lib/wsgiref/validate.py b/contrib/tools/python3/src/Lib/wsgiref/validate.py
index 6e16578dbb..73b09e3cec 100644
--- a/contrib/tools/python3/src/Lib/wsgiref/validate.py
+++ b/contrib/tools/python3/src/Lib/wsgiref/validate.py
@@ -77,7 +77,7 @@ Some of the things this checks:
* That wsgi.input is used properly:
- - .read() is called with exactly one argument
+ - .read() is called with exactly one argument
- That it returns a string
@@ -137,7 +137,7 @@ def validator(application):
"""
When applied between a WSGI server and a WSGI application, this
- middleware will check for WSGI compliance on a number of levels.
+ middleware will check for WSGI compliance on a number of levels.
This middleware does not modify the request or response in any
way, but will raise an AssertionError if anything seems off
(except for a failure to close the application iterator, which
diff --git a/contrib/tools/python3/src/Lib/xml/dom/expatbuilder.py b/contrib/tools/python3/src/Lib/xml/dom/expatbuilder.py
index 199c22d0af..88eefd2552 100644
--- a/contrib/tools/python3/src/Lib/xml/dom/expatbuilder.py
+++ b/contrib/tools/python3/src/Lib/xml/dom/expatbuilder.py
@@ -204,11 +204,11 @@ class ExpatBuilder:
buffer = file.read(16*1024)
if not buffer:
break
- parser.Parse(buffer, False)
+ parser.Parse(buffer, False)
if first_buffer and self.document.documentElement:
self._setup_subset(buffer)
first_buffer = False
- parser.Parse(b"", True)
+ parser.Parse(b"", True)
except ParseEscape:
pass
doc = self.document
@@ -637,7 +637,7 @@ class FragmentBuilder(ExpatBuilder):
nsattrs = self._getNSattrs() # get ns decls from node's ancestors
document = _FRAGMENT_BUILDER_TEMPLATE % (ident, subset, nsattrs)
try:
- parser.Parse(document, True)
+ parser.Parse(document, True)
except:
self.reset()
raise
@@ -697,7 +697,7 @@ class FragmentBuilder(ExpatBuilder):
self.fragment = self.document.createDocumentFragment()
self.curNode = self.fragment
try:
- parser.Parse(self._source, True)
+ parser.Parse(self._source, True)
finally:
self.curNode = old_cur_node
self.document = old_document
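Editor's note: the calls above spell out Parse's second argument, isfinal: feed intermediate buffers with False, then flush with one final empty True call. The same pattern against raw expat:

    from xml.parsers import expat

    parser = expat.ParserCreate()
    for chunk in (b'<root><a/>', b'<b/></root>'):
        parser.Parse(chunk, False)
    parser.Parse(b'', True)          # isfinal: flush and validate end state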
diff --git a/contrib/tools/python3/src/Lib/xml/dom/minidom.py b/contrib/tools/python3/src/Lib/xml/dom/minidom.py
index d09ef5e7d0..64cc7ecc48 100644
--- a/contrib/tools/python3/src/Lib/xml/dom/minidom.py
+++ b/contrib/tools/python3/src/Lib/xml/dom/minidom.py
@@ -43,11 +43,11 @@ class Node(xml.dom.Node):
def __bool__(self):
return True
- def toxml(self, encoding=None, standalone=None):
- return self.toprettyxml("", "", encoding, standalone)
+ def toxml(self, encoding=None, standalone=None):
+ return self.toprettyxml("", "", encoding, standalone)
- def toprettyxml(self, indent="\t", newl="\n", encoding=None,
- standalone=None):
+ def toprettyxml(self, indent="\t", newl="\n", encoding=None,
+ standalone=None):
if encoding is None:
writer = io.StringIO()
else:
@@ -57,7 +57,7 @@ class Node(xml.dom.Node):
newline='\n')
if self.nodeType == Node.DOCUMENT_NODE:
# Can pass encoding only to document, to put it into XML header
- self.writexml(writer, "", indent, newl, encoding, standalone)
+ self.writexml(writer, "", indent, newl, encoding, standalone)
else:
self.writexml(writer, "", indent, newl)
if encoding is None:
@@ -719,14 +719,14 @@ class Element(Node):
Node.unlink(self)
def getAttribute(self, attname):
- """Returns the value of the specified attribute.
-
- Returns the value of the element's attribute named attname as
- a string. An empty string is returned if the element does not
- have such an attribute. Note that an empty string may also be
- returned as an explicitly given attribute value, use the
- hasAttribute method to distinguish these two cases.
- """
+ """Returns the value of the specified attribute.
+
+ Returns the value of the element's attribute named attname as
+ a string. An empty string is returned if the element does not
+ have such an attribute. Note that an empty string may also be
+ returned as an explicitly given attribute value, use the
+ hasAttribute method to distinguish these two cases.
+ """
if self._attrs is None:
return ""
try:
@@ -832,16 +832,16 @@ class Element(Node):
# Restore this since the node is still useful and otherwise
# unlinked
node.ownerDocument = self.ownerDocument
- return node
+ return node
removeAttributeNodeNS = removeAttributeNode
def hasAttribute(self, name):
- """Checks whether the element has an attribute with the specified name.
-
- Returns True if the element has an attribute with the specified name.
- Otherwise, returns False.
- """
+ """Checks whether the element has an attribute with the specified name.
+
+ Returns True if the element has an attribute with the specified name.
+ Otherwise, returns False.
+ """
if self._attrs is None:
return False
return name in self._attrs
@@ -852,11 +852,11 @@ class Element(Node):
return (namespaceURI, localName) in self._attrsNS
def getElementsByTagName(self, name):
- """Returns all descendant elements with the given tag name.
-
- Returns the list of all descendant elements (not direct children
- only) with the specified tag name.
- """
+ """Returns all descendant elements with the given tag name.
+
+ Returns the list of all descendant elements (not direct children
+ only) with the specified tag name.
+ """
return _get_elements_by_tagName_helper(self, name, NodeList())
def getElementsByTagNameNS(self, namespaceURI, localName):
@@ -867,11 +867,11 @@ class Element(Node):
return "<DOM Element: %s at %#x>" % (self.tagName, id(self))
def writexml(self, writer, indent="", addindent="", newl=""):
- """Write an XML element to a file-like object
-
- Write the element to the writer object that must provide
- a write method (e.g. a file or StringIO object).
- """
+ """Write an XML element to a file-like object
+
+ Write the element to the writer object that must provide
+ a write method (e.g. a file or StringIO object).
+ """
# indent = current indentation
# addindent = indentation to add to higher levels
# newl = newline string
@@ -879,15 +879,15 @@ class Element(Node):
attrs = self._get_attributes()
- for a_name in attrs.keys():
+ for a_name in attrs.keys():
writer.write(" %s=\"" % a_name)
_write_data(writer, attrs[a_name].value)
writer.write("\"")
if self.childNodes:
writer.write(">")
if (len(self.childNodes) == 1 and
- self.childNodes[0].nodeType in (
- Node.TEXT_NODE, Node.CDATA_SECTION_NODE)):
+ self.childNodes[0].nodeType in (
+ Node.TEXT_NODE, Node.CDATA_SECTION_NODE)):
self.childNodes[0].writexml(writer, '', '', '')
else:
writer.write(newl)
@@ -1811,17 +1811,17 @@ class Document(Node, DocumentLS):
raise xml.dom.NotSupportedErr("cannot import document type nodes")
return _clone_node(node, deep, self)
- def writexml(self, writer, indent="", addindent="", newl="", encoding=None,
- standalone=None):
- declarations = []
-
- if encoding:
- declarations.append(f'encoding="{encoding}"')
- if standalone is not None:
- declarations.append(f'standalone="{"yes" if standalone else "no"}"')
-
- writer.write(f'<?xml version="1.0" {" ".join(declarations)}?>{newl}')
-
+ def writexml(self, writer, indent="", addindent="", newl="", encoding=None,
+ standalone=None):
+ declarations = []
+
+ if encoding:
+ declarations.append(f'encoding="{encoding}"')
+ if standalone is not None:
+ declarations.append(f'standalone="{"yes" if standalone else "no"}"')
+
+ writer.write(f'<?xml version="1.0" {" ".join(declarations)}?>{newl}')
+
for node in self.childNodes:
node.writexml(writer, indent, addindent, newl)
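
The minidom hunks above cover the pretty-printing entry points and the XML declaration that Document.writexml assembles from its encoding and standalone arguments. A short sketch of how those parameters look from the caller's side (standalone is available from Python 3.9):

from xml.dom import minidom

doc = minidom.parseString("<root><item>x</item></root>")
# toxml() is toprettyxml() with empty indent and newline strings
print(doc.toxml(encoding="utf-8", standalone=True))
# b'<?xml version="1.0" encoding="utf-8" standalone="yes"?>...'
print(doc.toprettyxml(indent="  ", standalone=False))
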
diff --git a/contrib/tools/python3/src/Lib/xml/dom/pulldom.py b/contrib/tools/python3/src/Lib/xml/dom/pulldom.py
index 96a8d59519..228ea40202 100644
--- a/contrib/tools/python3/src/Lib/xml/dom/pulldom.py
+++ b/contrib/tools/python3/src/Lib/xml/dom/pulldom.py
@@ -217,13 +217,13 @@ class DOMEventStream:
self.parser.setContentHandler(self.pulldom)
def __getitem__(self, pos):
- import warnings
- warnings.warn(
- "DOMEventStream's __getitem__ method ignores 'pos' parameter. "
- "Use iterator protocol instead.",
- DeprecationWarning,
- stacklevel=2
- )
+ import warnings
+ warnings.warn(
+ "DOMEventStream's __getitem__ method ignores 'pos' parameter. "
+ "Use iterator protocol instead.",
+ DeprecationWarning,
+ stacklevel=2
+ )
rc = self.getEvent()
if rc:
return rc
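
The warning added above steers callers away from indexing DOMEventStream; it is a one-shot event iterator. A sketch of the supported style (input string is made up):

from xml.dom import pulldom

events = pulldom.parseString("<root><item>x</item></root>")
for event, node in events:  # iterator protocol instead of events[pos]
    if event == pulldom.START_ELEMENT and node.tagName == "item":
        events.expandNode(node)  # materialize the subtree as a DOM node
        print(node.toxml())
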
diff --git a/contrib/tools/python3/src/Lib/xml/etree/ElementInclude.py b/contrib/tools/python3/src/Lib/xml/etree/ElementInclude.py
index 40a9b22292..3a0b04e27a 100644
--- a/contrib/tools/python3/src/Lib/xml/etree/ElementInclude.py
+++ b/contrib/tools/python3/src/Lib/xml/etree/ElementInclude.py
@@ -42,7 +42,7 @@
# --------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
-# See https://www.python.org/psf/license for licensing details.
+# See https://www.python.org/psf/license for licensing details.
##
# Limited XInclude support for the ElementTree package.
@@ -50,28 +50,28 @@
import copy
from . import ElementTree
-from urllib.parse import urljoin
+from urllib.parse import urljoin
XINCLUDE = "{http://www.w3.org/2001/XInclude}"
XINCLUDE_INCLUDE = XINCLUDE + "include"
XINCLUDE_FALLBACK = XINCLUDE + "fallback"
-# For security reasons, the inclusion depth is limited to this read-only value by default.
-DEFAULT_MAX_INCLUSION_DEPTH = 6
-
-
+# For security reasons, the inclusion depth is limited to this read-only value by default.
+DEFAULT_MAX_INCLUSION_DEPTH = 6
+
+
##
# Fatal include error.
class FatalIncludeError(SyntaxError):
pass
-
-class LimitedRecursiveIncludeError(FatalIncludeError):
- pass
-
-
+
+class LimitedRecursiveIncludeError(FatalIncludeError):
+ pass
+
+
##
# Default loader. This loader reads an included resource from disk.
#
@@ -102,33 +102,33 @@ def default_loader(href, parse, encoding=None):
# @param loader Optional resource loader. If omitted, it defaults
# to {@link default_loader}. If given, it should be a callable
# that implements the same interface as <b>default_loader</b>.
-# @param base_url The base URL of the original file, to resolve
-# relative include file references.
-# @param max_depth The maximum number of recursive inclusions.
-# Limited to reduce the risk of malicious content explosion.
-# Pass a negative value to disable the limitation.
-# @throws LimitedRecursiveIncludeError If the {@link max_depth} was exceeded.
+# @param base_url The base URL of the original file, to resolve
+# relative include file references.
+# @param max_depth The maximum number of recursive inclusions.
+# Limited to reduce the risk of malicious content explosion.
+# Pass a negative value to disable the limitation.
+# @throws LimitedRecursiveIncludeError If the {@link max_depth} was exceeded.
# @throws FatalIncludeError If the function fails to include a given
# resource, or if the tree contains malformed XInclude elements.
-# @throws IOError If the function fails to load a given resource.
-# @returns the node or its replacement if it was an XInclude node
-
-def include(elem, loader=None, base_url=None,
- max_depth=DEFAULT_MAX_INCLUSION_DEPTH):
- if max_depth is None:
- max_depth = -1
- elif max_depth < 0:
- raise ValueError("expected non-negative depth or None for 'max_depth', got %r" % max_depth)
-
- if hasattr(elem, 'getroot'):
- elem = elem.getroot()
+# @throws IOError If the function fails to load a given resource.
+# @returns the node or its replacement if it was an XInclude node
+
+def include(elem, loader=None, base_url=None,
+ max_depth=DEFAULT_MAX_INCLUSION_DEPTH):
+ if max_depth is None:
+ max_depth = -1
+ elif max_depth < 0:
+ raise ValueError("expected non-negative depth or None for 'max_depth', got %r" % max_depth)
+
+ if hasattr(elem, 'getroot'):
+ elem = elem.getroot()
if loader is None:
loader = default_loader
-
- _include(elem, loader, base_url, max_depth, set())
-
-
-def _include(elem, loader, base_url, max_depth, _parent_hrefs):
+
+ _include(elem, loader, base_url, max_depth, set())
+
+
+def _include(elem, loader, base_url, max_depth, _parent_hrefs):
# look for xinclude elements
i = 0
while i < len(elem):
@@ -136,24 +136,24 @@ def _include(elem, loader, base_url, max_depth, _parent_hrefs):
if e.tag == XINCLUDE_INCLUDE:
# process xinclude directive
href = e.get("href")
- if base_url:
- href = urljoin(base_url, href)
+ if base_url:
+ href = urljoin(base_url, href)
parse = e.get("parse", "xml")
if parse == "xml":
- if href in _parent_hrefs:
- raise FatalIncludeError("recursive include of %s" % href)
- if max_depth == 0:
- raise LimitedRecursiveIncludeError(
- "maximum xinclude depth reached when including file %s" % href)
- _parent_hrefs.add(href)
+ if href in _parent_hrefs:
+ raise FatalIncludeError("recursive include of %s" % href)
+ if max_depth == 0:
+ raise LimitedRecursiveIncludeError(
+ "maximum xinclude depth reached when including file %s" % href)
+ _parent_hrefs.add(href)
node = loader(href, parse)
if node is None:
raise FatalIncludeError(
"cannot load %r as %r" % (href, parse)
)
- node = copy.copy(node) # FIXME: this makes little sense with recursive includes
- _include(node, loader, href, max_depth - 1, _parent_hrefs)
- _parent_hrefs.remove(href)
+ node = copy.copy(node) # FIXME: this makes little sense with recursive includes
+ _include(node, loader, href, max_depth - 1, _parent_hrefs)
+ _parent_hrefs.remove(href)
if e.tail:
node.tail = (node.tail or "") + e.tail
elem[i] = node
@@ -163,13 +163,13 @@ def _include(elem, loader, base_url, max_depth, _parent_hrefs):
raise FatalIncludeError(
"cannot load %r as %r" % (href, parse)
)
- if e.tail:
- text += e.tail
+ if e.tail:
+ text += e.tail
if i:
node = elem[i-1]
- node.tail = (node.tail or "") + text
+ node.tail = (node.tail or "") + text
else:
- elem.text = (elem.text or "") + text
+ elem.text = (elem.text or "") + text
del elem[i]
continue
else:
@@ -181,5 +181,5 @@ def _include(elem, loader, base_url, max_depth, _parent_hrefs):
"xi:fallback tag must be child of xi:include (%r)" % e.tag
)
else:
- _include(e, loader, base_url, max_depth, _parent_hrefs)
- i += 1
+ _include(e, loader, base_url, max_depth, _parent_hrefs)
+ i += 1
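
The ElementInclude hunks above concern recursion control: hrefs currently being expanded are tracked in _parent_hrefs, and max_depth (default 6) bounds nested includes. A hedged sketch of the public entry point (file names are hypothetical):

from xml.etree import ElementTree, ElementInclude

tree = ElementTree.parse("document.xml")
# base_url resolves relative hrefs; max_depth=None lifts the limit entirely
ElementInclude.include(tree.getroot(),
                       base_url="document.xml",
                       max_depth=2)  # LimitedRecursiveIncludeError if exceeded
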
diff --git a/contrib/tools/python3/src/Lib/xml/etree/ElementPath.py b/contrib/tools/python3/src/Lib/xml/etree/ElementPath.py
index 880ea7bd99..7f7606c631 100644
--- a/contrib/tools/python3/src/Lib/xml/etree/ElementPath.py
+++ b/contrib/tools/python3/src/Lib/xml/etree/ElementPath.py
@@ -48,7 +48,7 @@
# --------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
-# See https://www.python.org/psf/license for licensing details.
+# See https://www.python.org/psf/license for licensing details.
##
# Implementation module for XPath support. There's usually no reason
@@ -71,29 +71,29 @@ xpath_tokenizer_re = re.compile(
)
def xpath_tokenizer(pattern, namespaces=None):
- default_namespace = namespaces.get('') if namespaces else None
- parsing_attribute = False
+ default_namespace = namespaces.get('') if namespaces else None
+ parsing_attribute = False
for token in xpath_tokenizer_re.findall(pattern):
- ttype, tag = token
- if tag and tag[0] != "{":
- if ":" in tag:
+ ttype, tag = token
+ if tag and tag[0] != "{":
+ if ":" in tag:
prefix, uri = tag.split(":", 1)
- try:
- if not namespaces:
- raise KeyError
- yield ttype, "{%s}%s" % (namespaces[prefix], uri)
- except KeyError:
- raise SyntaxError("prefix %r not found in prefix map" % prefix) from None
- elif default_namespace and not parsing_attribute:
- yield ttype, "{%s}%s" % (default_namespace, tag)
- else:
- yield token
- parsing_attribute = False
+ try:
+ if not namespaces:
+ raise KeyError
+ yield ttype, "{%s}%s" % (namespaces[prefix], uri)
+ except KeyError:
+ raise SyntaxError("prefix %r not found in prefix map" % prefix) from None
+ elif default_namespace and not parsing_attribute:
+ yield ttype, "{%s}%s" % (default_namespace, tag)
+ else:
+ yield token
+ parsing_attribute = False
else:
yield token
- parsing_attribute = ttype == '@'
-
+ parsing_attribute = ttype == '@'
+
def get_parent_map(context):
parent_map = context.parent_map
if parent_map is None:
@@ -103,69 +103,69 @@ def get_parent_map(context):
parent_map[e] = p
return parent_map
-
-def _is_wildcard_tag(tag):
- return tag[:3] == '{*}' or tag[-2:] == '}*'
-
-
-def _prepare_tag(tag):
- _isinstance, _str = isinstance, str
- if tag == '{*}*':
- # Same as '*', but no comments or processing instructions.
- # It can be a surprise that '*' includes those, but there is no
- # justification for '{*}*' doing the same.
- def select(context, result):
- for elem in result:
- if _isinstance(elem.tag, _str):
- yield elem
- elif tag == '{}*':
- # Any tag that is not in a namespace.
- def select(context, result):
- for elem in result:
- el_tag = elem.tag
- if _isinstance(el_tag, _str) and el_tag[0] != '{':
- yield elem
- elif tag[:3] == '{*}':
- # The tag in any (or no) namespace.
- suffix = tag[2:] # '}name'
- no_ns = slice(-len(suffix), None)
- tag = tag[3:]
- def select(context, result):
- for elem in result:
- el_tag = elem.tag
- if el_tag == tag or _isinstance(el_tag, _str) and el_tag[no_ns] == suffix:
- yield elem
- elif tag[-2:] == '}*':
- # Any tag in the given namespace.
- ns = tag[:-1]
- ns_only = slice(None, len(ns))
- def select(context, result):
- for elem in result:
- el_tag = elem.tag
- if _isinstance(el_tag, _str) and el_tag[ns_only] == ns:
- yield elem
- else:
- raise RuntimeError(f"internal parser error, got {tag}")
- return select
-
-
+
+def _is_wildcard_tag(tag):
+ return tag[:3] == '{*}' or tag[-2:] == '}*'
+
+
+def _prepare_tag(tag):
+ _isinstance, _str = isinstance, str
+ if tag == '{*}*':
+ # Same as '*', but no comments or processing instructions.
+ # It can be a surprise that '*' includes those, but there is no
+ # justification for '{*}*' doing the same.
+ def select(context, result):
+ for elem in result:
+ if _isinstance(elem.tag, _str):
+ yield elem
+ elif tag == '{}*':
+ # Any tag that is not in a namespace.
+ def select(context, result):
+ for elem in result:
+ el_tag = elem.tag
+ if _isinstance(el_tag, _str) and el_tag[0] != '{':
+ yield elem
+ elif tag[:3] == '{*}':
+ # The tag in any (or no) namespace.
+ suffix = tag[2:] # '}name'
+ no_ns = slice(-len(suffix), None)
+ tag = tag[3:]
+ def select(context, result):
+ for elem in result:
+ el_tag = elem.tag
+ if el_tag == tag or _isinstance(el_tag, _str) and el_tag[no_ns] == suffix:
+ yield elem
+ elif tag[-2:] == '}*':
+ # Any tag in the given namespace.
+ ns = tag[:-1]
+ ns_only = slice(None, len(ns))
+ def select(context, result):
+ for elem in result:
+ el_tag = elem.tag
+ if _isinstance(el_tag, _str) and el_tag[ns_only] == ns:
+ yield elem
+ else:
+ raise RuntimeError(f"internal parser error, got {tag}")
+ return select
+
+
def prepare_child(next, token):
tag = token[1]
- if _is_wildcard_tag(tag):
- select_tag = _prepare_tag(tag)
- def select(context, result):
- def select_child(result):
- for elem in result:
- yield from elem
- return select_tag(context, select_child(result))
- else:
- if tag[:2] == '{}':
- tag = tag[2:] # '{}tag' == 'tag'
- def select(context, result):
- for elem in result:
- for e in elem:
- if e.tag == tag:
- yield e
+ if _is_wildcard_tag(tag):
+ select_tag = _prepare_tag(tag)
+ def select(context, result):
+ def select_child(result):
+ for elem in result:
+ yield from elem
+ return select_tag(context, select_child(result))
+ else:
+ if tag[:2] == '{}':
+ tag = tag[2:] # '{}tag' == 'tag'
+ def select(context, result):
+ for elem in result:
+ for e in elem:
+ if e.tag == tag:
+ yield e
return select
def prepare_star(next, token):
@@ -190,24 +190,24 @@ def prepare_descendant(next, token):
tag = token[1]
else:
raise SyntaxError("invalid descendant")
-
- if _is_wildcard_tag(tag):
- select_tag = _prepare_tag(tag)
- def select(context, result):
- def select_child(result):
- for elem in result:
- for e in elem.iter():
- if e is not elem:
- yield e
- return select_tag(context, select_child(result))
- else:
- if tag[:2] == '{}':
- tag = tag[2:] # '{}tag' == 'tag'
- def select(context, result):
- for elem in result:
- for e in elem.iter(tag):
- if e is not elem:
- yield e
+
+ if _is_wildcard_tag(tag):
+ select_tag = _prepare_tag(tag)
+ def select(context, result):
+ def select_child(result):
+ for elem in result:
+ for e in elem.iter():
+ if e is not elem:
+ yield e
+ return select_tag(context, select_child(result))
+ else:
+ if tag[:2] == '{}':
+ tag = tag[2:] # '{}tag' == 'tag'
+ def select(context, result):
+ for elem in result:
+ for e in elem.iter(tag):
+ if e is not elem:
+ yield e
return select
def prepare_parent(next, token):
@@ -345,11 +345,11 @@ def iterfind(elem, path, namespaces=None):
# compile selector pattern
if path[-1:] == "/":
path = path + "*" # implicit all (FIXME: keep this?)
-
- cache_key = (path,)
- if namespaces:
- cache_key += tuple(sorted(namespaces.items()))
-
+
+ cache_key = (path,)
+ if namespaces:
+ cache_key += tuple(sorted(namespaces.items()))
+
try:
selector = _cache[cache_key]
except KeyError:
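
The ElementPath hunks above cover the wildcard tags ('{*}name', '{ns}*', '{}*', '{*}*') and a selector cache key that folds in the namespace mapping, so the same path compiled under different prefix maps no longer collides. Illustrative queries (names made up):

import xml.etree.ElementTree as ET

root = ET.fromstring('<root xmlns:a="urn:x"><a:item/><item/></root>')
print(root.findall('{*}item'))   # 'item' in any (or no) namespace: both children
print(root.findall('{urn:x}*'))  # any tag in the urn:x namespace
print(root.findall('item', namespaces={'': 'urn:x'}))  # default-ns lookup
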
diff --git a/contrib/tools/python3/src/Lib/xml/etree/ElementTree.py b/contrib/tools/python3/src/Lib/xml/etree/ElementTree.py
index fde303c875..6f964670a3 100644
--- a/contrib/tools/python3/src/Lib/xml/etree/ElementTree.py
+++ b/contrib/tools/python3/src/Lib/xml/etree/ElementTree.py
@@ -35,7 +35,7 @@
#---------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
-# See https://www.python.org/psf/license for licensing details.
+# See https://www.python.org/psf/license for licensing details.
#
# ElementTree
# Copyright (c) 1999-2008 by Fredrik Lundh. All rights reserved.
@@ -76,7 +76,7 @@ __all__ = [
"dump",
"Element", "ElementTree",
"fromstring", "fromstringlist",
- "indent", "iselement", "iterparse",
+ "indent", "iselement", "iterparse",
"parse", "ParseError",
"PI", "ProcessingInstruction",
"QName",
@@ -87,7 +87,7 @@ __all__ = [
"XML", "XMLID",
"XMLParser", "XMLPullParser",
"register_namespace",
- "canonicalize", "C14NWriterTarget",
+ "canonicalize", "C14NWriterTarget",
]
VERSION = "1.3.0"
@@ -171,7 +171,7 @@ class Element:
raise TypeError("attrib must be dict, not %s" % (
attrib.__class__.__name__,))
self.tag = tag
- self.attrib = {**attrib, **extra}
+ self.attrib = {**attrib, **extra}
self._children = []
def __repr__(self):
@@ -195,13 +195,13 @@ class Element:
original tree.
"""
- warnings.warn(
- "elem.copy() is deprecated. Use copy.copy(elem) instead.",
- DeprecationWarning
- )
- return self.__copy__()
-
- def __copy__(self):
+ warnings.warn(
+ "elem.copy() is deprecated. Use copy.copy(elem) instead.",
+ DeprecationWarning
+ )
+ return self.__copy__()
+
+ def __copy__(self):
elem = self.makeelement(self.tag, self.attrib)
elem.text = self.text
elem.tail = self.tail
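
The hunk above shows Element.copy() deprecated in favour of the copy module, with both paths routed through __copy__. Minimal illustration:

import copy
import xml.etree.ElementTree as ET

elem = ET.fromstring('<a x="1"><b/></a>')
dup = copy.copy(elem)  # preferred spelling: shallow copy via __copy__
# elem.copy() still works but emits a DeprecationWarning
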
@@ -223,11 +223,11 @@ class Element:
return self._children[index]
def __setitem__(self, index, element):
- if isinstance(index, slice):
- for elt in element:
- self._assert_is_element(elt)
- else:
- self._assert_is_element(element)
+ if isinstance(index, slice):
+ for elt in element:
+ self._assert_is_element(elt)
+ else:
+ self._assert_is_element(element)
self._children[index] = element
def __delitem__(self, index):
@@ -252,7 +252,7 @@ class Element:
"""
for element in elements:
self._assert_is_element(element)
- self._children.append(element)
+ self._children.append(element)
def insert(self, index, subelement):
"""Insert *subelement* at position *index*."""
@@ -435,7 +435,7 @@ def SubElement(parent, tag, attrib={}, **extra):
additional attributes given as keyword arguments.
"""
- attrib = {**attrib, **extra}
+ attrib = {**attrib, **extra}
element = parent.makeelement(tag, attrib)
parent.append(element)
return element
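
In the hunks above, Element and SubElement merge attributes with {**attrib, **extra}, so keyword arguments override keys from the attrib dict without mutating it. For example:

import xml.etree.ElementTree as ET

root = ET.Element('root')
# 'cls' from the dict is overridden by the keyword argument
item = ET.SubElement(root, 'item', {'cls': 'old', 'id': '1'}, cls='new')
print(item.attrib)  # {'cls': 'new', 'id': '1'}
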
@@ -897,7 +897,7 @@ def _serialize_xml(write, elem, qnames, namespaces,
k,
_escape_attrib(v)
))
- for k, v in items:
+ for k, v in items:
if isinstance(k, QName):
k = k.text
if isinstance(v, QName):
@@ -953,7 +953,7 @@ def _serialize_html(write, elem, qnames, namespaces, **kwargs):
k,
_escape_attrib(v)
))
- for k, v in items:
+ for k, v in items:
if isinstance(k, QName):
k = k.text
if isinstance(v, QName):
@@ -1057,15 +1057,15 @@ def _escape_attrib(text):
text = text.replace(">", "&gt;")
if "\"" in text:
text = text.replace("\"", "&quot;")
- # Although section 2.11 of the XML specification states that CR or
- # CR LF should be replaced with just LF, that rule applies only to the
- # line endings that organize the file into lines. Within attributes,
- # we are replacing these with character references, so they do not count.
+ # Although section 2.11 of the XML specification states that CR or
+ # CR LF should be replaced with just LF, that rule applies only to the
+ # line endings that organize the file into lines. Within attributes,
+ # we are replacing these with character references, so they do not count.
# http://www.w3.org/TR/REC-xml/#sec-line-ends
- # The current solution, contained in the following six lines, was
- # discussed in issues 17582 and 39011.
+ # The current solution, contained in the following six lines, was
+ # discussed in issues 17582 and 39011.
if "\r" in text:
- text = text.replace("\r", "&#13;")
+ text = text.replace("\r", "&#13;")
if "\n" in text:
text = text.replace("\n", "&#10;")
if "\t" in text:
@@ -1090,7 +1090,7 @@ def _escape_attrib_html(text):
# --------------------------------------------------------------------
def tostring(element, encoding=None, method=None, *,
- xml_declaration=None, default_namespace=None,
+ xml_declaration=None, default_namespace=None,
short_empty_elements=True):
"""Generate string representation of XML element.
@@ -1099,17 +1099,17 @@ def tostring(element, encoding=None, method=None, *,
*element* is an Element instance, *encoding* is an optional output
encoding defaulting to US-ASCII, *method* is an optional output method which can
- be one of "xml" (default), "html", "text" or "c14n", *default_namespace*
- sets the default XML namespace (for "xmlns").
+ be one of "xml" (default), "html", "text" or "c14n", *default_namespace*
+ sets the default XML namespace (for "xmlns").
Returns an (optionally) encoded string containing the XML data.
"""
stream = io.StringIO() if encoding == 'unicode' else io.BytesIO()
- ElementTree(element).write(stream, encoding,
- xml_declaration=xml_declaration,
- default_namespace=default_namespace,
- method=method,
+ ElementTree(element).write(stream, encoding,
+ xml_declaration=xml_declaration,
+ default_namespace=default_namespace,
+ method=method,
short_empty_elements=short_empty_elements)
return stream.getvalue()
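
The tostring() hunks (and tostringlist() below) show the xml_declaration and default_namespace pass-throughs into ElementTree.write(). A quick sketch:

import xml.etree.ElementTree as ET

elem = ET.Element('{urn:x}root')
print(ET.tostring(elem, encoding='unicode',
                  xml_declaration=True,
                  default_namespace='urn:x'))
# forces the declaration and binds urn:x as the unprefixed default namespace
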
@@ -1131,14 +1131,14 @@ class _ListDataStream(io.BufferedIOBase):
return len(self.lst)
def tostringlist(element, encoding=None, method=None, *,
- xml_declaration=None, default_namespace=None,
+ xml_declaration=None, default_namespace=None,
short_empty_elements=True):
lst = []
stream = _ListDataStream(lst)
- ElementTree(element).write(stream, encoding,
- xml_declaration=xml_declaration,
- default_namespace=default_namespace,
- method=method,
+ ElementTree(element).write(stream, encoding,
+ xml_declaration=xml_declaration,
+ default_namespace=default_namespace,
+ method=method,
short_empty_elements=short_empty_elements)
return lst
@@ -1161,57 +1161,57 @@ def dump(elem):
if not tail or tail[-1] != "\n":
sys.stdout.write("\n")
-
-def indent(tree, space=" ", level=0):
- """Indent an XML document by inserting newlines and indentation space
- after elements.
-
- *tree* is the ElementTree or Element to modify. The (root) element
- itself will not be changed, but the tail text of all elements in its
- subtree will be adapted.
-
- *space* is the whitespace to insert for each indentation level, two
- space characters by default.
-
- *level* is the initial indentation level. Setting this to a higher
- value than 0 can be used for indenting subtrees that are more deeply
- nested inside of a document.
- """
- if isinstance(tree, ElementTree):
- tree = tree.getroot()
- if level < 0:
- raise ValueError(f"Initial indentation level must be >= 0, got {level}")
- if not len(tree):
- return
-
- # Reduce the memory consumption by reusing indentation strings.
- indentations = ["\n" + level * space]
-
- def _indent_children(elem, level):
- # Start a new indentation level for the first child.
- child_level = level + 1
- try:
- child_indentation = indentations[child_level]
- except IndexError:
- child_indentation = indentations[level] + space
- indentations.append(child_indentation)
-
- if not elem.text or not elem.text.strip():
- elem.text = child_indentation
-
- for child in elem:
- if len(child):
- _indent_children(child, child_level)
- if not child.tail or not child.tail.strip():
- child.tail = child_indentation
-
- # Dedent after the last child by overwriting the previous indentation.
- if not child.tail.strip():
- child.tail = indentations[level]
-
- _indent_children(tree, 0)
-
-
+
+def indent(tree, space=" ", level=0):
+ """Indent an XML document by inserting newlines and indentation space
+ after elements.
+
+ *tree* is the ElementTree or Element to modify. The (root) element
+ itself will not be changed, but the tail text of all elements in its
+ subtree will be adapted.
+
+ *space* is the whitespace to insert for each indentation level, two
+ space characters by default.
+
+ *level* is the initial indentation level. Setting this to a higher
+ value than 0 can be used for indenting subtrees that are more deeply
+ nested inside of a document.
+ """
+ if isinstance(tree, ElementTree):
+ tree = tree.getroot()
+ if level < 0:
+ raise ValueError(f"Initial indentation level must be >= 0, got {level}")
+ if not len(tree):
+ return
+
+ # Reduce the memory consumption by reusing indentation strings.
+ indentations = ["\n" + level * space]
+
+ def _indent_children(elem, level):
+ # Start a new indentation level for the first child.
+ child_level = level + 1
+ try:
+ child_indentation = indentations[child_level]
+ except IndexError:
+ child_indentation = indentations[level] + space
+ indentations.append(child_indentation)
+
+ if not elem.text or not elem.text.strip():
+ elem.text = child_indentation
+
+ for child in elem:
+ if len(child):
+ _indent_children(child, child_level)
+ if not child.tail or not child.tail.strip():
+ child.tail = child_indentation
+
+ # Dedent after the last child by overwriting the previous indentation.
+ if not child.tail.strip():
+ child.tail = indentations[level]
+
+ _indent_children(tree, 0)
+
+
# --------------------------------------------------------------------
# parsing
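
The indent() helper shown above rewrites text/tail whitespace in place, so a tree pretty-prints with the stock serializer. Usage:

import xml.etree.ElementTree as ET

root = ET.fromstring('<a><b><c/></b></a>')
ET.indent(root, space='  ')  # two-space indentation, starting at level 0
print(ET.tostring(root, encoding='unicode'))
# <a>
#   <b>
#     <c />
#   </b>
# </a>
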
@@ -1283,7 +1283,7 @@ class XMLPullParser:
def __init__(self, events=None, *, _parser=None):
# The _parser argument is for internal use only and must not be relied
# upon in user code. It will be removed in a future release.
- # See https://bugs.python.org/issue17741 for more details.
+ # See https://bugs.python.org/issue17741 for more details.
self._events_queue = collections.deque()
self._parser = _parser or XMLParser(target=TreeBuilder())
@@ -1402,30 +1402,30 @@ class TreeBuilder:
*element_factory* is an optional element factory which is called
to create new Element instances, as necessary.
- *comment_factory* is a factory to create comments to be used instead of
- the standard factory. If *insert_comments* is false (the default),
- comments will not be inserted into the tree.
-
- *pi_factory* is a factory to create processing instructions to be used
- instead of the standard factory. If *insert_pis* is false (the default),
- processing instructions will not be inserted into the tree.
+ *comment_factory* is a factory to create comments to be used instead of
+ the standard factory. If *insert_comments* is false (the default),
+ comments will not be inserted into the tree.
+
+ *pi_factory* is a factory to create processing instructions to be used
+ instead of the standard factory. If *insert_pis* is false (the default),
+ processing instructions will not be inserted into the tree.
"""
- def __init__(self, element_factory=None, *,
- comment_factory=None, pi_factory=None,
- insert_comments=False, insert_pis=False):
+ def __init__(self, element_factory=None, *,
+ comment_factory=None, pi_factory=None,
+ insert_comments=False, insert_pis=False):
self._data = [] # data collector
self._elem = [] # element stack
self._last = None # last element
- self._root = None # root element
+ self._root = None # root element
self._tail = None # true if we're after an end tag
- if comment_factory is None:
- comment_factory = Comment
- self._comment_factory = comment_factory
- self.insert_comments = insert_comments
- if pi_factory is None:
- pi_factory = ProcessingInstruction
- self._pi_factory = pi_factory
- self.insert_pis = insert_pis
+ if comment_factory is None:
+ comment_factory = Comment
+ self._comment_factory = comment_factory
+ self.insert_comments = insert_comments
+ if pi_factory is None:
+ pi_factory = ProcessingInstruction
+ self._pi_factory = pi_factory
+ self.insert_pis = insert_pis
if element_factory is None:
element_factory = Element
self._factory = element_factory
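
The TreeBuilder hunk shows comment/PI factories that are only wired into the tree when insert_comments/insert_pis are true. Sketch:

import xml.etree.ElementTree as ET

parser = ET.XMLParser(
    target=ET.TreeBuilder(insert_comments=True, insert_pis=True))
root = ET.fromstring('<r><!-- note --><?proc data?></r>', parser=parser)
for child in root:
    # comments and PIs now appear as children whose tag is the
    # Comment / ProcessingInstruction factory
    print(child.tag, child.text)
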
@@ -1433,8 +1433,8 @@ class TreeBuilder:
def close(self):
"""Flush builder buffers and return toplevel document Element."""
assert len(self._elem) == 0, "missing end tags"
- assert self._root is not None, "missing toplevel element"
- return self._root
+ assert self._root is not None, "missing toplevel element"
+ return self._root
def _flush(self):
if self._data:
@@ -1463,8 +1463,8 @@ class TreeBuilder:
self._last = elem = self._factory(tag, attrs)
if self._elem:
self._elem[-1].append(elem)
- elif self._root is None:
- self._root = elem
+ elif self._root is None:
+ self._root = elem
self._elem.append(elem)
self._tail = 0
return elem
@@ -1483,34 +1483,34 @@ class TreeBuilder:
self._tail = 1
return self._last
- def comment(self, text):
- """Create a comment using the comment_factory.
-
- *text* is the text of the comment.
- """
- return self._handle_single(
- self._comment_factory, self.insert_comments, text)
-
- def pi(self, target, text=None):
- """Create a processing instruction using the pi_factory.
-
- *target* is the target name of the processing instruction.
- *text* is the data of the processing instruction, or ''.
- """
- return self._handle_single(
- self._pi_factory, self.insert_pis, target, text)
-
- def _handle_single(self, factory, insert, *args):
- elem = factory(*args)
- if insert:
- self._flush()
- self._last = elem
- if self._elem:
- self._elem[-1].append(elem)
- self._tail = 1
- return elem
-
-
+ def comment(self, text):
+ """Create a comment using the comment_factory.
+
+ *text* is the text of the comment.
+ """
+ return self._handle_single(
+ self._comment_factory, self.insert_comments, text)
+
+ def pi(self, target, text=None):
+ """Create a processing instruction using the pi_factory.
+
+ *target* is the target name of the processing instruction.
+ *text* is the data of the processing instruction, or ''.
+ """
+ return self._handle_single(
+ self._pi_factory, self.insert_pis, target, text)
+
+ def _handle_single(self, factory, insert, *args):
+ elem = factory(*args)
+ if insert:
+ self._flush()
+ self._last = elem
+ if self._elem:
+ self._elem[-1].append(elem)
+ self._tail = 1
+ return elem
+
+
# also see ElementTree and TreeBuilder
class XMLParser:
"""Element structure builder for XML source data based on the expat parser.
@@ -1522,7 +1522,7 @@ class XMLParser:
"""
- def __init__(self, *, target=None, encoding=None):
+ def __init__(self, *, target=None, encoding=None):
try:
from xml.parsers import expat
except ImportError:
@@ -1546,10 +1546,10 @@ class XMLParser:
parser.StartElementHandler = self._start
if hasattr(target, 'end'):
parser.EndElementHandler = self._end
- if hasattr(target, 'start_ns'):
- parser.StartNamespaceDeclHandler = self._start_ns
- if hasattr(target, 'end_ns'):
- parser.EndNamespaceDeclHandler = self._end_ns
+ if hasattr(target, 'start_ns'):
+ parser.StartNamespaceDeclHandler = self._start_ns
+ if hasattr(target, 'end_ns'):
+ parser.EndNamespaceDeclHandler = self._end_ns
if hasattr(target, 'data'):
parser.CharacterDataHandler = target.data
# miscellaneous callbacks
@@ -1591,34 +1591,34 @@ class XMLParser:
append((event, end(tag)))
parser.EndElementHandler = handler
elif event_name == "start-ns":
- # TreeBuilder does not implement .start_ns()
- if hasattr(self.target, "start_ns"):
- def handler(prefix, uri, event=event_name, append=append,
- start_ns=self._start_ns):
- append((event, start_ns(prefix, uri)))
- else:
- def handler(prefix, uri, event=event_name, append=append):
- append((event, (prefix or '', uri or '')))
+ # TreeBuilder does not implement .start_ns()
+ if hasattr(self.target, "start_ns"):
+ def handler(prefix, uri, event=event_name, append=append,
+ start_ns=self._start_ns):
+ append((event, start_ns(prefix, uri)))
+ else:
+ def handler(prefix, uri, event=event_name, append=append):
+ append((event, (prefix or '', uri or '')))
parser.StartNamespaceDeclHandler = handler
elif event_name == "end-ns":
- # TreeBuilder does not implement .end_ns()
- if hasattr(self.target, "end_ns"):
- def handler(prefix, event=event_name, append=append,
- end_ns=self._end_ns):
- append((event, end_ns(prefix)))
- else:
- def handler(prefix, event=event_name, append=append):
- append((event, None))
+ # TreeBuilder does not implement .end_ns()
+ if hasattr(self.target, "end_ns"):
+ def handler(prefix, event=event_name, append=append,
+ end_ns=self._end_ns):
+ append((event, end_ns(prefix)))
+ else:
+ def handler(prefix, event=event_name, append=append):
+ append((event, None))
parser.EndNamespaceDeclHandler = handler
- elif event_name == 'comment':
- def handler(text, event=event_name, append=append, self=self):
- append((event, self.target.comment(text)))
- parser.CommentHandler = handler
- elif event_name == 'pi':
- def handler(pi_target, data, event=event_name, append=append,
- self=self):
- append((event, self.target.pi(pi_target, data)))
- parser.ProcessingInstructionHandler = handler
+ elif event_name == 'comment':
+ def handler(text, event=event_name, append=append, self=self):
+ append((event, self.target.comment(text)))
+ parser.CommentHandler = handler
+ elif event_name == 'pi':
+ def handler(pi_target, data, event=event_name, append=append,
+ self=self):
+ append((event, self.target.pi(pi_target, data)))
+ parser.ProcessingInstructionHandler = handler
else:
raise ValueError("unknown event %r" % event_name)
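
The dispatch code above lets XMLPullParser deliver start-ns/end-ns even when the target (like TreeBuilder) implements no namespace hooks, plus the comment and pi events. For example:

import xml.etree.ElementTree as ET

pull = ET.XMLPullParser(events=('start-ns', 'comment', 'end-ns'))
pull.feed('<a xmlns:p="urn:x"><!-- hi --></a>')
for event, payload in pull.read_events():
    print(event, payload)
# start-ns ('p', 'urn:x'), then the comment element, then end-ns None
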
@@ -1639,12 +1639,12 @@ class XMLParser:
self._names[key] = name
return name
- def _start_ns(self, prefix, uri):
- return self.target.start_ns(prefix or '', uri or '')
-
- def _end_ns(self, prefix):
- return self.target.end_ns(prefix or '')
-
+ def _start_ns(self, prefix, uri):
+ return self.target.start_ns(prefix or '', uri or '')
+
+ def _end_ns(self, prefix):
+ return self.target.end_ns(prefix or '')
+
def _start(self, tag, attr_list):
# Handler for expat's StartElementHandler. Since ordered_attributes
# is set, the attributes are reported as a list of alternating
@@ -1706,25 +1706,25 @@ class XMLParser:
return
if hasattr(self.target, "doctype"):
self.target.doctype(name, pubid, system[1:-1])
- elif hasattr(self, "doctype"):
- warnings.warn(
- "The doctype() method of XMLParser is ignored. "
- "Define doctype() method on the TreeBuilder target.",
- RuntimeWarning)
-
+ elif hasattr(self, "doctype"):
+ warnings.warn(
+ "The doctype() method of XMLParser is ignored. "
+ "Define doctype() method on the TreeBuilder target.",
+ RuntimeWarning)
+
self._doctype = None
def feed(self, data):
"""Feed encoded data to parser."""
try:
- self.parser.Parse(data, False)
+ self.parser.Parse(data, False)
except self._error as v:
self._raiseerror(v)
def close(self):
"""Finish feeding data to parser and return element structure."""
try:
- self.parser.Parse(b"", True) # end of data
+ self.parser.Parse(b"", True) # end of data
except self._error as v:
self._raiseerror(v)
try:
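
feed()/close() forward to expat with the is-final flag; close() feeds an empty final chunk. Incremental use of the same parser object:

import xml.etree.ElementTree as ET

parser = ET.XMLParser()
for chunk in ('<root><a>', '1</a></root>'):  # e.g. data arriving in pieces
    parser.feed(chunk)
root = parser.close()  # returns the finished Element tree
print(root[0].text)    # '1'
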
@@ -1739,341 +1739,341 @@ class XMLParser:
del self.target, self._target
-# --------------------------------------------------------------------
-# C14N 2.0
-
-def canonicalize(xml_data=None, *, out=None, from_file=None, **options):
- """Convert XML to its C14N 2.0 serialised form.
-
- If *out* is provided, it must be a file or file-like object that receives
- the serialised canonical XML output (text, not bytes) through its ``.write()``
- method. To write to a file, open it in text mode with encoding "utf-8".
- If *out* is not provided, this function returns the output as a text string.
-
- Either *xml_data* (an XML string) or *from_file* (a file path or
- file-like object) must be provided as input.
-
- The configuration options are the same as for the ``C14NWriterTarget``.
- """
- if xml_data is None and from_file is None:
- raise ValueError("Either 'xml_data' or 'from_file' must be provided as input")
- sio = None
- if out is None:
- sio = out = io.StringIO()
-
- parser = XMLParser(target=C14NWriterTarget(out.write, **options))
-
- if xml_data is not None:
- parser.feed(xml_data)
- parser.close()
- elif from_file is not None:
- parse(from_file, parser=parser)
-
- return sio.getvalue() if sio is not None else None
-
-
-_looks_like_prefix_name = re.compile(r'^\w+:\w+$', re.UNICODE).match
-
-
-class C14NWriterTarget:
- """
- Canonicalization writer target for the XMLParser.
-
- Serialises parse events to XML C14N 2.0.
-
- The *write* function is used for writing out the resulting data stream
- as text (not bytes). To write to a file, open it in text mode with encoding
- "utf-8" and pass its ``.write`` method.
-
- Configuration options:
-
- - *with_comments*: set to true to include comments
- - *strip_text*: set to true to strip whitespace before and after text content
- - *rewrite_prefixes*: set to true to replace namespace prefixes by "n{number}"
- - *qname_aware_tags*: a set of qname aware tag names in which prefixes
- should be replaced in text content
- - *qname_aware_attrs*: a set of qname aware attribute names in which prefixes
- should be replaced in text content
- - *exclude_attrs*: a set of attribute names that should not be serialised
- - *exclude_tags*: a set of tag names that should not be serialised
- """
- def __init__(self, write, *,
- with_comments=False, strip_text=False, rewrite_prefixes=False,
- qname_aware_tags=None, qname_aware_attrs=None,
- exclude_attrs=None, exclude_tags=None):
- self._write = write
- self._data = []
- self._with_comments = with_comments
- self._strip_text = strip_text
- self._exclude_attrs = set(exclude_attrs) if exclude_attrs else None
- self._exclude_tags = set(exclude_tags) if exclude_tags else None
-
- self._rewrite_prefixes = rewrite_prefixes
- if qname_aware_tags:
- self._qname_aware_tags = set(qname_aware_tags)
- else:
- self._qname_aware_tags = None
- if qname_aware_attrs:
- self._find_qname_aware_attrs = set(qname_aware_attrs).intersection
- else:
- self._find_qname_aware_attrs = None
-
- # Stack with globally and newly declared namespaces as (uri, prefix) pairs.
- self._declared_ns_stack = [[
- ("http://www.w3.org/XML/1998/namespace", "xml"),
- ]]
- # Stack with user declared namespace prefixes as (uri, prefix) pairs.
- self._ns_stack = []
- if not rewrite_prefixes:
- self._ns_stack.append(list(_namespace_map.items()))
- self._ns_stack.append([])
- self._prefix_map = {}
- self._preserve_space = [False]
- self._pending_start = None
- self._root_seen = False
- self._root_done = False
- self._ignored_depth = 0
-
- def _iter_namespaces(self, ns_stack, _reversed=reversed):
- for namespaces in _reversed(ns_stack):
- if namespaces: # almost no element declares new namespaces
- yield from namespaces
-
- def _resolve_prefix_name(self, prefixed_name):
- prefix, name = prefixed_name.split(':', 1)
- for uri, p in self._iter_namespaces(self._ns_stack):
- if p == prefix:
- return f'{{{uri}}}{name}'
- raise ValueError(f'Prefix {prefix} of QName "{prefixed_name}" is not declared in scope')
-
- def _qname(self, qname, uri=None):
- if uri is None:
- uri, tag = qname[1:].rsplit('}', 1) if qname[:1] == '{' else ('', qname)
- else:
- tag = qname
-
- prefixes_seen = set()
- for u, prefix in self._iter_namespaces(self._declared_ns_stack):
- if u == uri and prefix not in prefixes_seen:
- return f'{prefix}:{tag}' if prefix else tag, tag, uri
- prefixes_seen.add(prefix)
-
- # Not declared yet => add new declaration.
- if self._rewrite_prefixes:
- if uri in self._prefix_map:
- prefix = self._prefix_map[uri]
- else:
- prefix = self._prefix_map[uri] = f'n{len(self._prefix_map)}'
- self._declared_ns_stack[-1].append((uri, prefix))
- return f'{prefix}:{tag}', tag, uri
-
- if not uri and '' not in prefixes_seen:
- # No default namespace declared => no prefix needed.
- return tag, tag, uri
-
- for u, prefix in self._iter_namespaces(self._ns_stack):
- if u == uri:
- self._declared_ns_stack[-1].append((uri, prefix))
- return f'{prefix}:{tag}' if prefix else tag, tag, uri
-
- if not uri:
- # As soon as a default namespace is defined,
- # anything that has no namespace (and thus, no prefix) goes there.
- return tag, tag, uri
-
- raise ValueError(f'Namespace "{uri}" is not declared in scope')
-
- def data(self, data):
- if not self._ignored_depth:
- self._data.append(data)
-
- def _flush(self, _join_text=''.join):
- data = _join_text(self._data)
- del self._data[:]
- if self._strip_text and not self._preserve_space[-1]:
- data = data.strip()
- if self._pending_start is not None:
- args, self._pending_start = self._pending_start, None
- qname_text = data if data and _looks_like_prefix_name(data) else None
- self._start(*args, qname_text)
- if qname_text is not None:
- return
- if data and self._root_seen:
- self._write(_escape_cdata_c14n(data))
-
- def start_ns(self, prefix, uri):
- if self._ignored_depth:
- return
- # we may have to resolve qnames in text content
- if self._data:
- self._flush()
- self._ns_stack[-1].append((uri, prefix))
-
- def start(self, tag, attrs):
- if self._exclude_tags is not None and (
- self._ignored_depth or tag in self._exclude_tags):
- self._ignored_depth += 1
- return
- if self._data:
- self._flush()
-
- new_namespaces = []
- self._declared_ns_stack.append(new_namespaces)
-
- if self._qname_aware_tags is not None and tag in self._qname_aware_tags:
- # Need to parse text first to see if it requires a prefix declaration.
- self._pending_start = (tag, attrs, new_namespaces)
- return
- self._start(tag, attrs, new_namespaces)
-
- def _start(self, tag, attrs, new_namespaces, qname_text=None):
- if self._exclude_attrs is not None and attrs:
- attrs = {k: v for k, v in attrs.items() if k not in self._exclude_attrs}
-
- qnames = {tag, *attrs}
- resolved_names = {}
-
- # Resolve prefixes in attribute and tag text.
- if qname_text is not None:
- qname = resolved_names[qname_text] = self._resolve_prefix_name(qname_text)
- qnames.add(qname)
- if self._find_qname_aware_attrs is not None and attrs:
- qattrs = self._find_qname_aware_attrs(attrs)
- if qattrs:
- for attr_name in qattrs:
- value = attrs[attr_name]
- if _looks_like_prefix_name(value):
- qname = resolved_names[value] = self._resolve_prefix_name(value)
- qnames.add(qname)
- else:
- qattrs = None
- else:
- qattrs = None
-
- # Assign prefixes in lexicographical order of used URIs.
- parse_qname = self._qname
- parsed_qnames = {n: parse_qname(n) for n in sorted(
- qnames, key=lambda n: n.split('}', 1))}
-
- # Write namespace declarations in prefix order ...
- if new_namespaces:
- attr_list = [
- ('xmlns:' + prefix if prefix else 'xmlns', uri)
- for uri, prefix in new_namespaces
- ]
- attr_list.sort()
- else:
- # almost always empty
- attr_list = []
-
- # ... followed by attributes in URI+name order
- if attrs:
- for k, v in sorted(attrs.items()):
- if qattrs is not None and k in qattrs and v in resolved_names:
- v = parsed_qnames[resolved_names[v]][0]
- attr_qname, attr_name, uri = parsed_qnames[k]
- # No prefix for attributes in default ('') namespace.
- attr_list.append((attr_qname if uri else attr_name, v))
-
- # Honour xml:space attributes.
- space_behaviour = attrs.get('{http://www.w3.org/XML/1998/namespace}space')
- self._preserve_space.append(
- space_behaviour == 'preserve' if space_behaviour
- else self._preserve_space[-1])
-
- # Write the tag.
- write = self._write
- write('<' + parsed_qnames[tag][0])
- if attr_list:
- write(''.join([f' {k}="{_escape_attrib_c14n(v)}"' for k, v in attr_list]))
- write('>')
-
- # Write the resolved qname text content.
- if qname_text is not None:
- write(_escape_cdata_c14n(parsed_qnames[resolved_names[qname_text]][0]))
-
- self._root_seen = True
- self._ns_stack.append([])
-
- def end(self, tag):
- if self._ignored_depth:
- self._ignored_depth -= 1
- return
- if self._data:
- self._flush()
- self._write(f'</{self._qname(tag)[0]}>')
- self._preserve_space.pop()
- self._root_done = len(self._preserve_space) == 1
- self._declared_ns_stack.pop()
- self._ns_stack.pop()
-
- def comment(self, text):
- if not self._with_comments:
- return
- if self._ignored_depth:
- return
- if self._root_done:
- self._write('\n')
- elif self._root_seen and self._data:
- self._flush()
- self._write(f'<!--{_escape_cdata_c14n(text)}-->')
- if not self._root_seen:
- self._write('\n')
-
- def pi(self, target, data):
- if self._ignored_depth:
- return
- if self._root_done:
- self._write('\n')
- elif self._root_seen and self._data:
- self._flush()
- self._write(
- f'<?{target} {_escape_cdata_c14n(data)}?>' if data else f'<?{target}?>')
- if not self._root_seen:
- self._write('\n')
-
-
-def _escape_cdata_c14n(text):
- # escape character data
- try:
- # it's worth avoiding do-nothing calls for strings that are
- # shorter than 500 characters, or so. assume that's, by far,
- # the most common case in most applications.
- if '&' in text:
- text = text.replace('&', '&amp;')
- if '<' in text:
- text = text.replace('<', '&lt;')
- if '>' in text:
- text = text.replace('>', '&gt;')
- if '\r' in text:
- text = text.replace('\r', '&#xD;')
- return text
- except (TypeError, AttributeError):
- _raise_serialization_error(text)
-
-
-def _escape_attrib_c14n(text):
- # escape attribute value
- try:
- if '&' in text:
- text = text.replace('&', '&amp;')
- if '<' in text:
- text = text.replace('<', '&lt;')
- if '"' in text:
- text = text.replace('"', '&quot;')
- if '\t' in text:
- text = text.replace('\t', '&#x9;')
- if '\n' in text:
- text = text.replace('\n', '&#xA;')
- if '\r' in text:
- text = text.replace('\r', '&#xD;')
- return text
- except (TypeError, AttributeError):
- _raise_serialization_error(text)
-
-
-# --------------------------------------------------------------------
-
+# --------------------------------------------------------------------
+# C14N 2.0
+
+def canonicalize(xml_data=None, *, out=None, from_file=None, **options):
+ """Convert XML to its C14N 2.0 serialised form.
+
+ If *out* is provided, it must be a file or file-like object that receives
+ the serialised canonical XML output (text, not bytes) through its ``.write()``
+ method. To write to a file, open it in text mode with encoding "utf-8".
+ If *out* is not provided, this function returns the output as a text string.
+
+ Either *xml_data* (an XML string) or *from_file* (a file path or
+ file-like object) must be provided as input.
+
+ The configuration options are the same as for the ``C14NWriterTarget``.
+ """
+ if xml_data is None and from_file is None:
+ raise ValueError("Either 'xml_data' or 'from_file' must be provided as input")
+ sio = None
+ if out is None:
+ sio = out = io.StringIO()
+
+ parser = XMLParser(target=C14NWriterTarget(out.write, **options))
+
+ if xml_data is not None:
+ parser.feed(xml_data)
+ parser.close()
+ elif from_file is not None:
+ parse(from_file, parser=parser)
+
+ return sio.getvalue() if sio is not None else None
+
+
+_looks_like_prefix_name = re.compile(r'^\w+:\w+$', re.UNICODE).match
+
+
+class C14NWriterTarget:
+ """
+ Canonicalization writer target for the XMLParser.
+
+ Serialises parse events to XML C14N 2.0.
+
+ The *write* function is used for writing out the resulting data stream
+ as text (not bytes). To write to a file, open it in text mode with encoding
+ "utf-8" and pass its ``.write`` method.
+
+ Configuration options:
+
+ - *with_comments*: set to true to include comments
+ - *strip_text*: set to true to strip whitespace before and after text content
+ - *rewrite_prefixes*: set to true to replace namespace prefixes by "n{number}"
+ - *qname_aware_tags*: a set of qname aware tag names in which prefixes
+ should be replaced in text content
+ - *qname_aware_attrs*: a set of qname aware attribute names in which prefixes
+ should be replaced in text content
+ - *exclude_attrs*: a set of attribute names that should not be serialised
+ - *exclude_tags*: a set of tag names that should not be serialised
+ """
+ def __init__(self, write, *,
+ with_comments=False, strip_text=False, rewrite_prefixes=False,
+ qname_aware_tags=None, qname_aware_attrs=None,
+ exclude_attrs=None, exclude_tags=None):
+ self._write = write
+ self._data = []
+ self._with_comments = with_comments
+ self._strip_text = strip_text
+ self._exclude_attrs = set(exclude_attrs) if exclude_attrs else None
+ self._exclude_tags = set(exclude_tags) if exclude_tags else None
+
+ self._rewrite_prefixes = rewrite_prefixes
+ if qname_aware_tags:
+ self._qname_aware_tags = set(qname_aware_tags)
+ else:
+ self._qname_aware_tags = None
+ if qname_aware_attrs:
+ self._find_qname_aware_attrs = set(qname_aware_attrs).intersection
+ else:
+ self._find_qname_aware_attrs = None
+
+ # Stack with globally and newly declared namespaces as (uri, prefix) pairs.
+ self._declared_ns_stack = [[
+ ("http://www.w3.org/XML/1998/namespace", "xml"),
+ ]]
+ # Stack with user declared namespace prefixes as (uri, prefix) pairs.
+ self._ns_stack = []
+ if not rewrite_prefixes:
+ self._ns_stack.append(list(_namespace_map.items()))
+ self._ns_stack.append([])
+ self._prefix_map = {}
+ self._preserve_space = [False]
+ self._pending_start = None
+ self._root_seen = False
+ self._root_done = False
+ self._ignored_depth = 0
+
+ def _iter_namespaces(self, ns_stack, _reversed=reversed):
+ for namespaces in _reversed(ns_stack):
+ if namespaces: # almost no element declares new namespaces
+ yield from namespaces
+
+ def _resolve_prefix_name(self, prefixed_name):
+ prefix, name = prefixed_name.split(':', 1)
+ for uri, p in self._iter_namespaces(self._ns_stack):
+ if p == prefix:
+ return f'{{{uri}}}{name}'
+ raise ValueError(f'Prefix {prefix} of QName "{prefixed_name}" is not declared in scope')
+
+ def _qname(self, qname, uri=None):
+ if uri is None:
+ uri, tag = qname[1:].rsplit('}', 1) if qname[:1] == '{' else ('', qname)
+ else:
+ tag = qname
+
+ prefixes_seen = set()
+ for u, prefix in self._iter_namespaces(self._declared_ns_stack):
+ if u == uri and prefix not in prefixes_seen:
+ return f'{prefix}:{tag}' if prefix else tag, tag, uri
+ prefixes_seen.add(prefix)
+
+ # Not declared yet => add new declaration.
+ if self._rewrite_prefixes:
+ if uri in self._prefix_map:
+ prefix = self._prefix_map[uri]
+ else:
+ prefix = self._prefix_map[uri] = f'n{len(self._prefix_map)}'
+ self._declared_ns_stack[-1].append((uri, prefix))
+ return f'{prefix}:{tag}', tag, uri
+
+ if not uri and '' not in prefixes_seen:
+ # No default namespace declared => no prefix needed.
+ return tag, tag, uri
+
+ for u, prefix in self._iter_namespaces(self._ns_stack):
+ if u == uri:
+ self._declared_ns_stack[-1].append((uri, prefix))
+ return f'{prefix}:{tag}' if prefix else tag, tag, uri
+
+ if not uri:
+ # As soon as a default namespace is defined,
+ # anything that has no namespace (and thus, no prefix) goes there.
+ return tag, tag, uri
+
+ raise ValueError(f'Namespace "{uri}" is not declared in scope')
+
+ def data(self, data):
+ if not self._ignored_depth:
+ self._data.append(data)
+
+ def _flush(self, _join_text=''.join):
+ data = _join_text(self._data)
+ del self._data[:]
+ if self._strip_text and not self._preserve_space[-1]:
+ data = data.strip()
+ if self._pending_start is not None:
+ args, self._pending_start = self._pending_start, None
+ qname_text = data if data and _looks_like_prefix_name(data) else None
+ self._start(*args, qname_text)
+ if qname_text is not None:
+ return
+ if data and self._root_seen:
+ self._write(_escape_cdata_c14n(data))
+
+ def start_ns(self, prefix, uri):
+ if self._ignored_depth:
+ return
+ # we may have to resolve qnames in text content
+ if self._data:
+ self._flush()
+ self._ns_stack[-1].append((uri, prefix))
+
+ def start(self, tag, attrs):
+ if self._exclude_tags is not None and (
+ self._ignored_depth or tag in self._exclude_tags):
+ self._ignored_depth += 1
+ return
+ if self._data:
+ self._flush()
+
+ new_namespaces = []
+ self._declared_ns_stack.append(new_namespaces)
+
+ if self._qname_aware_tags is not None and tag in self._qname_aware_tags:
+ # Need to parse text first to see if it requires a prefix declaration.
+ self._pending_start = (tag, attrs, new_namespaces)
+ return
+ self._start(tag, attrs, new_namespaces)
+
+ def _start(self, tag, attrs, new_namespaces, qname_text=None):
+ if self._exclude_attrs is not None and attrs:
+ attrs = {k: v for k, v in attrs.items() if k not in self._exclude_attrs}
+
+ qnames = {tag, *attrs}
+ resolved_names = {}
+
+ # Resolve prefixes in attribute and tag text.
+ if qname_text is not None:
+ qname = resolved_names[qname_text] = self._resolve_prefix_name(qname_text)
+ qnames.add(qname)
+ if self._find_qname_aware_attrs is not None and attrs:
+ qattrs = self._find_qname_aware_attrs(attrs)
+ if qattrs:
+ for attr_name in qattrs:
+ value = attrs[attr_name]
+ if _looks_like_prefix_name(value):
+ qname = resolved_names[value] = self._resolve_prefix_name(value)
+ qnames.add(qname)
+ else:
+ qattrs = None
+ else:
+ qattrs = None
+
+ # Assign prefixes in lexicographical order of used URIs.
+ parse_qname = self._qname
+ parsed_qnames = {n: parse_qname(n) for n in sorted(
+ qnames, key=lambda n: n.split('}', 1))}
+
+ # Write namespace declarations in prefix order ...
+ if new_namespaces:
+ attr_list = [
+ ('xmlns:' + prefix if prefix else 'xmlns', uri)
+ for uri, prefix in new_namespaces
+ ]
+ attr_list.sort()
+ else:
+ # almost always empty
+ attr_list = []
+
+ # ... followed by attributes in URI+name order
+ if attrs:
+ for k, v in sorted(attrs.items()):
+ if qattrs is not None and k in qattrs and v in resolved_names:
+ v = parsed_qnames[resolved_names[v]][0]
+ attr_qname, attr_name, uri = parsed_qnames[k]
+ # No prefix for attributes in default ('') namespace.
+ attr_list.append((attr_qname if uri else attr_name, v))
+
+ # Honour xml:space attributes.
+ space_behaviour = attrs.get('{http://www.w3.org/XML/1998/namespace}space')
+ self._preserve_space.append(
+ space_behaviour == 'preserve' if space_behaviour
+ else self._preserve_space[-1])
+
+ # Write the tag.
+ write = self._write
+ write('<' + parsed_qnames[tag][0])
+ if attr_list:
+ write(''.join([f' {k}="{_escape_attrib_c14n(v)}"' for k, v in attr_list]))
+ write('>')
+
+ # Write the resolved qname text content.
+ if qname_text is not None:
+ write(_escape_cdata_c14n(parsed_qnames[resolved_names[qname_text]][0]))
+
+ self._root_seen = True
+ self._ns_stack.append([])
+
+ def end(self, tag):
+ if self._ignored_depth:
+ self._ignored_depth -= 1
+ return
+ if self._data:
+ self._flush()
+ self._write(f'</{self._qname(tag)[0]}>')
+ self._preserve_space.pop()
+ self._root_done = len(self._preserve_space) == 1
+ self._declared_ns_stack.pop()
+ self._ns_stack.pop()
+
+ def comment(self, text):
+ if not self._with_comments:
+ return
+ if self._ignored_depth:
+ return
+ if self._root_done:
+ self._write('\n')
+ elif self._root_seen and self._data:
+ self._flush()
+ self._write(f'<!--{_escape_cdata_c14n(text)}-->')
+ if not self._root_seen:
+ self._write('\n')
+
+ def pi(self, target, data):
+ if self._ignored_depth:
+ return
+ if self._root_done:
+ self._write('\n')
+ elif self._root_seen and self._data:
+ self._flush()
+ self._write(
+ f'<?{target} {_escape_cdata_c14n(data)}?>' if data else f'<?{target}?>')
+ if not self._root_seen:
+ self._write('\n')
+
+
+def _escape_cdata_c14n(text):
+ # escape character data
+ try:
+ # it's worth avoiding do-nothing calls for strings that are
+ # shorter than 500 characters, or so. assume that's, by far,
+ # the most common case in most applications.
+ if '&' in text:
+ text = text.replace('&', '&amp;')
+ if '<' in text:
+ text = text.replace('<', '&lt;')
+ if '>' in text:
+ text = text.replace('>', '&gt;')
+ if '\r' in text:
+ text = text.replace('\r', '&#xD;')
+ return text
+ except (TypeError, AttributeError):
+ _raise_serialization_error(text)
+
+
+def _escape_attrib_c14n(text):
+ # escape attribute value
+ try:
+ if '&' in text:
+ text = text.replace('&', '&amp;')
+ if '<' in text:
+ text = text.replace('<', '&lt;')
+ if '"' in text:
+ text = text.replace('"', '&quot;')
+ if '\t' in text:
+ text = text.replace('\t', '&#x9;')
+ if '\n' in text:
+ text = text.replace('\n', '&#xA;')
+ if '\r' in text:
+ text = text.replace('\r', '&#xD;')
+ return text
+ except (TypeError, AttributeError):
+ _raise_serialization_error(text)
+
+
+# --------------------------------------------------------------------
+
# Import the C accelerators
try:
# Element is going to be shadowed by the C implementation. We need to keep
@@ -2081,10 +2081,10 @@ try:
# (see tests)
_Element_Py = Element
- # Element, SubElement, ParseError, TreeBuilder, XMLParser, _set_factories
+ # Element, SubElement, ParseError, TreeBuilder, XMLParser, _set_factories
from _elementtree import *
- from _elementtree import _set_factories
+ from _elementtree import _set_factories
except ImportError:
pass
-else:
- _set_factories(Comment, ProcessingInstruction)
+else:
+ _set_factories(Comment, ProcessingInstruction)
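
The large moved block above is the C14N 2.0 machinery: canonicalize() drives an XMLParser whose target is a C14NWriterTarget writing text. Quick use:

import xml.etree.ElementTree as ET

xml_in = '<a  z="2" b="1" ><b/></a>'
print(ET.canonicalize(xml_in))
# <a b="1" z="2"><b></b></a>  (attributes sorted, empty tags expanded)
print(ET.canonicalize(xml_in, strip_text=True, with_comments=True))
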
diff --git a/contrib/tools/python3/src/Lib/xml/etree/__init__.py b/contrib/tools/python3/src/Lib/xml/etree/__init__.py
index e2ec53421d..998d37c01f 100644
--- a/contrib/tools/python3/src/Lib/xml/etree/__init__.py
+++ b/contrib/tools/python3/src/Lib/xml/etree/__init__.py
@@ -30,4 +30,4 @@
# --------------------------------------------------------------------
# Licensed to PSF under a Contributor Agreement.
-# See https://www.python.org/psf/license for licensing details.
+# See https://www.python.org/psf/license for licensing details.
diff --git a/contrib/tools/python3/src/Lib/xml/sax/__init__.py b/contrib/tools/python3/src/Lib/xml/sax/__init__.py
index 17b75879eb..c29223cf40 100644
--- a/contrib/tools/python3/src/Lib/xml/sax/__init__.py
+++ b/contrib/tools/python3/src/Lib/xml/sax/__init__.py
@@ -67,18 +67,18 @@ if sys.platform[:4] == "java" and sys.registry.containsKey(_key):
default_parser_list = sys.registry.getProperty(_key).split(",")
-def make_parser(parser_list=()):
+def make_parser(parser_list=()):
"""Creates and returns a SAX parser.
Creates the first parser it is able to instantiate of the ones
- given in the iterable created by chaining parser_list and
- default_parser_list. The iterables must contain the names of Python
+ given in the iterable created by chaining parser_list and
+ default_parser_list. The iterables must contain the names of Python
modules containing both a SAX parser and a create_parser function."""
- for parser_name in list(parser_list) + default_parser_list:
+ for parser_name in list(parser_list) + default_parser_list:
try:
return _create_parser(parser_name)
- except ImportError:
+ except ImportError:
import sys
if parser_name in sys.modules:
# The parser module was found, but importing it
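make_parser walks the chained candidate list and returns the first module that both imports and exposes create_parser; a minimal usage sketch (the module name is the stdlib expat reader):

    from xml.sax import make_parser
    parser = make_parser()                         # first usable default parser
    parser = make_parser(['xml.sax.expatreader'])  # prefer expat explicitly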
diff --git a/contrib/tools/python3/src/Lib/xml/sax/expatreader.py b/contrib/tools/python3/src/Lib/xml/sax/expatreader.py
index e334ac9fea..f1220de45d 100644
--- a/contrib/tools/python3/src/Lib/xml/sax/expatreader.py
+++ b/contrib/tools/python3/src/Lib/xml/sax/expatreader.py
@@ -93,7 +93,7 @@ class ExpatParser(xmlreader.IncrementalParser, xmlreader.Locator):
self._parser = None
self._namespaces = namespaceHandling
self._lex_handler_prop = None
- self._parsing = False
+ self._parsing = False
self._entity_stack = []
self._external_ges = 0
self._interning = None
@@ -203,10 +203,10 @@ class ExpatParser(xmlreader.IncrementalParser, xmlreader.Locator):
# IncrementalParser methods
- def feed(self, data, isFinal=False):
+ def feed(self, data, isFinal=False):
if not self._parsing:
self.reset()
- self._parsing = True
+ self._parsing = True
self._cont_handler.startDocument()
try:
@@ -237,13 +237,13 @@ class ExpatParser(xmlreader.IncrementalParser, xmlreader.Locator):
# If we are completing an external entity, do nothing here
return
try:
- self.feed(b"", isFinal=True)
+ self.feed(b"", isFinal=True)
self._cont_handler.endDocument()
- self._parsing = False
+ self._parsing = False
# break cycle created by expat handlers pointing to our methods
self._parser = None
finally:
- self._parsing = False
+ self._parsing = False
if self._parser is not None:
# Keep ErrorColumnNumber and ErrorLineNumber after closing.
parser = _ClosedParser()
@@ -307,7 +307,7 @@ class ExpatParser(xmlreader.IncrementalParser, xmlreader.Locator):
self._parser.SetParamEntityParsing(
expat.XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE)
- self._parsing = False
+ self._parsing = False
self._entity_stack = []
# Locator methods
diff --git a/contrib/tools/python3/src/Lib/xml/sax/saxutils.py b/contrib/tools/python3/src/Lib/xml/sax/saxutils.py
index c1612ea1ce..9e04b5daf7 100644
--- a/contrib/tools/python3/src/Lib/xml/sax/saxutils.py
+++ b/contrib/tools/python3/src/Lib/xml/sax/saxutils.py
@@ -56,7 +56,7 @@ def quoteattr(data, entities={}):
the optional entities parameter. The keys and values must all be
strings; each key will be replaced with its corresponding value.
"""
- entities = {**entities, '\n': '&#10;', '\r': '&#13;', '\t':'&#9;'}
+ entities = {**entities, '\n': '&#10;', '\r': '&#13;', '\t':'&#9;'}
data = escape(data, entities)
if '"' in data:
if "'" in data:
@@ -339,8 +339,8 @@ def prepare_input_source(source, base=""):
"""This function takes an InputSource and an optional base URL and
returns a fully resolved InputSource object ready for reading."""
- if isinstance(source, os.PathLike):
- source = os.fspath(source)
+ if isinstance(source, os.PathLike):
+ source = os.fspath(source)
if isinstance(source, str):
source = xmlreader.InputSource(source)
elif hasattr(source, "read"):
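The restored os.PathLike branch means a pathlib.Path can be passed directly; a minimal sketch (the file name is hypothetical):

    from pathlib import Path
    from xml.sax.saxutils import prepare_input_source
    src = prepare_input_source(Path("doc.xml"))  # fspath'd, then wrapped in an InputSource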
diff --git a/contrib/tools/python3/src/Lib/xmlrpc/client.py b/contrib/tools/python3/src/Lib/xmlrpc/client.py
index a614cef6ab..66edf5d388 100644
--- a/contrib/tools/python3/src/Lib/xmlrpc/client.py
+++ b/contrib/tools/python3/src/Lib/xmlrpc/client.py
@@ -186,7 +186,7 @@ INTERNAL_ERROR = -32603
class Error(Exception):
"""Base class for client errors."""
- __str__ = object.__str__
+ __str__ = object.__str__
##
# Indicates an HTTP-level protocol error. This is raised by the HTTP
@@ -264,22 +264,22 @@ boolean = Boolean = bool
# Issue #13305: different format codes across platforms
_day0 = datetime(1, 1, 1)
-def _try(fmt):
- try:
- return _day0.strftime(fmt) == '0001'
- except ValueError:
- return False
-if _try('%Y'): # Mac OS X
+def _try(fmt):
+ try:
+ return _day0.strftime(fmt) == '0001'
+ except ValueError:
+ return False
+if _try('%Y'): # Mac OS X
def _iso8601_format(value):
return value.strftime("%Y%m%dT%H:%M:%S")
-elif _try('%4Y'): # Linux
+elif _try('%4Y'): # Linux
def _iso8601_format(value):
return value.strftime("%4Y%m%dT%H:%M:%S")
else:
def _iso8601_format(value):
return value.strftime("%Y%m%dT%H:%M:%S").zfill(17)
del _day0
-del _try
+del _try
def _strftime(value):
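The probe exists because strftime('%Y') does not zero-pad years below 1000 on all platforms; the zfill(17) fallback repairs the width, e.g.:

    from datetime import datetime
    d = datetime(1, 1, 1)
    # if %Y yields '1' the result is 14 characters; zfill restores '00010101T00:00:00'
    iso = d.strftime("%Y%m%dT%H:%M:%S").zfill(17)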
@@ -319,38 +319,38 @@ class DateTime:
s = self.timetuple()
o = other.timetuple()
else:
- s = self
- o = NotImplemented
+ s = self
+ o = NotImplemented
return s, o
def __lt__(self, other):
s, o = self.make_comparable(other)
- if o is NotImplemented:
- return NotImplemented
+ if o is NotImplemented:
+ return NotImplemented
return s < o
def __le__(self, other):
s, o = self.make_comparable(other)
- if o is NotImplemented:
- return NotImplemented
+ if o is NotImplemented:
+ return NotImplemented
return s <= o
def __gt__(self, other):
s, o = self.make_comparable(other)
- if o is NotImplemented:
- return NotImplemented
+ if o is NotImplemented:
+ return NotImplemented
return s > o
def __ge__(self, other):
s, o = self.make_comparable(other)
- if o is NotImplemented:
- return NotImplemented
+ if o is NotImplemented:
+ return NotImplemented
return s >= o
def __eq__(self, other):
s, o = self.make_comparable(other)
- if o is NotImplemented:
- return NotImplemented
+ if o is NotImplemented:
+ return NotImplemented
return s == o
def timetuple(self):
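Returning NotImplemented instead of raising lets Python fall back to its default comparison machinery; a sketch:

    from xmlrpc.client import DateTime
    DateTime("20240101T00:00:00") == object()  # False, via the reflected-comparison fallback
    # an ordering comparison against object() would raise TypeError instead,
    # once both sides have returned NotImplemented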
@@ -448,7 +448,7 @@ class ExpatParser:
target.xml(encoding, None)
def feed(self, data):
- self._parser.Parse(data, False)
+ self._parser.Parse(data, False)
def close(self):
try:
@@ -1141,12 +1141,12 @@ class Transport:
# that they can decode such a request
encode_threshold = None #None = don't encode
- def __init__(self, use_datetime=False, use_builtin_types=False,
- *, headers=()):
+ def __init__(self, use_datetime=False, use_builtin_types=False,
+ *, headers=()):
self._use_datetime = use_datetime
self._use_builtin_types = use_builtin_types
self._connection = (None, None)
- self._headers = list(headers)
+ self._headers = list(headers)
self._extra_headers = []
##
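The headers keyword added above is stored as a list and merged into every outgoing request; a minimal sketch (header name and value are hypothetical):

    import xmlrpc.client
    transport = xmlrpc.client.Transport(headers=[("X-Trace-Id", "abc123")])
    # send_request() now emits this header alongside any per-call extras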
@@ -1226,7 +1226,7 @@ class Transport:
if isinstance(host, tuple):
host, x509 = host
- auth, host = urllib.parse._splituser(host)
+ auth, host = urllib.parse._splituser(host)
if auth:
auth = urllib.parse.unquote_to_bytes(auth)
@@ -1277,7 +1277,7 @@ class Transport:
def send_request(self, host, handler, request_body, debug):
connection = self.make_connection(host)
- headers = self._headers + self._extra_headers
+ headers = self._headers + self._extra_headers
if debug:
connection.set_debuglevel(1)
if self.accept_gzip_encoding and gzip:
@@ -1359,11 +1359,11 @@ class Transport:
class SafeTransport(Transport):
"""Handles an HTTPS transaction to an XML-RPC server."""
- def __init__(self, use_datetime=False, use_builtin_types=False,
- *, headers=(), context=None):
- super().__init__(use_datetime=use_datetime,
- use_builtin_types=use_builtin_types,
- headers=headers)
+ def __init__(self, use_datetime=False, use_builtin_types=False,
+ *, headers=(), context=None):
+ super().__init__(use_datetime=use_datetime,
+ use_builtin_types=use_builtin_types,
+ headers=headers)
self.context = context
# FIXME: mostly untested
@@ -1423,20 +1423,20 @@ class ServerProxy:
def __init__(self, uri, transport=None, encoding=None, verbose=False,
allow_none=False, use_datetime=False, use_builtin_types=False,
- *, headers=(), context=None):
+ *, headers=(), context=None):
# establish a "logical" server connection
# get the url
- p = urllib.parse.urlsplit(uri)
- if p.scheme not in ("http", "https"):
+ p = urllib.parse.urlsplit(uri)
+ if p.scheme not in ("http", "https"):
raise OSError("unsupported XML-RPC protocol")
- self.__host = p.netloc
- self.__handler = urllib.parse.urlunsplit(["", "", *p[2:]])
- if not self.__handler:
- self.__handler = "/RPC2"
+ self.__host = p.netloc
+ self.__handler = urllib.parse.urlunsplit(["", "", *p[2:]])
+ if not self.__handler:
+ self.__handler = "/RPC2"
if transport is None:
- if p.scheme == "https":
+ if p.scheme == "https":
handler = SafeTransport
extra_kwargs = {"context": context}
else:
@@ -1444,7 +1444,7 @@ class ServerProxy:
extra_kwargs = {}
transport = handler(use_datetime=use_datetime,
use_builtin_types=use_builtin_types,
- headers=headers,
+ headers=headers,
**extra_kwargs)
self.__transport = transport
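The urlsplit-based parsing derives host and handler like so (worked example):

    >>> import urllib.parse
    >>> p = urllib.parse.urlsplit("https://user:pw@host:8080/RPC2")
    >>> p.netloc
    'user:pw@host:8080'
    >>> urllib.parse.urlunsplit(["", "", *p[2:]])
    '/RPC2'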
diff --git a/contrib/tools/python3/src/Lib/xmlrpc/server.py b/contrib/tools/python3/src/Lib/xmlrpc/server.py
index 69a260f5b1..35e88f5fd5 100644
--- a/contrib/tools/python3/src/Lib/xmlrpc/server.py
+++ b/contrib/tools/python3/src/Lib/xmlrpc/server.py
@@ -108,7 +108,7 @@ from xmlrpc.client import Fault, dumps, loads, gzip_encode, gzip_decode
from http.server import BaseHTTPRequestHandler
from functools import partial
from inspect import signature
-import html
+import html
import http.server
import socketserver
import sys
@@ -732,7 +732,7 @@ class ServerHTMLDoc(pydoc.HTMLDoc):
# hyperlinking of arbitrary strings being used as method
# names. Only methods with names consisting of word characters
# and '.'s are hyperlinked.
- pattern = re.compile(r'\b((http|https|ftp)://\S+[\w/]|'
+ pattern = re.compile(r'\b((http|https|ftp)://\S+[\w/]|'
r'RFC[- ]?(\d+)|'
r'PEP[- ]?(\d+)|'
r'(self\.)?((?:\w|\.)+))\b')
@@ -750,7 +750,7 @@ class ServerHTMLDoc(pydoc.HTMLDoc):
url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif pep:
- url = 'https://www.python.org/dev/peps/pep-%04d/' % int(pep)
+ url = 'https://www.python.org/dev/peps/pep-%04d/' % int(pep)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif text[end:end+1] == '(':
results.append(self.namelink(name, methods, funcs, classes))
@@ -895,7 +895,7 @@ class XMLRPCDocGenerator:
methods
)
- return documenter.page(html.escape(self.server_title), documentation)
+ return documenter.page(html.escape(self.server_title), documentation)
class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
"""XML-RPC and documentation request handler class.
diff --git a/contrib/tools/python3/src/Lib/zipfile.py b/contrib/tools/python3/src/Lib/zipfile.py
index 816f8582bb..23e1605f4e 100644
--- a/contrib/tools/python3/src/Lib/zipfile.py
+++ b/contrib/tools/python3/src/Lib/zipfile.py
@@ -3,19 +3,19 @@ Read and write ZIP files.
XXX references to utf-8 need further investigation.
"""
-import binascii
-import importlib.util
+import binascii
+import importlib.util
import io
-import itertools
+import itertools
import os
-import posixpath
-import shutil
+import posixpath
+import shutil
import stat
import struct
-import sys
+import sys
import threading
-import time
-import contextlib
+import time
+import contextlib
try:
import zlib # We may need its compression method
@@ -36,8 +36,8 @@ except ImportError:
__all__ = ["BadZipFile", "BadZipfile", "error",
"ZIP_STORED", "ZIP_DEFLATED", "ZIP_BZIP2", "ZIP_LZMA",
- "is_zipfile", "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile",
- "Path"]
+ "is_zipfile", "ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile",
+ "Path"]
class BadZipFile(Exception):
pass
@@ -228,7 +228,7 @@ def _EndRecData64(fpin, offset, endrec):
if sig != stringEndArchive64Locator:
return endrec
- if diskno != 0 or disks > 1:
+ if diskno != 0 or disks > 1:
raise BadZipFile("zipfiles that span multiple disks are not supported")
# Assume no 'zip64 extensible data'
@@ -377,8 +377,8 @@ class ZipInfo (object):
self.volume = 0 # Volume number of file header
self.internal_attr = 0 # Internal attributes
self.external_attr = 0 # External file attributes
- self.compress_size = 0 # Size of the compressed file
- self.file_size = 0 # Size of the uncompressed file
+ self.compress_size = 0 # Size of the compressed file
+ self.file_size = 0 # Size of the uncompressed file
# Other attributes are set by class ZipFile:
# header_offset Byte offset to the file header
# CRC CRC-32 of the uncompressed file
@@ -466,28 +466,28 @@ class ZipInfo (object):
if ln+4 > len(extra):
raise BadZipFile("Corrupt extra field %04x (size=%d)" % (tp, ln))
if tp == 0x0001:
- data = extra[4:ln+4]
+ data = extra[4:ln+4]
# ZIP64 extension (large files and/or large archives)
- try:
- if self.file_size in (0xFFFF_FFFF_FFFF_FFFF, 0xFFFF_FFFF):
- field = "File size"
- self.file_size, = unpack('<Q', data[:8])
- data = data[8:]
- if self.compress_size == 0xFFFF_FFFF:
- field = "Compress size"
- self.compress_size, = unpack('<Q', data[:8])
- data = data[8:]
- if self.header_offset == 0xFFFF_FFFF:
- field = "Header offset"
- self.header_offset, = unpack('<Q', data[:8])
- except struct.error:
- raise BadZipFile(f"Corrupt zip64 extra field. "
- f"{field} not found.") from None
+ try:
+ if self.file_size in (0xFFFF_FFFF_FFFF_FFFF, 0xFFFF_FFFF):
+ field = "File size"
+ self.file_size, = unpack('<Q', data[:8])
+ data = data[8:]
+ if self.compress_size == 0xFFFF_FFFF:
+ field = "Compress size"
+ self.compress_size, = unpack('<Q', data[:8])
+ data = data[8:]
+ if self.header_offset == 0xFFFF_FFFF:
+ field = "Header offset"
+ self.header_offset, = unpack('<Q', data[:8])
+ except struct.error:
+ raise BadZipFile(f"Corrupt zip64 extra field. "
+ f"{field} not found.") from None
extra = extra[ln+4:]
@classmethod
- def from_file(cls, filename, arcname=None, *, strict_timestamps=True):
+ def from_file(cls, filename, arcname=None, *, strict_timestamps=True):
"""Construct an appropriate ZipInfo for a file on the filesystem.
filename should be the path to a file or directory on the filesystem.
@@ -502,10 +502,10 @@ class ZipInfo (object):
isdir = stat.S_ISDIR(st.st_mode)
mtime = time.localtime(st.st_mtime)
date_time = mtime[0:6]
- if not strict_timestamps and date_time[0] < 1980:
- date_time = (1980, 1, 1, 0, 0, 0)
- elif not strict_timestamps and date_time[0] > 2107:
- date_time = (2107, 12, 31, 23, 59, 59)
+ if not strict_timestamps and date_time[0] < 1980:
+ date_time = (1980, 1, 1, 0, 0, 0)
+ elif not strict_timestamps and date_time[0] > 2107:
+ date_time = (2107, 12, 31, 23, 59, 59)
# Create ZipInfo instance to store file information
if arcname is None:
arcname = filename
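With strict_timestamps=False, out-of-range mtimes are clamped to the DOS date-time limits rather than failing later when the header is packed; a sketch (the path is hypothetical):

    import zipfile
    zi = zipfile.ZipInfo.from_file("ancient.txt", strict_timestamps=False)
    # an mtime before 1980 becomes (1980, 1, 1, 0, 0, 0);
    # one past 2107 becomes (2107, 12, 31, 23, 59, 59)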
@@ -695,7 +695,7 @@ def _get_compressor(compress_type, compresslevel=None):
def _get_decompressor(compress_type):
- _check_compression(compress_type)
+ _check_compression(compress_type)
if compress_type == ZIP_STORED:
return None
elif compress_type == ZIP_DEFLATED:
@@ -784,10 +784,10 @@ class ZipExtFile(io.BufferedIOBase):
# Chunk size to read during seek
MAX_SEEK_READ = 1 << 24
- def __init__(self, fileobj, mode, zipinfo, pwd=None,
+ def __init__(self, fileobj, mode, zipinfo, pwd=None,
close_fileobj=False):
self._fileobj = fileobj
- self._pwd = pwd
+ self._pwd = pwd
self._close_fileobj = close_fileobj
self._compress_type = zipinfo.compress_type
@@ -822,30 +822,30 @@ class ZipExtFile(io.BufferedIOBase):
except AttributeError:
pass
- self._decrypter = None
- if pwd:
- if zipinfo.flag_bits & 0x8:
- # compare against the file type from extended local headers
- check_byte = (zipinfo._raw_time >> 8) & 0xff
- else:
- # compare against the CRC otherwise
- check_byte = (zipinfo.CRC >> 24) & 0xff
- h = self._init_decrypter()
- if h != check_byte:
- raise RuntimeError("Bad password for file %r" % zipinfo.orig_filename)
-
-
- def _init_decrypter(self):
- self._decrypter = _ZipDecrypter(self._pwd)
-        # The first 12 bytes in the cypher stream are an encryption header
-        # used to strengthen the algorithm. The first 11 bytes are
-        # completely random, while the 12th contains the MSB of the CRC,
-        # or the MSB of the file time, depending on the header type,
-        # and is used to check the correctness of the password.
- header = self._fileobj.read(12)
- self._compress_left -= 12
- return self._decrypter(header)[11]
-
+ self._decrypter = None
+ if pwd:
+ if zipinfo.flag_bits & 0x8:
+ # compare against the file type from extended local headers
+ check_byte = (zipinfo._raw_time >> 8) & 0xff
+ else:
+ # compare against the CRC otherwise
+ check_byte = (zipinfo.CRC >> 24) & 0xff
+ h = self._init_decrypter()
+ if h != check_byte:
+ raise RuntimeError("Bad password for file %r" % zipinfo.orig_filename)
+
+
+ def _init_decrypter(self):
+ self._decrypter = _ZipDecrypter(self._pwd)
+        # The first 12 bytes in the cypher stream are an encryption header
+        # used to strengthen the algorithm. The first 11 bytes are
+        # completely random, while the 12th contains the MSB of the CRC,
+        # or the MSB of the file time, depending on the header type,
+        # and is used to check the correctness of the password.
+ header = self._fileobj.read(12)
+ self._compress_left -= 12
+ return self._decrypter(header)[11]
+
def __repr__(self):
result = ['<%s.%s' % (self.__class__.__module__,
self.__class__.__qualname__)]
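From the caller's side, the decrypter set-up above is driven by the pwd argument; a minimal sketch (archive and member names are hypothetical):

    import zipfile
    with zipfile.ZipFile("enc.zip") as zf:
        data = zf.read("secret.txt", pwd=b"hunter2")  # a wrong password raises RuntimeError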
@@ -890,16 +890,16 @@ class ZipExtFile(io.BufferedIOBase):
return self._readbuffer[self._offset: self._offset + 512]
def readable(self):
- if self.closed:
- raise ValueError("I/O operation on closed file.")
+ if self.closed:
+ raise ValueError("I/O operation on closed file.")
return True
def read(self, n=-1):
"""Read and return up to n bytes.
- If the argument is omitted, None, or negative, data is read and returned until EOF is reached.
+ If the argument is omitted, None, or negative, data is read and returned until EOF is reached.
"""
- if self.closed:
- raise ValueError("read from closed file.")
+ if self.closed:
+ raise ValueError("read from closed file.")
if n is None or n < 0:
buf = self._readbuffer[self._offset:]
self._readbuffer = b''
@@ -1036,13 +1036,13 @@ class ZipExtFile(io.BufferedIOBase):
super().close()
def seekable(self):
- if self.closed:
- raise ValueError("I/O operation on closed file.")
+ if self.closed:
+ raise ValueError("I/O operation on closed file.")
return self._seekable
def seek(self, offset, whence=0):
- if self.closed:
- raise ValueError("seek on closed file.")
+ if self.closed:
+ raise ValueError("seek on closed file.")
if not self._seekable:
raise io.UnsupportedOperation("underlying stream is not seekable")
curr_pos = self.tell()
@@ -1080,8 +1080,8 @@ class ZipExtFile(io.BufferedIOBase):
self._decompressor = _get_decompressor(self._compress_type)
self._eof = False
read_offset = new_pos
- if self._decrypter is not None:
- self._init_decrypter()
+ if self._decrypter is not None:
+ self._init_decrypter()
while read_offset > 0:
read_len = min(self.MAX_SEEK_READ, read_offset)
@@ -1091,8 +1091,8 @@ class ZipExtFile(io.BufferedIOBase):
return self.tell()
def tell(self):
- if self.closed:
- raise ValueError("tell on closed file.")
+ if self.closed:
+ raise ValueError("tell on closed file.")
if not self._seekable:
raise io.UnsupportedOperation("underlying stream is not seekable")
filepos = self._orig_file_size - self._left - len(self._readbuffer) + self._offset
@@ -1132,51 +1132,51 @@ class _ZipWriteFile(io.BufferedIOBase):
def close(self):
if self.closed:
return
- try:
- super().close()
- # Flush any data from the compressor, and update header info
- if self._compressor:
- buf = self._compressor.flush()
- self._compress_size += len(buf)
- self._fileobj.write(buf)
- self._zinfo.compress_size = self._compress_size
- else:
- self._zinfo.compress_size = self._file_size
- self._zinfo.CRC = self._crc
- self._zinfo.file_size = self._file_size
-
- # Write updated header info
- if self._zinfo.flag_bits & 0x08:
- # Write CRC and file sizes after the file data
- fmt = '<LLQQ' if self._zip64 else '<LLLL'
- self._fileobj.write(struct.pack(fmt, _DD_SIGNATURE, self._zinfo.CRC,
- self._zinfo.compress_size, self._zinfo.file_size))
- self._zipfile.start_dir = self._fileobj.tell()
- else:
- if not self._zip64:
- if self._file_size > ZIP64_LIMIT:
- raise RuntimeError(
- 'File size unexpectedly exceeded ZIP64 limit')
- if self._compress_size > ZIP64_LIMIT:
- raise RuntimeError(
- 'Compressed size unexpectedly exceeded ZIP64 limit')
- # Seek backwards and write file header (which will now include
- # correct CRC and file sizes)
-
- # Preserve current position in file
- self._zipfile.start_dir = self._fileobj.tell()
- self._fileobj.seek(self._zinfo.header_offset)
- self._fileobj.write(self._zinfo.FileHeader(self._zip64))
- self._fileobj.seek(self._zipfile.start_dir)
-
- # Successfully written: Add file to our caches
- self._zipfile.filelist.append(self._zinfo)
- self._zipfile.NameToInfo[self._zinfo.filename] = self._zinfo
- finally:
- self._zipfile._writing = False
-
-
-
+ try:
+ super().close()
+ # Flush any data from the compressor, and update header info
+ if self._compressor:
+ buf = self._compressor.flush()
+ self._compress_size += len(buf)
+ self._fileobj.write(buf)
+ self._zinfo.compress_size = self._compress_size
+ else:
+ self._zinfo.compress_size = self._file_size
+ self._zinfo.CRC = self._crc
+ self._zinfo.file_size = self._file_size
+
+ # Write updated header info
+ if self._zinfo.flag_bits & 0x08:
+ # Write CRC and file sizes after the file data
+ fmt = '<LLQQ' if self._zip64 else '<LLLL'
+ self._fileobj.write(struct.pack(fmt, _DD_SIGNATURE, self._zinfo.CRC,
+ self._zinfo.compress_size, self._zinfo.file_size))
+ self._zipfile.start_dir = self._fileobj.tell()
+ else:
+ if not self._zip64:
+ if self._file_size > ZIP64_LIMIT:
+ raise RuntimeError(
+ 'File size unexpectedly exceeded ZIP64 limit')
+ if self._compress_size > ZIP64_LIMIT:
+ raise RuntimeError(
+ 'Compressed size unexpectedly exceeded ZIP64 limit')
+ # Seek backwards and write file header (which will now include
+ # correct CRC and file sizes)
+
+ # Preserve current position in file
+ self._zipfile.start_dir = self._fileobj.tell()
+ self._fileobj.seek(self._zinfo.header_offset)
+ self._fileobj.write(self._zinfo.FileHeader(self._zip64))
+ self._fileobj.seek(self._zipfile.start_dir)
+
+ # Successfully written: Add file to our caches
+ self._zipfile.filelist.append(self._zinfo)
+ self._zipfile.NameToInfo[self._zinfo.filename] = self._zinfo
+ finally:
+ self._zipfile._writing = False
+
+
+
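Whether close() above writes a trailing data descriptor or seeks back to patch the header depends on flag bit 0x08, i.e. on whether the target stream is seekable; a sketch of the seekable path (my reading of the code above, not a separate API):

    import io, zipfile
    with zipfile.ZipFile(io.BytesIO(), 'w') as zf:
        with zf.open('a.txt', 'w') as f:   # seekable target: sizes patched via seek-back
            f.write(b'data')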
class ZipFile:
""" Class with methods to open, read, write, close, list zip files.
@@ -1204,7 +1204,7 @@ class ZipFile:
_windows_illegal_name_trans_table = None
def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=True,
- compresslevel=None, *, strict_timestamps=True):
+ compresslevel=None, *, strict_timestamps=True):
"""Open the ZIP file with mode read 'r', write 'w', exclusive create 'x',
or append 'a'."""
if mode not in ('r', 'w', 'x', 'a'):
@@ -1222,7 +1222,7 @@ class ZipFile:
self.mode = mode
self.pwd = None
self._comment = b''
- self._strict_timestamps = strict_timestamps
+ self._strict_timestamps = strict_timestamps
# Check if we were passed a file-like object
if isinstance(file, os.PathLike):
@@ -1534,7 +1534,7 @@ class ZipFile:
# strong encryption
raise NotImplementedError("strong encryption (flag bit 6)")
- if fheader[_FH_GENERAL_PURPOSE_FLAG_BITS] & 0x800:
+ if fheader[_FH_GENERAL_PURPOSE_FLAG_BITS] & 0x800:
# UTF-8 filename
fname_str = fname.decode("utf-8")
else:
@@ -1553,10 +1553,10 @@ class ZipFile:
if not pwd:
raise RuntimeError("File %r is encrypted, password "
"required for extraction" % name)
- else:
- pwd = None
+ else:
+ pwd = None
- return ZipExtFile(zef_file, mode, zinfo, pwd, True)
+ return ZipExtFile(zef_file, mode, zinfo, pwd, True)
except:
zef_file.close()
raise
@@ -1572,7 +1572,7 @@ class ZipFile:
"another write handle open on it. "
"Close the first handle before opening another.")
- # Size and CRC are overwritten with correct data after processing the file
+ # Size and CRC are overwritten with correct data after processing the file
zinfo.compress_size = 0
zinfo.CRC = 0
@@ -1724,8 +1724,8 @@ class ZipFile:
"Can't write to ZIP archive while an open writing handle exists"
)
- zinfo = ZipInfo.from_file(filename, arcname,
- strict_timestamps=self._strict_timestamps)
+ zinfo = ZipInfo.from_file(filename, arcname,
+ strict_timestamps=self._strict_timestamps)
if zinfo.is_dir():
zinfo.compress_size = 0
@@ -1868,15 +1868,15 @@ class ZipFile:
extract_version = max(min_version, zinfo.extract_version)
create_version = max(min_version, zinfo.create_version)
- filename, flag_bits = zinfo._encodeFilenameFlags()
- centdir = struct.pack(structCentralDir,
- stringCentralDir, create_version,
- zinfo.create_system, extract_version, zinfo.reserved,
- flag_bits, zinfo.compress_type, dostime, dosdate,
- zinfo.CRC, compress_size, file_size,
- len(filename), len(extra_data), len(zinfo.comment),
- 0, zinfo.internal_attr, zinfo.external_attr,
- header_offset)
+ filename, flag_bits = zinfo._encodeFilenameFlags()
+ centdir = struct.pack(structCentralDir,
+ stringCentralDir, create_version,
+ zinfo.create_system, extract_version, zinfo.reserved,
+ flag_bits, zinfo.compress_type, dostime, dosdate,
+ zinfo.CRC, compress_size, file_size,
+ len(filename), len(extra_data), len(zinfo.comment),
+ 0, zinfo.internal_attr, zinfo.external_attr,
+ header_offset)
self.fp.write(centdir)
self.fp.write(filename)
self.fp.write(extra_data)
@@ -1918,8 +1918,8 @@ class ZipFile:
centDirSize, centDirOffset, len(self._comment))
self.fp.write(endrec)
self.fp.write(self._comment)
- if self.mode == "a":
- self.fp.truncate()
+ if self.mode == "a":
+ self.fp.truncate()
self.fp.flush()
def _fpclose(self, fp):
@@ -2103,266 +2103,266 @@ class PyZipFile(ZipFile):
return (fname, archivename)
-def _parents(path):
- """
- Given a path with elements separated by
- posixpath.sep, generate all parents of that path.
-
- >>> list(_parents('b/d'))
- ['b']
- >>> list(_parents('/b/d/'))
- ['/b']
- >>> list(_parents('b/d/f/'))
- ['b/d', 'b']
- >>> list(_parents('b'))
- []
- >>> list(_parents(''))
- []
- """
- return itertools.islice(_ancestry(path), 1, None)
-
-
-def _ancestry(path):
- """
- Given a path with elements separated by
- posixpath.sep, generate all elements of that path
-
- >>> list(_ancestry('b/d'))
- ['b/d', 'b']
- >>> list(_ancestry('/b/d/'))
- ['/b/d', '/b']
- >>> list(_ancestry('b/d/f/'))
- ['b/d/f', 'b/d', 'b']
- >>> list(_ancestry('b'))
- ['b']
- >>> list(_ancestry(''))
- []
- """
- path = path.rstrip(posixpath.sep)
- while path and path != posixpath.sep:
- yield path
- path, tail = posixpath.split(path)
-
-
-_dedupe = dict.fromkeys
-"""Deduplicate an iterable in original order"""
-
-
-def _difference(minuend, subtrahend):
- """
- Return items in minuend not in subtrahend, retaining order
- with O(1) lookup.
- """
- return itertools.filterfalse(set(subtrahend).__contains__, minuend)
-
-
-class CompleteDirs(ZipFile):
- """
- A ZipFile subclass that ensures that implied directories
- are always included in the namelist.
- """
-
- @staticmethod
- def _implied_dirs(names):
- parents = itertools.chain.from_iterable(map(_parents, names))
- as_dirs = (p + posixpath.sep for p in parents)
- return _dedupe(_difference(as_dirs, names))
-
- def namelist(self):
- names = super(CompleteDirs, self).namelist()
- return names + list(self._implied_dirs(names))
-
- def _name_set(self):
- return set(self.namelist())
-
- def resolve_dir(self, name):
- """
- If the name represents a directory, return that name
- as a directory (with the trailing slash).
- """
- names = self._name_set()
- dirname = name + '/'
- dir_match = name not in names and dirname in names
- return dirname if dir_match else name
-
- @classmethod
- def make(cls, source):
- """
- Given a source (filename or zipfile), return an
- appropriate CompleteDirs subclass.
- """
- if isinstance(source, CompleteDirs):
- return source
-
- if not isinstance(source, ZipFile):
- return cls(source)
-
- # Only allow for FastPath when supplied zipfile is read-only
- if 'r' not in source.mode:
- cls = CompleteDirs
-
- res = cls.__new__(cls)
- vars(res).update(vars(source))
- return res
-
-
-class FastLookup(CompleteDirs):
- """
- ZipFile subclass to ensure implicit
- dirs exist and are resolved rapidly.
- """
- def namelist(self):
- with contextlib.suppress(AttributeError):
- return self.__names
- self.__names = super(FastLookup, self).namelist()
- return self.__names
-
- def _name_set(self):
- with contextlib.suppress(AttributeError):
- return self.__lookup
- self.__lookup = super(FastLookup, self)._name_set()
- return self.__lookup
-
-
-class Path:
- """
- A pathlib-compatible interface for zip files.
-
- Consider a zip file with this structure::
-
- .
- ├── a.txt
- └── b
- ├── c.txt
- └── d
- └── e.txt
-
- >>> data = io.BytesIO()
- >>> zf = ZipFile(data, 'w')
- >>> zf.writestr('a.txt', 'content of a')
- >>> zf.writestr('b/c.txt', 'content of c')
- >>> zf.writestr('b/d/e.txt', 'content of e')
- >>> zf.filename = 'abcde.zip'
-
- Path accepts the zipfile object itself or a filename
-
- >>> root = Path(zf)
-
- From there, several path operations are available.
-
- Directory iteration (including the zip file itself):
-
- >>> a, b = root.iterdir()
- >>> a
- Path('abcde.zip', 'a.txt')
- >>> b
- Path('abcde.zip', 'b/')
-
- name property:
-
- >>> b.name
- 'b'
-
- join with divide operator:
-
- >>> c = b / 'c.txt'
- >>> c
- Path('abcde.zip', 'b/c.txt')
- >>> c.name
- 'c.txt'
-
- Read text:
-
- >>> c.read_text()
- 'content of c'
-
- existence:
-
- >>> c.exists()
- True
- >>> (b / 'missing.txt').exists()
- False
-
- Coercion to string:
-
- >>> str(c)
- 'abcde.zip/b/c.txt'
- """
-
- __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
-
- def __init__(self, root, at=""):
- self.root = FastLookup.make(root)
- self.at = at
-
- def open(self, mode='r', *args, **kwargs):
- """
- Open this entry as text or binary following the semantics
- of ``pathlib.Path.open()`` by passing arguments through
- to io.TextIOWrapper().
- """
- pwd = kwargs.pop('pwd', None)
- zip_mode = mode[0]
- stream = self.root.open(self.at, zip_mode, pwd=pwd)
- if 'b' in mode:
- if args or kwargs:
- raise ValueError("encoding args invalid for binary operation")
- return stream
- return io.TextIOWrapper(stream, *args, **kwargs)
-
- @property
- def name(self):
- return posixpath.basename(self.at.rstrip("/"))
-
- def read_text(self, *args, **kwargs):
- with self.open('r', *args, **kwargs) as strm:
- return strm.read()
-
- def read_bytes(self):
- with self.open('rb') as strm:
- return strm.read()
-
- def _is_child(self, path):
- return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")
-
- def _next(self, at):
- return Path(self.root, at)
-
- def is_dir(self):
- return not self.at or self.at.endswith("/")
-
- def is_file(self):
- return not self.is_dir()
-
- def exists(self):
- return self.at in self.root._name_set()
-
- def iterdir(self):
- if not self.is_dir():
- raise ValueError("Can't listdir a file")
- subs = map(self._next, self.root.namelist())
- return filter(self._is_child, subs)
-
- def __str__(self):
- return posixpath.join(self.root.filename, self.at)
-
- def __repr__(self):
- return self.__repr.format(self=self)
-
- def joinpath(self, add):
- next = posixpath.join(self.at, add)
- return self._next(self.root.resolve_dir(next))
-
- __truediv__ = joinpath
-
- @property
- def parent(self):
- parent_at = posixpath.dirname(self.at.rstrip('/'))
- if parent_at:
- parent_at += '/'
- return self._next(parent_at)
-
-
+def _parents(path):
+ """
+ Given a path with elements separated by
+ posixpath.sep, generate all parents of that path.
+
+ >>> list(_parents('b/d'))
+ ['b']
+ >>> list(_parents('/b/d/'))
+ ['/b']
+ >>> list(_parents('b/d/f/'))
+ ['b/d', 'b']
+ >>> list(_parents('b'))
+ []
+ >>> list(_parents(''))
+ []
+ """
+ return itertools.islice(_ancestry(path), 1, None)
+
+
+def _ancestry(path):
+ """
+ Given a path with elements separated by
+ posixpath.sep, generate all elements of that path
+
+ >>> list(_ancestry('b/d'))
+ ['b/d', 'b']
+ >>> list(_ancestry('/b/d/'))
+ ['/b/d', '/b']
+ >>> list(_ancestry('b/d/f/'))
+ ['b/d/f', 'b/d', 'b']
+ >>> list(_ancestry('b'))
+ ['b']
+ >>> list(_ancestry(''))
+ []
+ """
+ path = path.rstrip(posixpath.sep)
+ while path and path != posixpath.sep:
+ yield path
+ path, tail = posixpath.split(path)
+
+
+_dedupe = dict.fromkeys
+"""Deduplicate an iterable in original order"""
+
+
+def _difference(minuend, subtrahend):
+ """
+ Return items in minuend not in subtrahend, retaining order
+ with O(1) lookup.
+ """
+ return itertools.filterfalse(set(subtrahend).__contains__, minuend)
+
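A worked example of the helper above:

    >>> list(_difference(['a', 'b', 'c'], ['b']))
    ['a', 'c']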
+
+class CompleteDirs(ZipFile):
+ """
+ A ZipFile subclass that ensures that implied directories
+ are always included in the namelist.
+ """
+
+ @staticmethod
+ def _implied_dirs(names):
+ parents = itertools.chain.from_iterable(map(_parents, names))
+ as_dirs = (p + posixpath.sep for p in parents)
+ return _dedupe(_difference(as_dirs, names))
+
+ def namelist(self):
+ names = super(CompleteDirs, self).namelist()
+ return names + list(self._implied_dirs(names))
+
+ def _name_set(self):
+ return set(self.namelist())
+
+ def resolve_dir(self, name):
+ """
+ If the name represents a directory, return that name
+ as a directory (with the trailing slash).
+ """
+ names = self._name_set()
+ dirname = name + '/'
+ dir_match = name not in names and dirname in names
+ return dirname if dir_match else name
+
+ @classmethod
+ def make(cls, source):
+ """
+ Given a source (filename or zipfile), return an
+ appropriate CompleteDirs subclass.
+ """
+ if isinstance(source, CompleteDirs):
+ return source
+
+ if not isinstance(source, ZipFile):
+ return cls(source)
+
+ # Only allow for FastPath when supplied zipfile is read-only
+ if 'r' not in source.mode:
+ cls = CompleteDirs
+
+ res = cls.__new__(cls)
+ vars(res).update(vars(source))
+ return res
+
+
+class FastLookup(CompleteDirs):
+ """
+ ZipFile subclass to ensure implicit
+ dirs exist and are resolved rapidly.
+ """
+ def namelist(self):
+ with contextlib.suppress(AttributeError):
+ return self.__names
+ self.__names = super(FastLookup, self).namelist()
+ return self.__names
+
+ def _name_set(self):
+ with contextlib.suppress(AttributeError):
+ return self.__lookup
+ self.__lookup = super(FastLookup, self)._name_set()
+ return self.__lookup
+
+
+class Path:
+ """
+ A pathlib-compatible interface for zip files.
+
+ Consider a zip file with this structure::
+
+ .
+ ├── a.txt
+ └── b
+ ├── c.txt
+ └── d
+ └── e.txt
+
+ >>> data = io.BytesIO()
+ >>> zf = ZipFile(data, 'w')
+ >>> zf.writestr('a.txt', 'content of a')
+ >>> zf.writestr('b/c.txt', 'content of c')
+ >>> zf.writestr('b/d/e.txt', 'content of e')
+ >>> zf.filename = 'abcde.zip'
+
+ Path accepts the zipfile object itself or a filename
+
+ >>> root = Path(zf)
+
+ From there, several path operations are available.
+
+ Directory iteration (including the zip file itself):
+
+ >>> a, b = root.iterdir()
+ >>> a
+ Path('abcde.zip', 'a.txt')
+ >>> b
+ Path('abcde.zip', 'b/')
+
+ name property:
+
+ >>> b.name
+ 'b'
+
+ join with divide operator:
+
+ >>> c = b / 'c.txt'
+ >>> c
+ Path('abcde.zip', 'b/c.txt')
+ >>> c.name
+ 'c.txt'
+
+ Read text:
+
+ >>> c.read_text()
+ 'content of c'
+
+ existence:
+
+ >>> c.exists()
+ True
+ >>> (b / 'missing.txt').exists()
+ False
+
+ Coercion to string:
+
+ >>> str(c)
+ 'abcde.zip/b/c.txt'
+ """
+
+ __repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
+
+ def __init__(self, root, at=""):
+ self.root = FastLookup.make(root)
+ self.at = at
+
+ def open(self, mode='r', *args, **kwargs):
+ """
+ Open this entry as text or binary following the semantics
+ of ``pathlib.Path.open()`` by passing arguments through
+ to io.TextIOWrapper().
+ """
+ pwd = kwargs.pop('pwd', None)
+ zip_mode = mode[0]
+ stream = self.root.open(self.at, zip_mode, pwd=pwd)
+ if 'b' in mode:
+ if args or kwargs:
+ raise ValueError("encoding args invalid for binary operation")
+ return stream
+ return io.TextIOWrapper(stream, *args, **kwargs)
+
+ @property
+ def name(self):
+ return posixpath.basename(self.at.rstrip("/"))
+
+ def read_text(self, *args, **kwargs):
+ with self.open('r', *args, **kwargs) as strm:
+ return strm.read()
+
+ def read_bytes(self):
+ with self.open('rb') as strm:
+ return strm.read()
+
+ def _is_child(self, path):
+ return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")
+
+ def _next(self, at):
+ return Path(self.root, at)
+
+ def is_dir(self):
+ return not self.at or self.at.endswith("/")
+
+ def is_file(self):
+ return not self.is_dir()
+
+ def exists(self):
+ return self.at in self.root._name_set()
+
+ def iterdir(self):
+ if not self.is_dir():
+ raise ValueError("Can't listdir a file")
+ subs = map(self._next, self.root.namelist())
+ return filter(self._is_child, subs)
+
+ def __str__(self):
+ return posixpath.join(self.root.filename, self.at)
+
+ def __repr__(self):
+ return self.__repr.format(self=self)
+
+ def joinpath(self, add):
+ next = posixpath.join(self.at, add)
+ return self._next(self.root.resolve_dir(next))
+
+ __truediv__ = joinpath
+
+ @property
+ def parent(self):
+ parent_at = posixpath.dirname(self.at.rstrip('/'))
+ if parent_at:
+ parent_at += '/'
+ return self._next(parent_at)
+
+
def main(args=None):
import argparse
@@ -2423,6 +2423,6 @@ def main(args=None):
zippath = ''
addToZip(zf, path, zippath)
-
+
if __name__ == "__main__":
main()
diff --git a/contrib/tools/python3/src/Lib/zipimport.py b/contrib/tools/python3/src/Lib/zipimport.py
index 5ef0a17c2a..b93084c580 100644
--- a/contrib/tools/python3/src/Lib/zipimport.py
+++ b/contrib/tools/python3/src/Lib/zipimport.py
@@ -1,792 +1,792 @@
-"""zipimport provides support for importing Python modules from Zip archives.
-
-This module exports three objects:
-- zipimporter: a class; its constructor takes a path to a Zip archive.
-- ZipImportError: exception raised by zipimporter objects. It's a
- subclass of ImportError, so it can be caught as ImportError, too.
-- _zip_directory_cache: a dict, mapping archive paths to zip directory
- info dicts, as used in zipimporter._files.
-
-It is usually not needed to use the zipimport module explicitly; it is
-used by the builtin import mechanism for sys.path items that are paths
-to Zip archives.
-"""
-
-#from importlib import _bootstrap_external
-#from importlib import _bootstrap # for _verbose_message
-import _frozen_importlib_external as _bootstrap_external
-from _frozen_importlib_external import _unpack_uint16, _unpack_uint32
-import _frozen_importlib as _bootstrap # for _verbose_message
-import _imp # for check_hash_based_pycs
-import _io # for open
-import marshal # for loads
-import sys # for modules
-import time # for mktime
-
-__all__ = ['ZipImportError', 'zipimporter']
-
-
-path_sep = _bootstrap_external.path_sep
-alt_path_sep = _bootstrap_external.path_separators[1:]
-
-
-class ZipImportError(ImportError):
- pass
-
-# _read_directory() cache
-_zip_directory_cache = {}
-
-_module_type = type(sys)
-
-END_CENTRAL_DIR_SIZE = 22
-STRING_END_ARCHIVE = b'PK\x05\x06'
-MAX_COMMENT_LEN = (1 << 16) - 1
-
-class zipimporter:
- """zipimporter(archivepath) -> zipimporter object
-
- Create a new zipimporter instance. 'archivepath' must be a path to
- a zipfile, or to a specific path inside a zipfile. For example, it can be
- '/tmp/myimport.zip', or '/tmp/myimport.zip/mydirectory', if mydirectory is a
- valid directory inside the archive.
-
-    ZipImportError is raised if 'archivepath' doesn't point to a valid Zip
- archive.
-
- The 'archive' attribute of zipimporter objects contains the name of the
- zipfile targeted.
- """
-
- # Split the "subdirectory" from the Zip archive path, lookup a matching
- # entry in sys.path_importer_cache, fetch the file directory from there
- # if found, or else read it from the archive.
- def __init__(self, path):
- if not isinstance(path, str):
- import os
- path = os.fsdecode(path)
- if not path:
- raise ZipImportError('archive path is empty', path=path)
- if alt_path_sep:
- path = path.replace(alt_path_sep, path_sep)
-
- prefix = []
- while True:
- try:
- st = _bootstrap_external._path_stat(path)
- except (OSError, ValueError):
- # On Windows a ValueError is raised for too long paths.
- # Back up one path element.
- dirname, basename = _bootstrap_external._path_split(path)
- if dirname == path:
- raise ZipImportError('not a Zip file', path=path)
- path = dirname
- prefix.append(basename)
- else:
- # it exists
- if (st.st_mode & 0o170000) != 0o100000: # stat.S_ISREG
-                    # it's not a file
- raise ZipImportError('not a Zip file', path=path)
- break
-
- try:
- files = _zip_directory_cache[path]
- except KeyError:
- files = _read_directory(path)
- _zip_directory_cache[path] = files
- self._files = files
- self.archive = path
- # a prefix directory following the ZIP file path.
- self.prefix = _bootstrap_external._path_join(*prefix[::-1])
- if self.prefix:
- self.prefix += path_sep
-
-
- # Check whether we can satisfy the import of the module named by
- # 'fullname', or whether it could be a portion of a namespace
- # package. Return self if we can load it, a string containing the
- # full path if it's a possible namespace portion, None if we
- # can't load it.
- def find_loader(self, fullname, path=None):
- """find_loader(fullname, path=None) -> self, str or None.
-
- Search for a module specified by 'fullname'. 'fullname' must be the
- fully qualified (dotted) module name. It returns the zipimporter
- instance itself if the module was found, a string containing the
- full path name if it's possibly a portion of a namespace package,
- or None otherwise. The optional 'path' argument is ignored -- it's
- there for compatibility with the importer protocol.
- """
- mi = _get_module_info(self, fullname)
- if mi is not None:
- # This is a module or package.
- return self, []
-
- # Not a module or regular package. See if this is a directory, and
- # therefore possibly a portion of a namespace package.
-
-        # We're only interested in the last path component of fullname;
- # earlier components are recorded in self.prefix.
- modpath = _get_module_path(self, fullname)
- if _is_dir(self, modpath):
- # This is possibly a portion of a namespace
- # package. Return the string representing its path,
- # without a trailing separator.
- return None, [f'{self.archive}{path_sep}{modpath}']
-
- return None, []
-
-
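So a found module yields (self, []), while a bare directory yields a possible namespace portion; a sketch reusing the archive path from the class docstring:

    zi = zipimporter('/tmp/myimport.zip')
    loader, portions = zi.find_loader('mypkg')
    # loader is zi for a module/package; otherwise None with
    # portions == ['/tmp/myimport.zip/mypkg'] if that directory exists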
- # Check whether we can satisfy the import of the module named by
- # 'fullname'. Return self if we can, None if we can't.
- def find_module(self, fullname, path=None):
- """find_module(fullname, path=None) -> self or None.
-
- Search for a module specified by 'fullname'. 'fullname' must be the
- fully qualified (dotted) module name. It returns the zipimporter
- instance itself if the module was found, or None if it wasn't.
- The optional 'path' argument is ignored -- it's there for compatibility
- with the importer protocol.
- """
- return self.find_loader(fullname, path)[0]
-
-
- def get_code(self, fullname):
- """get_code(fullname) -> code object.
-
- Return the code object for the specified module. Raise ZipImportError
- if the module couldn't be found.
- """
- code, ispackage, modpath = _get_module_code(self, fullname)
- return code
-
-
- def get_data(self, pathname):
- """get_data(pathname) -> string with file data.
-
- Return the data associated with 'pathname'. Raise OSError if
- the file wasn't found.
- """
- if alt_path_sep:
- pathname = pathname.replace(alt_path_sep, path_sep)
-
- key = pathname
- if pathname.startswith(self.archive + path_sep):
- key = pathname[len(self.archive + path_sep):]
-
- try:
- toc_entry = self._files[key]
- except KeyError:
- raise OSError(0, '', key)
- return _get_data(self.archive, toc_entry)
-
-
- # Return a string matching __file__ for the named module
- def get_filename(self, fullname):
- """get_filename(fullname) -> filename string.
-
- Return the filename for the specified module.
- """
- # Deciding the filename requires working out where the code
- # would come from if the module was actually loaded
- code, ispackage, modpath = _get_module_code(self, fullname)
- return modpath
-
-
- def get_source(self, fullname):
- """get_source(fullname) -> source string.
-
- Return the source code for the specified module. Raise ZipImportError
- if the module couldn't be found, return None if the archive does
- contain the module, but has no source for it.
- """
- mi = _get_module_info(self, fullname)
- if mi is None:
- raise ZipImportError(f"can't find module {fullname!r}", name=fullname)
-
- path = _get_module_path(self, fullname)
- if mi:
- fullpath = _bootstrap_external._path_join(path, '__init__.py')
- else:
- fullpath = f'{path}.py'
-
- try:
- toc_entry = self._files[fullpath]
- except KeyError:
- # we have the module, but no source
- return None
- return _get_data(self.archive, toc_entry).decode()
-
-
- # Return a bool signifying whether the module is a package or not.
- def is_package(self, fullname):
- """is_package(fullname) -> bool.
-
- Return True if the module specified by fullname is a package.
- Raise ZipImportError if the module couldn't be found.
- """
- mi = _get_module_info(self, fullname)
- if mi is None:
- raise ZipImportError(f"can't find module {fullname!r}", name=fullname)
- return mi
-
-
- # Load and return the module named by 'fullname'.
- def load_module(self, fullname):
- """load_module(fullname) -> module.
-
- Load the module specified by 'fullname'. 'fullname' must be the
- fully qualified (dotted) module name. It returns the imported
- module, or raises ZipImportError if it wasn't found.
- """
- code, ispackage, modpath = _get_module_code(self, fullname)
- mod = sys.modules.get(fullname)
- if mod is None or not isinstance(mod, _module_type):
- mod = _module_type(fullname)
- sys.modules[fullname] = mod
- mod.__loader__ = self
-
- try:
- if ispackage:
- # add __path__ to the module *before* the code gets
- # executed
- path = _get_module_path(self, fullname)
- fullpath = _bootstrap_external._path_join(self.archive, path)
- mod.__path__ = [fullpath]
-
- if not hasattr(mod, '__builtins__'):
- mod.__builtins__ = __builtins__
- _bootstrap_external._fix_up_module(mod.__dict__, fullname, modpath)
- exec(code, mod.__dict__)
- except:
- del sys.modules[fullname]
- raise
-
- try:
- mod = sys.modules[fullname]
- except KeyError:
- raise ImportError(f'Loaded module {fullname!r} not found in sys.modules')
- _bootstrap._verbose_message('import {} # loaded from Zip {}', fullname, modpath)
- return mod
-
-
- def get_resource_reader(self, fullname):
- """Return the ResourceReader for a package in a zip file.
-
- If 'fullname' is a package within the zip file, return the
- 'ResourceReader' object for the package. Otherwise return None.
- """
- try:
- if not self.is_package(fullname):
- return None
- except ZipImportError:
- return None
- if not _ZipImportResourceReader._registered:
- from importlib.abc import ResourceReader
- ResourceReader.register(_ZipImportResourceReader)
- _ZipImportResourceReader._registered = True
- return _ZipImportResourceReader(self, fullname)
-
-
- def __repr__(self):
- return f'<zipimporter object "{self.archive}{path_sep}{self.prefix}">'
-
-
-# _zip_searchorder defines how we search for a module in the Zip
-# archive: we first search for a package __init__, then for
-# non-package .pyc and .py entries. The .pyc entries
-# are swapped by initzipimport() if we run in optimized mode. Also,
-# '/' is replaced by path_sep there.
-_zip_searchorder = (
- (path_sep + '__init__.pyc', True, True),
- (path_sep + '__init__.py', False, True),
- ('.pyc', True, False),
- ('.py', False, False),
-)
-
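Given the table above, _get_module_info reduces to membership tests against self._files; for a top-level name the probe order is:

    # _get_module_path(self, 'mod') -> 'mod' with an empty prefix; the probed keys are:
    # 'mod/__init__.pyc', 'mod/__init__.py', 'mod.pyc', 'mod.py'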
-# Given a module name, return the potential file path in the
-# archive (without extension).
-def _get_module_path(self, fullname):
- return self.prefix + fullname.rpartition('.')[2]
-
-# Does this path represent a directory?
-def _is_dir(self, path):
- # See if this is a "directory". If so, it's eligible to be part
- # of a namespace package. We test by seeing if the name, with an
- # appended path separator, exists.
- dirpath = path + path_sep
- # If dirpath is present in self._files, we have a directory.
- return dirpath in self._files
-
-# Return some information about a module.
-def _get_module_info(self, fullname):
- path = _get_module_path(self, fullname)
- for suffix, isbytecode, ispackage in _zip_searchorder:
- fullpath = path + suffix
- if fullpath in self._files:
- return ispackage
- return None
-
-
-# implementation
-
-# _read_directory(archive) -> files dict (new reference)
-#
-# Given a path to a Zip archive, build a dict, mapping file names
-# (local to the archive, using SEP as a separator) to toc entries.
-#
-# A toc_entry is a tuple:
-#
-# (__file__, # value to use for __file__, available for all files,
-# # encoded to the filesystem encoding
-# compress, # compression kind; 0 for uncompressed
-# data_size, # size of compressed data on disk
-# file_size, # size of decompressed data
-# file_offset, # offset of file header from start of archive
-# time, # mod time of file (in dos format)
-#  date,          # mod date of file (in dos format)
-# crc, # crc checksum of the data
-# )
-#
-# Directories can be recognized by the trailing path_sep in the name,
-# data_size and file_offset are 0.
-def _read_directory(archive):
- try:
- fp = _io.open_code(archive)
- except OSError:
- raise ZipImportError(f"can't open Zip file: {archive!r}", path=archive)
-
- with fp:
- try:
- fp.seek(-END_CENTRAL_DIR_SIZE, 2)
- header_position = fp.tell()
- buffer = fp.read(END_CENTRAL_DIR_SIZE)
- except OSError:
- raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
- if len(buffer) != END_CENTRAL_DIR_SIZE:
- raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
- if buffer[:4] != STRING_END_ARCHIVE:
- # Bad: End of Central Dir signature
- # Check if there's a comment.
- try:
- fp.seek(0, 2)
- file_size = fp.tell()
- except OSError:
- raise ZipImportError(f"can't read Zip file: {archive!r}",
- path=archive)
- max_comment_start = max(file_size - MAX_COMMENT_LEN -
- END_CENTRAL_DIR_SIZE, 0)
- try:
- fp.seek(max_comment_start)
- data = fp.read()
- except OSError:
- raise ZipImportError(f"can't read Zip file: {archive!r}",
- path=archive)
- pos = data.rfind(STRING_END_ARCHIVE)
- if pos < 0:
- raise ZipImportError(f'not a Zip file: {archive!r}',
- path=archive)
- buffer = data[pos:pos+END_CENTRAL_DIR_SIZE]
- if len(buffer) != END_CENTRAL_DIR_SIZE:
- raise ZipImportError(f"corrupt Zip file: {archive!r}",
- path=archive)
- header_position = file_size - len(data) + pos
-
- header_size = _unpack_uint32(buffer[12:16])
- header_offset = _unpack_uint32(buffer[16:20])
- if header_position < header_size:
- raise ZipImportError(f'bad central directory size: {archive!r}', path=archive)
- if header_position < header_offset:
- raise ZipImportError(f'bad central directory offset: {archive!r}', path=archive)
- header_position -= header_size
- arc_offset = header_position - header_offset
- if arc_offset < 0:
- raise ZipImportError(f'bad central directory size or offset: {archive!r}', path=archive)
-
- files = {}
- # Start of Central Directory
- count = 0
- try:
- fp.seek(header_position)
- except OSError:
- raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
- while True:
- buffer = fp.read(46)
- if len(buffer) < 4:
- raise EOFError('EOF read where not expected')
- # Start of file header
- if buffer[:4] != b'PK\x01\x02':
- break # Bad: Central Dir File Header
- if len(buffer) != 46:
- raise EOFError('EOF read where not expected')
- flags = _unpack_uint16(buffer[8:10])
- compress = _unpack_uint16(buffer[10:12])
- time = _unpack_uint16(buffer[12:14])
- date = _unpack_uint16(buffer[14:16])
- crc = _unpack_uint32(buffer[16:20])
- data_size = _unpack_uint32(buffer[20:24])
- file_size = _unpack_uint32(buffer[24:28])
- name_size = _unpack_uint16(buffer[28:30])
- extra_size = _unpack_uint16(buffer[30:32])
- comment_size = _unpack_uint16(buffer[32:34])
- file_offset = _unpack_uint32(buffer[42:46])
- header_size = name_size + extra_size + comment_size
- if file_offset > header_offset:
- raise ZipImportError(f'bad local header offset: {archive!r}', path=archive)
- file_offset += arc_offset
-
- try:
- name = fp.read(name_size)
- except OSError:
- raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
- if len(name) != name_size:
- raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
- # On Windows, calling fseek to skip over the fields we don't use is
- # slower than reading the data because fseek flushes stdio's
- # internal buffers. See issue #8745.
- try:
- if len(fp.read(header_size - name_size)) != header_size - name_size:
- raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
- except OSError:
- raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
-
- if flags & 0x800:
- # UTF-8 file names extension
- name = name.decode()
- else:
- # Historical ZIP filename encoding
- try:
- name = name.decode('ascii')
- except UnicodeDecodeError:
- name = name.decode('latin1').translate(cp437_table)
-
- name = name.replace('/', path_sep)
- path = _bootstrap_external._path_join(archive, name)
- t = (path, compress, data_size, file_size, file_offset, time, date, crc)
- files[name] = t
- count += 1
- _bootstrap._verbose_message('zipimport: found {} names in {!r}', count, archive)
- return files
-
-# During bootstrap, we may need to load the encodings
-# package from a ZIP file. But the cp437 encoding is implemented
-# in Python in the encodings package.
-#
-# Break out of this dependency by using the translation table for
-# the cp437 encoding.
-cp437_table = (
- # ASCII part, 8 rows x 16 chars
- '\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f'
- '\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f'
- ' !"#$%&\'()*+,-./'
- '0123456789:;<=>?'
- '@ABCDEFGHIJKLMNO'
- 'PQRSTUVWXYZ[\\]^_'
- '`abcdefghijklmno'
- 'pqrstuvwxyz{|}~\x7f'
- # non-ASCII part, 16 rows x 8 chars
- '\xc7\xfc\xe9\xe2\xe4\xe0\xe5\xe7'
- '\xea\xeb\xe8\xef\xee\xec\xc4\xc5'
- '\xc9\xe6\xc6\xf4\xf6\xf2\xfb\xf9'
- '\xff\xd6\xdc\xa2\xa3\xa5\u20a7\u0192'
- '\xe1\xed\xf3\xfa\xf1\xd1\xaa\xba'
- '\xbf\u2310\xac\xbd\xbc\xa1\xab\xbb'
- '\u2591\u2592\u2593\u2502\u2524\u2561\u2562\u2556'
- '\u2555\u2563\u2551\u2557\u255d\u255c\u255b\u2510'
- '\u2514\u2534\u252c\u251c\u2500\u253c\u255e\u255f'
- '\u255a\u2554\u2569\u2566\u2560\u2550\u256c\u2567'
- '\u2568\u2564\u2565\u2559\u2558\u2552\u2553\u256b'
- '\u256a\u2518\u250c\u2588\u2584\u258c\u2590\u2580'
- '\u03b1\xdf\u0393\u03c0\u03a3\u03c3\xb5\u03c4'
- '\u03a6\u0398\u03a9\u03b4\u221e\u03c6\u03b5\u2229'
- '\u2261\xb1\u2265\u2264\u2320\u2321\xf7\u2248'
- '\xb0\u2219\xb7\u221a\u207f\xb2\u25a0\xa0'
-)
-
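Because str.translate accepts any indexable table, the string above decodes historical names without importing the encodings package; worked example (byte 0x82 is 'é' in cp437):

    >>> b'\x82'.decode('latin1').translate(cp437_table)
    'é'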
-_importing_zlib = False
-
-# Return the zlib.decompress function object, or NULL if zlib couldn't
-# be imported. The function is cached when found, so subsequent calls
-# don't import zlib again.
-def _get_decompress_func():
- global _importing_zlib
- if _importing_zlib:
-        # Someone has a zlib.py[co] in their Zip file;
- # let's avoid a stack overflow.
- _bootstrap._verbose_message('zipimport: zlib UNAVAILABLE')
- raise ZipImportError("can't decompress data; zlib not available")
-
- _importing_zlib = True
- try:
- from zlib import decompress
- except Exception:
- _bootstrap._verbose_message('zipimport: zlib UNAVAILABLE')
- raise ZipImportError("can't decompress data; zlib not available")
- finally:
- _importing_zlib = False
-
- _bootstrap._verbose_message('zipimport: zlib available')
- return decompress
-
-# Given a path to a Zip file and a toc_entry, return the (uncompressed) data.
-def _get_data(archive, toc_entry):
- datapath, compress, data_size, file_size, file_offset, time, date, crc = toc_entry
- if data_size < 0:
- raise ZipImportError('negative data size')
-
- with _io.open_code(archive) as fp:
- # Check to make sure the local file header is correct
- try:
- fp.seek(file_offset)
- except OSError:
- raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
- buffer = fp.read(30)
- if len(buffer) != 30:
- raise EOFError('EOF read where not expected')
-
- if buffer[:4] != b'PK\x03\x04':
- # Bad: Local File Header
- raise ZipImportError(f'bad local file header: {archive!r}', path=archive)
-
- name_size = _unpack_uint16(buffer[26:28])
- extra_size = _unpack_uint16(buffer[28:30])
- header_size = 30 + name_size + extra_size
- file_offset += header_size # Start of file data
- try:
- fp.seek(file_offset)
- except OSError:
- raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
- raw_data = fp.read(data_size)
- if len(raw_data) != data_size:
- raise OSError("zipimport: can't read data")
-
- if compress == 0:
- # data is not compressed
- return raw_data
-
- # Decompress with zlib
- try:
- decompress = _get_decompress_func()
- except Exception:
- raise ZipImportError("can't decompress data; zlib not available")
- return decompress(raw_data, -15)
-
-
-# Lenient date/time comparison function. The precision of the mtime
-# in the archive is lower than the mtime stored in a .pyc: we
-# must allow a difference of at most one second.
-def _eq_mtime(t1, t2):
- # dostime only stores even seconds, so be lenient
- return abs(t1 - t2) <= 1
-
-
-# Given the contents of a .py[co] file, unmarshal the data
-# and return the code object. Return None if the magic word doesn't
-# match, or if the recorded .py[co] metadata does not match the source
-# (we do this instead of raising an exception, as we fall back
-# to .py if available and we don't want to mask other errors).
-def _unmarshal_code(self, pathname, fullpath, fullname, data):
- exc_details = {
- 'name': fullname,
- 'path': fullpath,
- }
-
- try:
- flags = _bootstrap_external._classify_pyc(data, fullname, exc_details)
- except ImportError:
- return None
-
- hash_based = flags & 0b1 != 0
- if hash_based:
- check_source = flags & 0b10 != 0
- if (_imp.check_hash_based_pycs != 'never' and
- (check_source or _imp.check_hash_based_pycs == 'always')):
- source_bytes = _get_pyc_source(self, fullpath)
- if source_bytes is not None:
- source_hash = _imp.source_hash(
- _bootstrap_external._RAW_MAGIC_NUMBER,
- source_bytes,
- )
-
- try:
- _bootstrap_external._validate_hash_pyc(
- data, source_hash, fullname, exc_details)
- except ImportError:
- return None
- else:
- source_mtime, source_size = \
- _get_mtime_and_size_of_source(self, fullpath)
-
- if source_mtime:
- # We don't use _bootstrap_external._validate_timestamp_pyc
- # to allow for a more lenient timestamp check.
- if (not _eq_mtime(_unpack_uint32(data[8:12]), source_mtime) or
- _unpack_uint32(data[12:16]) != source_size):
- _bootstrap._verbose_message(
- f'bytecode is stale for {fullname!r}')
- return None
-
- code = marshal.loads(data[16:])
- if not isinstance(code, _code_type):
- raise TypeError(f'compiled module {pathname!r} is not a code object')
- return code
-
-_code_type = type(_unmarshal_code.__code__)
-
-
-# Replace any occurrences of '\r\n?' in the input string with '\n'.
-# This converts DOS and Mac line endings to Unix line endings.
-def _normalize_line_endings(source):
- source = source.replace(b'\r\n', b'\n')
- source = source.replace(b'\r', b'\n')
- return source
-
-# Given a string buffer containing Python source code, compile it
-# and return a code object.
-def _compile_source(pathname, source):
- source = _normalize_line_endings(source)
- return compile(source, pathname, 'exec', dont_inherit=True)
-
-# Convert the date/time values found in the Zip archive to a value
-# that's compatible with the time stamp stored in .pyc files.
-def _parse_dostime(d, t):
- return time.mktime((
- (d >> 9) + 1980, # bits 9..15: year
- (d >> 5) & 0xF, # bits 5..8: month
- d & 0x1F, # bits 0..4: day
- t >> 11, # bits 11..15: hours
-        (t >> 5) & 0x3F,    # bits 5..10: minutes
-        (t & 0x1F) * 2,     # bits 0..4: seconds / 2
- -1, -1, -1))
-
-# Given a path to a .pyc file in the archive, return the
-# modification time of the matching .py file and its size,
-# or (0, 0) if no source is available.
-def _get_mtime_and_size_of_source(self, path):
- try:
- # strip 'c' or 'o' from *.py[co]
- assert path[-1:] in ('c', 'o')
- path = path[:-1]
- toc_entry = self._files[path]
- # fetch the time stamp of the .py file for comparison
- # with an embedded pyc time stamp
- time = toc_entry[5]
- date = toc_entry[6]
- uncompressed_size = toc_entry[3]
- return _parse_dostime(date, time), uncompressed_size
- except (KeyError, IndexError, TypeError):
- return 0, 0
-
-
-# Given a path to a .pyc file in the archive, return the
-# contents of the matching .py file, or None if no source
-# is available.
-def _get_pyc_source(self, path):
- # strip 'c' or 'o' from *.py[co]
- assert path[-1:] in ('c', 'o')
- path = path[:-1]
-
- try:
- toc_entry = self._files[path]
- except KeyError:
- return None
- else:
- return _get_data(self.archive, toc_entry)
-
-
-# Get the code object associated with the module specified by
-# 'fullname'.
-def _get_module_code(self, fullname):
- path = _get_module_path(self, fullname)
- for suffix, isbytecode, ispackage in _zip_searchorder:
- fullpath = path + suffix
- _bootstrap._verbose_message('trying {}{}{}', self.archive, path_sep, fullpath, verbosity=2)
- try:
- toc_entry = self._files[fullpath]
- except KeyError:
- pass
- else:
- modpath = toc_entry[0]
- data = _get_data(self.archive, toc_entry)
- if isbytecode:
- code = _unmarshal_code(self, modpath, fullpath, fullname, data)
- else:
- code = _compile_source(modpath, data)
- if code is None:
- # bad magic number or non-matching mtime
- # in byte code, try next
- continue
- modpath = toc_entry[0]
- return code, ispackage, modpath
- else:
- raise ZipImportError(f"can't find module {fullname!r}", name=fullname)
-
-
-class _ZipImportResourceReader:
- """Private class used to support ZipImport.get_resource_reader().
-
- This class is allowed to reference all the innards and private parts of
- the zipimporter.
- """
- _registered = False
-
- def __init__(self, zipimporter, fullname):
- self.zipimporter = zipimporter
- self.fullname = fullname
-
- def open_resource(self, resource):
- fullname_as_path = self.fullname.replace('.', '/')
- path = f'{fullname_as_path}/{resource}'
- from io import BytesIO
- try:
- return BytesIO(self.zipimporter.get_data(path))
- except OSError:
- raise FileNotFoundError(path)
-
- def resource_path(self, resource):
- # All resources are in the zip file, so there is no path to the file.
- # Raising FileNotFoundError tells the higher level API to extract the
- # binary data and create a temporary file.
- raise FileNotFoundError
-
- def is_resource(self, name):
- # Maybe we could do better, but if we can get the data, it's a
- # resource. Otherwise it isn't.
- fullname_as_path = self.fullname.replace('.', '/')
- path = f'{fullname_as_path}/{name}'
- try:
- self.zipimporter.get_data(path)
- except OSError:
- return False
- return True
-
- def contents(self):
- # This is a bit convoluted, because fullname will be a module path,
- # but _files is a list of file names relative to the top of the
- # archive's namespace. We want to compare file paths to find all the
- # names of things inside the module represented by fullname. So we
- # turn the module path of fullname into a file path relative to the
- # top of the archive, and then we iterate through _files looking for
- # names inside that "directory".
- from pathlib import Path
- fullname_path = Path(self.zipimporter.get_filename(self.fullname))
- relative_path = fullname_path.relative_to(self.zipimporter.archive)
- # Don't forget that fullname names a package, so its path will include
- # __init__.py, which we want to ignore.
- assert relative_path.name == '__init__.py'
- package_path = relative_path.parent
- subdirs_seen = set()
- for filename in self.zipimporter._files:
- try:
- relative = Path(filename).relative_to(package_path)
- except ValueError:
- continue
- # If the path of the file (which is relative to the top of the zip
- # namespace), relative to the package given when the resource
- # reader was created, has a parent, then it's a name in a
- # subdirectory and thus we skip it.
- parent_name = relative.parent.name
- if len(parent_name) == 0:
- yield relative.name
- elif parent_name not in subdirs_seen:
- subdirs_seen.add(parent_name)
- yield parent_name
+"""zipimport provides support for importing Python modules from Zip archives.
+
+This module exports three objects:
+- zipimporter: a class; its constructor takes a path to a Zip archive.
+- ZipImportError: exception raised by zipimporter objects. It's a
+ subclass of ImportError, so it can be caught as ImportError, too.
+- _zip_directory_cache: a dict, mapping archive paths to zip directory
+ info dicts, as used in zipimporter._files.
+
+It is usually not needed to use the zipimport module explicitly; it is
+used by the builtin import mechanism for sys.path items that are paths
+to Zip archives.
+"""
+
+#from importlib import _bootstrap_external
+#from importlib import _bootstrap # for _verbose_message
+import _frozen_importlib_external as _bootstrap_external
+from _frozen_importlib_external import _unpack_uint16, _unpack_uint32
+import _frozen_importlib as _bootstrap # for _verbose_message
+import _imp # for check_hash_based_pycs
+import _io # for open
+import marshal # for loads
+import sys # for modules
+import time # for mktime
+
+__all__ = ['ZipImportError', 'zipimporter']
+
+
+path_sep = _bootstrap_external.path_sep
+alt_path_sep = _bootstrap_external.path_separators[1:]
+
+
+class ZipImportError(ImportError):
+ pass
+
+# _read_directory() cache
+_zip_directory_cache = {}
+
+_module_type = type(sys)
+
+END_CENTRAL_DIR_SIZE = 22
+STRING_END_ARCHIVE = b'PK\x05\x06'
+MAX_COMMENT_LEN = (1 << 16) - 1
+
+class zipimporter:
+ """zipimporter(archivepath) -> zipimporter object
+
+ Create a new zipimporter instance. 'archivepath' must be a path to
+ a zipfile, or to a specific path inside a zipfile. For example, it can be
+ '/tmp/myimport.zip', or '/tmp/myimport.zip/mydirectory', if mydirectory is a
+ valid directory inside the archive.
+
+    ZipImportError is raised if 'archivepath' doesn't point to a valid Zip
+ archive.
+
+ The 'archive' attribute of zipimporter objects contains the name of the
+ zipfile targeted.
+ """
+
+    # Split the "subdirectory" from the Zip archive path, look up a matching
+ # entry in sys.path_importer_cache, fetch the file directory from there
+ # if found, or else read it from the archive.
+ def __init__(self, path):
+ if not isinstance(path, str):
+ import os
+ path = os.fsdecode(path)
+ if not path:
+ raise ZipImportError('archive path is empty', path=path)
+ if alt_path_sep:
+ path = path.replace(alt_path_sep, path_sep)
+
+ prefix = []
+ while True:
+ try:
+ st = _bootstrap_external._path_stat(path)
+ except (OSError, ValueError):
+ # On Windows a ValueError is raised for too long paths.
+ # Back up one path element.
+ dirname, basename = _bootstrap_external._path_split(path)
+ if dirname == path:
+ raise ZipImportError('not a Zip file', path=path)
+ path = dirname
+ prefix.append(basename)
+ else:
+ # it exists
+ if (st.st_mode & 0o170000) != 0o100000: # stat.S_ISREG
+                    # it's not a file
+ raise ZipImportError('not a Zip file', path=path)
+ break
+
+ try:
+ files = _zip_directory_cache[path]
+ except KeyError:
+ files = _read_directory(path)
+ _zip_directory_cache[path] = files
+ self._files = files
+ self.archive = path
+ # a prefix directory following the ZIP file path.
+ self.prefix = _bootstrap_external._path_join(*prefix[::-1])
+ if self.prefix:
+ self.prefix += path_sep
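+
+        # For example (illustrative): zipimporter('/tmp/t.zip/sub'), where
+        # /tmp/t.zip is an existing Zip file, ends up with
+        # self.archive == '/tmp/t.zip' and self.prefix == 'sub' + path_sep.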
+
+
+ # Check whether we can satisfy the import of the module named by
+ # 'fullname', or whether it could be a portion of a namespace
+ # package. Return self if we can load it, a string containing the
+ # full path if it's a possible namespace portion, None if we
+ # can't load it.
+ def find_loader(self, fullname, path=None):
+ """find_loader(fullname, path=None) -> self, str or None.
+
+ Search for a module specified by 'fullname'. 'fullname' must be the
+ fully qualified (dotted) module name. It returns the zipimporter
+ instance itself if the module was found, a string containing the
+ full path name if it's possibly a portion of a namespace package,
+ or None otherwise. The optional 'path' argument is ignored -- it's
+ there for compatibility with the importer protocol.
+ """
+ mi = _get_module_info(self, fullname)
+ if mi is not None:
+ # This is a module or package.
+ return self, []
+
+ # Not a module or regular package. See if this is a directory, and
+ # therefore possibly a portion of a namespace package.
+
+        # We're only interested in the last path component of fullname;
+        # earlier components are recorded in self.prefix.
+ modpath = _get_module_path(self, fullname)
+ if _is_dir(self, modpath):
+ # This is possibly a portion of a namespace
+ # package. Return the string representing its path,
+ # without a trailing separator.
+ return None, [f'{self.archive}{path_sep}{modpath}']
+
+ return None, []
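+
+    # Illustrative outcomes: (self, []) for a module or package found in the
+    # archive; (None, [f'{self.archive}{path_sep}mods']) if 'mods' is only a
+    # directory and thus a possible namespace portion; (None, []) otherwise.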
+
+
+ # Check whether we can satisfy the import of the module named by
+ # 'fullname'. Return self if we can, None if we can't.
+ def find_module(self, fullname, path=None):
+ """find_module(fullname, path=None) -> self or None.
+
+ Search for a module specified by 'fullname'. 'fullname' must be the
+ fully qualified (dotted) module name. It returns the zipimporter
+ instance itself if the module was found, or None if it wasn't.
+ The optional 'path' argument is ignored -- it's there for compatibility
+ with the importer protocol.
+ """
+ return self.find_loader(fullname, path)[0]
+
+
+ def get_code(self, fullname):
+ """get_code(fullname) -> code object.
+
+ Return the code object for the specified module. Raise ZipImportError
+ if the module couldn't be found.
+ """
+ code, ispackage, modpath = _get_module_code(self, fullname)
+ return code
+
+
+ def get_data(self, pathname):
+ """get_data(pathname) -> string with file data.
+
+ Return the data associated with 'pathname'. Raise OSError if
+ the file wasn't found.
+ """
+ if alt_path_sep:
+ pathname = pathname.replace(alt_path_sep, path_sep)
+
+ key = pathname
+ if pathname.startswith(self.archive + path_sep):
+ key = pathname[len(self.archive + path_sep):]
+
+ try:
+ toc_entry = self._files[key]
+ except KeyError:
+ raise OSError(0, '', key)
+ return _get_data(self.archive, toc_entry)
+
+
+ # Return a string matching __file__ for the named module
+ def get_filename(self, fullname):
+ """get_filename(fullname) -> filename string.
+
+ Return the filename for the specified module.
+ """
+ # Deciding the filename requires working out where the code
+        # would come from if the module was actually loaded.
+ code, ispackage, modpath = _get_module_code(self, fullname)
+ return modpath
+
+
+ def get_source(self, fullname):
+ """get_source(fullname) -> source string.
+
+ Return the source code for the specified module. Raise ZipImportError
+        if the module couldn't be found; return None if the archive does
+        contain the module but has no source for it.
+ """
+ mi = _get_module_info(self, fullname)
+ if mi is None:
+ raise ZipImportError(f"can't find module {fullname!r}", name=fullname)
+
+ path = _get_module_path(self, fullname)
+ if mi:
+ fullpath = _bootstrap_external._path_join(path, '__init__.py')
+ else:
+ fullpath = f'{path}.py'
+
+ try:
+ toc_entry = self._files[fullpath]
+ except KeyError:
+ # we have the module, but no source
+ return None
+ return _get_data(self.archive, toc_entry).decode()
+
+
+ # Return a bool signifying whether the module is a package or not.
+ def is_package(self, fullname):
+ """is_package(fullname) -> bool.
+
+ Return True if the module specified by fullname is a package.
+ Raise ZipImportError if the module couldn't be found.
+ """
+ mi = _get_module_info(self, fullname)
+ if mi is None:
+ raise ZipImportError(f"can't find module {fullname!r}", name=fullname)
+ return mi
+
+
+ # Load and return the module named by 'fullname'.
+ def load_module(self, fullname):
+ """load_module(fullname) -> module.
+
+ Load the module specified by 'fullname'. 'fullname' must be the
+ fully qualified (dotted) module name. It returns the imported
+ module, or raises ZipImportError if it wasn't found.
+ """
+ code, ispackage, modpath = _get_module_code(self, fullname)
+ mod = sys.modules.get(fullname)
+ if mod is None or not isinstance(mod, _module_type):
+ mod = _module_type(fullname)
+ sys.modules[fullname] = mod
+ mod.__loader__ = self
+
+ try:
+ if ispackage:
+ # add __path__ to the module *before* the code gets
+ # executed
+ path = _get_module_path(self, fullname)
+ fullpath = _bootstrap_external._path_join(self.archive, path)
+ mod.__path__ = [fullpath]
+
+ if not hasattr(mod, '__builtins__'):
+ mod.__builtins__ = __builtins__
+ _bootstrap_external._fix_up_module(mod.__dict__, fullname, modpath)
+ exec(code, mod.__dict__)
+ except:
+ del sys.modules[fullname]
+ raise
+
+ try:
+ mod = sys.modules[fullname]
+ except KeyError:
+ raise ImportError(f'Loaded module {fullname!r} not found in sys.modules')
+ _bootstrap._verbose_message('import {} # loaded from Zip {}', fullname, modpath)
+ return mod
+
+
+ def get_resource_reader(self, fullname):
+ """Return the ResourceReader for a package in a zip file.
+
+ If 'fullname' is a package within the zip file, return the
+ 'ResourceReader' object for the package. Otherwise return None.
+ """
+ try:
+ if not self.is_package(fullname):
+ return None
+ except ZipImportError:
+ return None
+ if not _ZipImportResourceReader._registered:
+ from importlib.abc import ResourceReader
+ ResourceReader.register(_ZipImportResourceReader)
+ _ZipImportResourceReader._registered = True
+ return _ZipImportResourceReader(self, fullname)
+
+
+ def __repr__(self):
+ return f'<zipimporter object "{self.archive}{path_sep}{self.prefix}">'
+
+
+# _zip_searchorder defines how we search for a module in the Zip
+# archive: we first search for a package __init__, then for
+# non-package .pyc, and .py entries. The .pyc entries
+# are swapped by initzipimport() if we run in optimized mode. Also,
+# '/' is replaced by path_sep there.
+_zip_searchorder = (
+ (path_sep + '__init__.pyc', True, True),
+ (path_sep + '__init__.py', False, True),
+ ('.pyc', True, False),
+ ('.py', False, False),
+)
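+
+# For example (illustrative), with an empty prefix the module 'mypkg.mod' is
+# tried as 'mod/__init__.pyc', 'mod/__init__.py', 'mod.pyc', then 'mod.py';
+# only the last dotted component is used (see _get_module_path below).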
+
+# Given a module name, return the potential file path in the
+# archive (without extension).
+def _get_module_path(self, fullname):
+ return self.prefix + fullname.rpartition('.')[2]
+
+# Does this path represent a directory?
+def _is_dir(self, path):
+ # See if this is a "directory". If so, it's eligible to be part
+ # of a namespace package. We test by seeing if the name, with an
+ # appended path separator, exists.
+ dirpath = path + path_sep
+ # If dirpath is present in self._files, we have a directory.
+ return dirpath in self._files
+
+# Return some information about a module.
+def _get_module_info(self, fullname):
+ path = _get_module_path(self, fullname)
+ for suffix, isbytecode, ispackage in _zip_searchorder:
+ fullpath = path + suffix
+ if fullpath in self._files:
+ return ispackage
+ return None
+
+
+# implementation
+
+# _read_directory(archive) -> files dict (new reference)
+#
+# Given a path to a Zip archive, build a dict, mapping file names
+# (local to the archive, using SEP as a separator) to toc entries.
+#
+# A toc_entry is a tuple:
+#
+# (__file__, # value to use for __file__, available for all files,
+# # encoded to the filesystem encoding
+# compress, # compression kind; 0 for uncompressed
+# data_size, # size of compressed data on disk
+# file_size, # size of decompressed data
+# file_offset, # offset of file header from start of archive
+# time, # mod time of file (in dos format)
+#     date,                # mod date of file (in dos format)
+# crc, # crc checksum of the data
+# )
+#
+# Directories can be recognized by the trailing path_sep in the name;
+# their data_size and file_offset are 0.
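+#
+# For example (illustrative), an archive /tmp/t.zip holding an uncompressed
+# 42-byte mypkg/__init__.py might map:
+#
+#   'mypkg/__init__.py' -> ('/tmp/t.zip/mypkg/__init__.py',
+#                           0, 42, 42, 63, 27820, 21290, 0x1c2d3e4f)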
+def _read_directory(archive):
+ try:
+ fp = _io.open_code(archive)
+ except OSError:
+ raise ZipImportError(f"can't open Zip file: {archive!r}", path=archive)
+
+ with fp:
+ try:
+ fp.seek(-END_CENTRAL_DIR_SIZE, 2)
+ header_position = fp.tell()
+ buffer = fp.read(END_CENTRAL_DIR_SIZE)
+ except OSError:
+ raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
+ if len(buffer) != END_CENTRAL_DIR_SIZE:
+ raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
+ if buffer[:4] != STRING_END_ARCHIVE:
+ # Bad: End of Central Dir signature
+ # Check if there's a comment.
+ try:
+ fp.seek(0, 2)
+ file_size = fp.tell()
+ except OSError:
+ raise ZipImportError(f"can't read Zip file: {archive!r}",
+ path=archive)
+ max_comment_start = max(file_size - MAX_COMMENT_LEN -
+ END_CENTRAL_DIR_SIZE, 0)
+ try:
+ fp.seek(max_comment_start)
+ data = fp.read()
+ except OSError:
+ raise ZipImportError(f"can't read Zip file: {archive!r}",
+ path=archive)
+ pos = data.rfind(STRING_END_ARCHIVE)
+ if pos < 0:
+ raise ZipImportError(f'not a Zip file: {archive!r}',
+ path=archive)
+ buffer = data[pos:pos+END_CENTRAL_DIR_SIZE]
+ if len(buffer) != END_CENTRAL_DIR_SIZE:
+ raise ZipImportError(f"corrupt Zip file: {archive!r}",
+ path=archive)
+ header_position = file_size - len(data) + pos
+
+ header_size = _unpack_uint32(buffer[12:16])
+ header_offset = _unpack_uint32(buffer[16:20])
+ if header_position < header_size:
+ raise ZipImportError(f'bad central directory size: {archive!r}', path=archive)
+ if header_position < header_offset:
+ raise ZipImportError(f'bad central directory offset: {archive!r}', path=archive)
+ header_position -= header_size
+ arc_offset = header_position - header_offset
+ if arc_offset < 0:
+ raise ZipImportError(f'bad central directory size or offset: {archive!r}', path=archive)
+
+ files = {}
+ # Start of Central Directory
+ count = 0
+ try:
+ fp.seek(header_position)
+ except OSError:
+ raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
+ while True:
+ buffer = fp.read(46)
+ if len(buffer) < 4:
+ raise EOFError('EOF read where not expected')
+ # Start of file header
+ if buffer[:4] != b'PK\x01\x02':
+ break # Bad: Central Dir File Header
+ if len(buffer) != 46:
+ raise EOFError('EOF read where not expected')
+ flags = _unpack_uint16(buffer[8:10])
+ compress = _unpack_uint16(buffer[10:12])
+ time = _unpack_uint16(buffer[12:14])
+ date = _unpack_uint16(buffer[14:16])
+ crc = _unpack_uint32(buffer[16:20])
+ data_size = _unpack_uint32(buffer[20:24])
+ file_size = _unpack_uint32(buffer[24:28])
+ name_size = _unpack_uint16(buffer[28:30])
+ extra_size = _unpack_uint16(buffer[30:32])
+ comment_size = _unpack_uint16(buffer[32:34])
+ file_offset = _unpack_uint32(buffer[42:46])
+ header_size = name_size + extra_size + comment_size
+ if file_offset > header_offset:
+ raise ZipImportError(f'bad local header offset: {archive!r}', path=archive)
+ file_offset += arc_offset
+
+ try:
+ name = fp.read(name_size)
+ except OSError:
+ raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
+ if len(name) != name_size:
+ raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
+ # On Windows, calling fseek to skip over the fields we don't use is
+ # slower than reading the data because fseek flushes stdio's
+ # internal buffers. See issue #8745.
+ try:
+ if len(fp.read(header_size - name_size)) != header_size - name_size:
+ raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
+ except OSError:
+ raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
+
+ if flags & 0x800:
+ # UTF-8 file names extension
+ name = name.decode()
+ else:
+ # Historical ZIP filename encoding
+ try:
+ name = name.decode('ascii')
+ except UnicodeDecodeError:
+ name = name.decode('latin1').translate(cp437_table)
+
+ name = name.replace('/', path_sep)
+ path = _bootstrap_external._path_join(archive, name)
+ t = (path, compress, data_size, file_size, file_offset, time, date, crc)
+ files[name] = t
+ count += 1
+ _bootstrap._verbose_message('zipimport: found {} names in {!r}', count, archive)
+ return files
+
+# During bootstrap, we may need to load the encodings
+# package from a ZIP file. But the cp437 encoding is implemented
+# in Python in the encodings package.
+#
+# Break out of this dependency by using the translation table for
+# the cp437 encoding.
+cp437_table = (
+ # ASCII part, 8 rows x 16 chars
+ '\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f'
+ '\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f'
+ ' !"#$%&\'()*+,-./'
+ '0123456789:;<=>?'
+ '@ABCDEFGHIJKLMNO'
+ 'PQRSTUVWXYZ[\\]^_'
+ '`abcdefghijklmno'
+ 'pqrstuvwxyz{|}~\x7f'
+ # non-ASCII part, 16 rows x 8 chars
+ '\xc7\xfc\xe9\xe2\xe4\xe0\xe5\xe7'
+ '\xea\xeb\xe8\xef\xee\xec\xc4\xc5'
+ '\xc9\xe6\xc6\xf4\xf6\xf2\xfb\xf9'
+ '\xff\xd6\xdc\xa2\xa3\xa5\u20a7\u0192'
+ '\xe1\xed\xf3\xfa\xf1\xd1\xaa\xba'
+ '\xbf\u2310\xac\xbd\xbc\xa1\xab\xbb'
+ '\u2591\u2592\u2593\u2502\u2524\u2561\u2562\u2556'
+ '\u2555\u2563\u2551\u2557\u255d\u255c\u255b\u2510'
+ '\u2514\u2534\u252c\u251c\u2500\u253c\u255e\u255f'
+ '\u255a\u2554\u2569\u2566\u2560\u2550\u256c\u2567'
+ '\u2568\u2564\u2565\u2559\u2558\u2552\u2553\u256b'
+ '\u256a\u2518\u250c\u2588\u2584\u258c\u2590\u2580'
+ '\u03b1\xdf\u0393\u03c0\u03a3\u03c3\xb5\u03c4'
+ '\u03a6\u0398\u03a9\u03b4\u221e\u03c6\u03b5\u2229'
+ '\u2261\xb1\u2265\u2264\u2320\u2321\xf7\u2248'
+ '\xb0\u2219\xb7\u221a\u207f\xb2\u25a0\xa0'
+)
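+
+# For example (illustrative), the raw name byte b'\x87' is cp437 for 'ç', so
+# b'\x87'.decode('latin1').translate(cp437_table) == '\xe7' ('ç').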
+
+_importing_zlib = False
+
+# Return the zlib.decompress function object; raise ZipImportError if
+# zlib couldn't be imported. Importing again on subsequent calls is cheap,
+# because Python caches the zlib module in sys.modules.
+def _get_decompress_func():
+ global _importing_zlib
+ if _importing_zlib:
+        # Someone has a zlib.py[co] in their Zip file;
+        # let's avoid a stack overflow.
+ _bootstrap._verbose_message('zipimport: zlib UNAVAILABLE')
+ raise ZipImportError("can't decompress data; zlib not available")
+
+ _importing_zlib = True
+ try:
+ from zlib import decompress
+ except Exception:
+ _bootstrap._verbose_message('zipimport: zlib UNAVAILABLE')
+ raise ZipImportError("can't decompress data; zlib not available")
+ finally:
+ _importing_zlib = False
+
+ _bootstrap._verbose_message('zipimport: zlib available')
+ return decompress
+
+# Given a path to a Zip file and a toc_entry, return the (uncompressed) data.
+def _get_data(archive, toc_entry):
+ datapath, compress, data_size, file_size, file_offset, time, date, crc = toc_entry
+ if data_size < 0:
+ raise ZipImportError('negative data size')
+
+ with _io.open_code(archive) as fp:
+ # Check to make sure the local file header is correct
+ try:
+ fp.seek(file_offset)
+ except OSError:
+ raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
+ buffer = fp.read(30)
+ if len(buffer) != 30:
+ raise EOFError('EOF read where not expected')
+
+ if buffer[:4] != b'PK\x03\x04':
+ # Bad: Local File Header
+ raise ZipImportError(f'bad local file header: {archive!r}', path=archive)
+
+ name_size = _unpack_uint16(buffer[26:28])
+ extra_size = _unpack_uint16(buffer[28:30])
+ header_size = 30 + name_size + extra_size
+ file_offset += header_size # Start of file data
+ try:
+ fp.seek(file_offset)
+ except OSError:
+ raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
+ raw_data = fp.read(data_size)
+ if len(raw_data) != data_size:
+ raise OSError("zipimport: can't read data")
+
+ if compress == 0:
+ # data is not compressed
+ return raw_data
+
+ # Decompress with zlib
+ try:
+ decompress = _get_decompress_func()
+ except Exception:
+ raise ZipImportError("can't decompress data; zlib not available")
+ return decompress(raw_data, -15)
+
+
+# Lenient date/time comparison function. The precision of the mtime
+# in the archive is lower than the mtime stored in a .pyc: we
+# must allow a difference of at most one second.
+def _eq_mtime(t1, t2):
+ # dostime only stores even seconds, so be lenient
+ return abs(t1 - t2) <= 1
+
+
+# Given the contents of a .py[co] file, unmarshal the data
+# and return the code object. Return None if the magic word doesn't
+# match, or if the recorded .py[co] metadata does not match the source
+# (we do this instead of raising an exception, as we fall back
+# to .py if available and we don't want to mask other errors).
+def _unmarshal_code(self, pathname, fullpath, fullname, data):
+ exc_details = {
+ 'name': fullname,
+ 'path': fullpath,
+ }
+
+ try:
+ flags = _bootstrap_external._classify_pyc(data, fullname, exc_details)
+ except ImportError:
+ return None
+
+ hash_based = flags & 0b1 != 0
+ if hash_based:
+ check_source = flags & 0b10 != 0
+ if (_imp.check_hash_based_pycs != 'never' and
+ (check_source or _imp.check_hash_based_pycs == 'always')):
+ source_bytes = _get_pyc_source(self, fullpath)
+ if source_bytes is not None:
+ source_hash = _imp.source_hash(
+ _bootstrap_external._RAW_MAGIC_NUMBER,
+ source_bytes,
+ )
+
+ try:
+ _bootstrap_external._validate_hash_pyc(
+ data, source_hash, fullname, exc_details)
+ except ImportError:
+ return None
+ else:
+ source_mtime, source_size = \
+ _get_mtime_and_size_of_source(self, fullpath)
+
+ if source_mtime:
+ # We don't use _bootstrap_external._validate_timestamp_pyc
+ # to allow for a more lenient timestamp check.
+ if (not _eq_mtime(_unpack_uint32(data[8:12]), source_mtime) or
+ _unpack_uint32(data[12:16]) != source_size):
+ _bootstrap._verbose_message(
+ f'bytecode is stale for {fullname!r}')
+ return None
+
+ code = marshal.loads(data[16:])
+ if not isinstance(code, _code_type):
+ raise TypeError(f'compiled module {pathname!r} is not a code object')
+ return code
+
+_code_type = type(_unmarshal_code.__code__)
+
+
+# Replace any occurrences of '\r\n?' in the input string with '\n'.
+# This converts DOS and Mac line endings to Unix line endings.
+def _normalize_line_endings(source):
+ source = source.replace(b'\r\n', b'\n')
+ source = source.replace(b'\r', b'\n')
+ return source
+
+# Given a string buffer containing Python source code, compile it
+# and return a code object.
+def _compile_source(pathname, source):
+ source = _normalize_line_endings(source)
+ return compile(source, pathname, 'exec', dont_inherit=True)
+
+# Convert the date/time values found in the Zip archive to a value
+# that's compatible with the time stamp stored in .pyc files.
+def _parse_dostime(d, t):
+ return time.mktime((
+ (d >> 9) + 1980, # bits 9..15: year
+ (d >> 5) & 0xF, # bits 5..8: month
+ d & 0x1F, # bits 0..4: day
+ t >> 11, # bits 11..15: hours
+        (t >> 5) & 0x3F,    # bits 5..10: minutes
+        (t & 0x1F) * 2,     # bits 0..4: seconds / 2
+ -1, -1, -1))
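+
+# Worked example (illustrative): d = 0x532A and t = 0x6CAC decode as
+# year (0x532A >> 9) + 1980 = 2021, month 9, day 10, time 13:37:24,
+# i.e. 2021-09-10 13:37:24 local time.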
+
+# Given a path to a .pyc file in the archive, return the
+# modification time of the matching .py file and its size,
+# or (0, 0) if no source is available.
+def _get_mtime_and_size_of_source(self, path):
+ try:
+ # strip 'c' or 'o' from *.py[co]
+ assert path[-1:] in ('c', 'o')
+ path = path[:-1]
+ toc_entry = self._files[path]
+ # fetch the time stamp of the .py file for comparison
+ # with an embedded pyc time stamp
+ time = toc_entry[5]
+ date = toc_entry[6]
+ uncompressed_size = toc_entry[3]
+ return _parse_dostime(date, time), uncompressed_size
+ except (KeyError, IndexError, TypeError):
+ return 0, 0
+
+
+# Given a path to a .pyc file in the archive, return the
+# contents of the matching .py file, or None if no source
+# is available.
+def _get_pyc_source(self, path):
+ # strip 'c' or 'o' from *.py[co]
+ assert path[-1:] in ('c', 'o')
+ path = path[:-1]
+
+ try:
+ toc_entry = self._files[path]
+ except KeyError:
+ return None
+ else:
+ return _get_data(self.archive, toc_entry)
+
+
+# Get the code object associated with the module specified by
+# 'fullname'.
+def _get_module_code(self, fullname):
+ path = _get_module_path(self, fullname)
+ for suffix, isbytecode, ispackage in _zip_searchorder:
+ fullpath = path + suffix
+ _bootstrap._verbose_message('trying {}{}{}', self.archive, path_sep, fullpath, verbosity=2)
+ try:
+ toc_entry = self._files[fullpath]
+ except KeyError:
+ pass
+ else:
+ modpath = toc_entry[0]
+ data = _get_data(self.archive, toc_entry)
+ if isbytecode:
+ code = _unmarshal_code(self, modpath, fullpath, fullname, data)
+ else:
+ code = _compile_source(modpath, data)
+ if code is None:
+ # bad magic number or non-matching mtime
+ # in byte code, try next
+ continue
+ modpath = toc_entry[0]
+ return code, ispackage, modpath
+ else:
+ raise ZipImportError(f"can't find module {fullname!r}", name=fullname)
+
+
+class _ZipImportResourceReader:
+ """Private class used to support ZipImport.get_resource_reader().
+
+ This class is allowed to reference all the innards and private parts of
+ the zipimporter.
+ """
+ _registered = False
+
+ def __init__(self, zipimporter, fullname):
+ self.zipimporter = zipimporter
+ self.fullname = fullname
+
+ def open_resource(self, resource):
+ fullname_as_path = self.fullname.replace('.', '/')
+ path = f'{fullname_as_path}/{resource}'
+ from io import BytesIO
+ try:
+ return BytesIO(self.zipimporter.get_data(path))
+ except OSError:
+ raise FileNotFoundError(path)
+
+ def resource_path(self, resource):
+ # All resources are in the zip file, so there is no path to the file.
+ # Raising FileNotFoundError tells the higher level API to extract the
+ # binary data and create a temporary file.
+ raise FileNotFoundError
+
+ def is_resource(self, name):
+ # Maybe we could do better, but if we can get the data, it's a
+ # resource. Otherwise it isn't.
+ fullname_as_path = self.fullname.replace('.', '/')
+ path = f'{fullname_as_path}/{name}'
+ try:
+ self.zipimporter.get_data(path)
+ except OSError:
+ return False
+ return True
+
+ def contents(self):
+ # This is a bit convoluted, because fullname will be a module path,
+ # but _files is a list of file names relative to the top of the
+ # archive's namespace. We want to compare file paths to find all the
+ # names of things inside the module represented by fullname. So we
+ # turn the module path of fullname into a file path relative to the
+ # top of the archive, and then we iterate through _files looking for
+ # names inside that "directory".
+ from pathlib import Path
+ fullname_path = Path(self.zipimporter.get_filename(self.fullname))
+ relative_path = fullname_path.relative_to(self.zipimporter.archive)
+ # Don't forget that fullname names a package, so its path will include
+ # __init__.py, which we want to ignore.
+ assert relative_path.name == '__init__.py'
+ package_path = relative_path.parent
+ subdirs_seen = set()
+ for filename in self.zipimporter._files:
+ try:
+ relative = Path(filename).relative_to(package_path)
+ except ValueError:
+ continue
+ # If the path of the file (which is relative to the top of the zip
+ # namespace), relative to the package given when the resource
+ # reader was created, has a parent, then it's a name in a
+ # subdirectory and thus we skip it.
+ parent_name = relative.parent.name
+ if len(parent_name) == 0:
+ yield relative.name
+ elif parent_name not in subdirs_seen:
+ subdirs_seen.add(parent_name)
+ yield parent_name
diff --git a/contrib/tools/python3/src/Lib/zoneinfo/__init__.py b/contrib/tools/python3/src/Lib/zoneinfo/__init__.py
index f5510ee049..9f5be17ee4 100644
--- a/contrib/tools/python3/src/Lib/zoneinfo/__init__.py
+++ b/contrib/tools/python3/src/Lib/zoneinfo/__init__.py
@@ -1,31 +1,31 @@
-__all__ = [
- "ZoneInfo",
- "reset_tzpath",
- "available_timezones",
- "TZPATH",
- "ZoneInfoNotFoundError",
- "InvalidTZPathWarning",
-]
-
-from . import _tzpath
-from ._common import ZoneInfoNotFoundError
-
-try:
- from _zoneinfo import ZoneInfo
-except ImportError: # pragma: nocover
- from ._zoneinfo import ZoneInfo
-
-reset_tzpath = _tzpath.reset_tzpath
-available_timezones = _tzpath.available_timezones
-InvalidTZPathWarning = _tzpath.InvalidTZPathWarning
-
-
-def __getattr__(name):
- if name == "TZPATH":
- return _tzpath.TZPATH
- else:
- raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
-
-
-def __dir__():
- return sorted(list(globals()) + ["TZPATH"])
+__all__ = [
+ "ZoneInfo",
+ "reset_tzpath",
+ "available_timezones",
+ "TZPATH",
+ "ZoneInfoNotFoundError",
+ "InvalidTZPathWarning",
+]
+
+from . import _tzpath
+from ._common import ZoneInfoNotFoundError
+
+try:
+ from _zoneinfo import ZoneInfo
+except ImportError: # pragma: nocover
+ from ._zoneinfo import ZoneInfo
+
+reset_tzpath = _tzpath.reset_tzpath
+available_timezones = _tzpath.available_timezones
+InvalidTZPathWarning = _tzpath.InvalidTZPathWarning
+
+
+def __getattr__(name):
+ if name == "TZPATH":
+ return _tzpath.TZPATH
+ else:
+ raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
+
+
+def __dir__():
+ return sorted(list(globals()) + ["TZPATH"])
diff --git a/contrib/tools/python3/src/Lib/zoneinfo/_common.py b/contrib/tools/python3/src/Lib/zoneinfo/_common.py
index 41c898f37e..33311c1357 100644
--- a/contrib/tools/python3/src/Lib/zoneinfo/_common.py
+++ b/contrib/tools/python3/src/Lib/zoneinfo/_common.py
@@ -1,165 +1,165 @@
-import struct
-
-
-def load_tzdata(key):
- import importlib.resources
-
- components = key.split("/")
- package_name = ".".join(["tzdata.zoneinfo"] + components[:-1])
- resource_name = components[-1]
-
- try:
- return importlib.resources.open_binary(package_name, resource_name)
- except (ImportError, FileNotFoundError, UnicodeEncodeError):
- # There are three types of exception that can be raised that all amount
- # to "we cannot find this key":
- #
- # ImportError: If package_name doesn't exist (e.g. if tzdata is not
- # installed, or if there's an error in the folder name like
- # Amrica/New_York)
- # FileNotFoundError: If resource_name doesn't exist in the package
- # (e.g. Europe/Krasnoy)
- # UnicodeEncodeError: If package_name or resource_name are not UTF-8,
- # such as keys containing a surrogate character.
- raise ZoneInfoNotFoundError(f"No time zone found with key {key}")
-
-
-def load_data(fobj):
- header = _TZifHeader.from_file(fobj)
-
- if header.version == 1:
- time_size = 4
- time_type = "l"
- else:
- # Version 2+ has 64-bit integer transition times
- time_size = 8
- time_type = "q"
-
- # Version 2+ also starts with a Version 1 header and data, which
- # we need to skip now
- skip_bytes = (
- header.timecnt * 5 # Transition times and types
- + header.typecnt * 6 # Local time type records
- + header.charcnt # Time zone designations
- + header.leapcnt * 8 # Leap second records
- + header.isstdcnt # Standard/wall indicators
- + header.isutcnt # UT/local indicators
- )
-
- fobj.seek(skip_bytes, 1)
-
- # Now we need to read the second header, which is not the same
- # as the first
- header = _TZifHeader.from_file(fobj)
-
- typecnt = header.typecnt
- timecnt = header.timecnt
- charcnt = header.charcnt
-
- # The data portion starts with timecnt transitions and indices
- if timecnt:
- trans_list_utc = struct.unpack(
- f">{timecnt}{time_type}", fobj.read(timecnt * time_size)
- )
- trans_idx = struct.unpack(f">{timecnt}B", fobj.read(timecnt))
- else:
- trans_list_utc = ()
- trans_idx = ()
-
-    # Read the ttinfo struct: (utcoff, isdst, abbrind)
- if typecnt:
- utcoff, isdst, abbrind = zip(
- *(struct.unpack(">lbb", fobj.read(6)) for i in range(typecnt))
- )
- else:
- utcoff = ()
- isdst = ()
- abbrind = ()
-
- # Now read the abbreviations. They are null-terminated strings, indexed
- # not by position in the array but by position in the unsplit
- # abbreviation string. I suppose this makes more sense in C, which uses
- # null to terminate the strings, but it's inconvenient here...
- abbr_vals = {}
- abbr_chars = fobj.read(charcnt)
-
- def get_abbr(idx):
- # Gets a string starting at idx and running until the next \x00
- #
- # We cannot pre-populate abbr_vals by splitting on \x00 because there
- # are some zones that use subsets of longer abbreviations, like so:
- #
- # LMT\x00AHST\x00HDT\x00
- #
- # Where the idx to abbr mapping should be:
- #
- # {0: "LMT", 4: "AHST", 5: "HST", 9: "HDT"}
- if idx not in abbr_vals:
- span_end = abbr_chars.find(b"\x00", idx)
- abbr_vals[idx] = abbr_chars[idx:span_end].decode()
-
- return abbr_vals[idx]
-
- abbr = tuple(get_abbr(idx) for idx in abbrind)
-
- # The remainder of the file consists of leap seconds (currently unused) and
- # the standard/wall and ut/local indicators, which are metadata we don't need.
- # In version 2 files, we need to skip the unnecessary data to get at the TZ string:
- if header.version >= 2:
- # Each leap second record has size (time_size + 4)
- skip_bytes = header.isutcnt + header.isstdcnt + header.leapcnt * 12
- fobj.seek(skip_bytes, 1)
-
- c = fobj.read(1) # Should be \n
- assert c == b"\n", c
-
- tz_bytes = b""
- while (c := fobj.read(1)) != b"\n":
- tz_bytes += c
-
- tz_str = tz_bytes
- else:
- tz_str = None
-
- return trans_idx, trans_list_utc, utcoff, isdst, abbr, tz_str
-
-
-class _TZifHeader:
- __slots__ = [
- "version",
- "isutcnt",
- "isstdcnt",
- "leapcnt",
- "timecnt",
- "typecnt",
- "charcnt",
- ]
-
- def __init__(self, *args):
- assert len(self.__slots__) == len(args)
- for attr, val in zip(self.__slots__, args):
- setattr(self, attr, val)
-
- @classmethod
- def from_file(cls, stream):
- # The header starts with a 4-byte "magic" value
- if stream.read(4) != b"TZif":
- raise ValueError("Invalid TZif file: magic not found")
-
- _version = stream.read(1)
- if _version == b"\x00":
- version = 1
- else:
- version = int(_version)
- stream.read(15)
-
- args = (version,)
-
- # Slots are defined in the order that the bytes are arranged
- args = args + struct.unpack(">6l", stream.read(24))
-
- return cls(*args)
-
-
-class ZoneInfoNotFoundError(KeyError):
- """Exception raised when a ZoneInfo key is not found."""
+import struct
+
+
+def load_tzdata(key):
+ import importlib.resources
+
+ components = key.split("/")
+ package_name = ".".join(["tzdata.zoneinfo"] + components[:-1])
+ resource_name = components[-1]
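+    # For example (illustrative), key 'America/Indiana/Knox' yields
+    # package_name 'tzdata.zoneinfo.America.Indiana' and resource_name 'Knox'.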
+
+ try:
+ return importlib.resources.open_binary(package_name, resource_name)
+ except (ImportError, FileNotFoundError, UnicodeEncodeError):
+ # There are three types of exception that can be raised that all amount
+ # to "we cannot find this key":
+ #
+ # ImportError: If package_name doesn't exist (e.g. if tzdata is not
+ # installed, or if there's an error in the folder name like
+ # Amrica/New_York)
+ # FileNotFoundError: If resource_name doesn't exist in the package
+ # (e.g. Europe/Krasnoy)
+ # UnicodeEncodeError: If package_name or resource_name are not UTF-8,
+ # such as keys containing a surrogate character.
+ raise ZoneInfoNotFoundError(f"No time zone found with key {key}")
+
+
+def load_data(fobj):
+ header = _TZifHeader.from_file(fobj)
+
+ if header.version == 1:
+ time_size = 4
+ time_type = "l"
+ else:
+ # Version 2+ has 64-bit integer transition times
+ time_size = 8
+ time_type = "q"
+
+ # Version 2+ also starts with a Version 1 header and data, which
+ # we need to skip now
+ skip_bytes = (
+ header.timecnt * 5 # Transition times and types
+ + header.typecnt * 6 # Local time type records
+ + header.charcnt # Time zone designations
+ + header.leapcnt * 8 # Leap second records
+ + header.isstdcnt # Standard/wall indicators
+ + header.isutcnt # UT/local indicators
+ )
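+    # e.g. (illustrative) timecnt=236, typecnt=4, charcnt=16, leapcnt=0,
+    # isstdcnt=4, isutcnt=4 skips 236*5 + 4*6 + 16 + 0 + 4 + 4 = 1228 bytes.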
+
+ fobj.seek(skip_bytes, 1)
+
+ # Now we need to read the second header, which is not the same
+ # as the first
+ header = _TZifHeader.from_file(fobj)
+
+ typecnt = header.typecnt
+ timecnt = header.timecnt
+ charcnt = header.charcnt
+
+ # The data portion starts with timecnt transitions and indices
+ if timecnt:
+ trans_list_utc = struct.unpack(
+ f">{timecnt}{time_type}", fobj.read(timecnt * time_size)
+ )
+ trans_idx = struct.unpack(f">{timecnt}B", fobj.read(timecnt))
+ else:
+ trans_list_utc = ()
+ trans_idx = ()
+
+    # Read the ttinfo struct: (utcoff, isdst, abbrind)
+ if typecnt:
+ utcoff, isdst, abbrind = zip(
+ *(struct.unpack(">lbb", fobj.read(6)) for i in range(typecnt))
+ )
+ else:
+ utcoff = ()
+ isdst = ()
+ abbrind = ()
+
+ # Now read the abbreviations. They are null-terminated strings, indexed
+ # not by position in the array but by position in the unsplit
+ # abbreviation string. I suppose this makes more sense in C, which uses
+ # null to terminate the strings, but it's inconvenient here...
+ abbr_vals = {}
+ abbr_chars = fobj.read(charcnt)
+
+ def get_abbr(idx):
+ # Gets a string starting at idx and running until the next \x00
+ #
+ # We cannot pre-populate abbr_vals by splitting on \x00 because there
+ # are some zones that use subsets of longer abbreviations, like so:
+ #
+ # LMT\x00AHST\x00HDT\x00
+ #
+ # Where the idx to abbr mapping should be:
+ #
+ # {0: "LMT", 4: "AHST", 5: "HST", 9: "HDT"}
+ if idx not in abbr_vals:
+ span_end = abbr_chars.find(b"\x00", idx)
+ abbr_vals[idx] = abbr_chars[idx:span_end].decode()
+
+ return abbr_vals[idx]
+
+ abbr = tuple(get_abbr(idx) for idx in abbrind)
+
+ # The remainder of the file consists of leap seconds (currently unused) and
+ # the standard/wall and ut/local indicators, which are metadata we don't need.
+ # In version 2 files, we need to skip the unnecessary data to get at the TZ string:
+ if header.version >= 2:
+ # Each leap second record has size (time_size + 4)
+ skip_bytes = header.isutcnt + header.isstdcnt + header.leapcnt * 12
+ fobj.seek(skip_bytes, 1)
+
+ c = fobj.read(1) # Should be \n
+ assert c == b"\n", c
+
+ tz_bytes = b""
+ while (c := fobj.read(1)) != b"\n":
+ tz_bytes += c
+
+ tz_str = tz_bytes
+ else:
+ tz_str = None
+
+ return trans_idx, trans_list_utc, utcoff, isdst, abbr, tz_str
+
+
+class _TZifHeader:
+ __slots__ = [
+ "version",
+ "isutcnt",
+ "isstdcnt",
+ "leapcnt",
+ "timecnt",
+ "typecnt",
+ "charcnt",
+ ]
+
+ def __init__(self, *args):
+ assert len(self.__slots__) == len(args)
+ for attr, val in zip(self.__slots__, args):
+ setattr(self, attr, val)
+
+ @classmethod
+ def from_file(cls, stream):
+ # The header starts with a 4-byte "magic" value
+ if stream.read(4) != b"TZif":
+ raise ValueError("Invalid TZif file: magic not found")
+
+ _version = stream.read(1)
+ if _version == b"\x00":
+ version = 1
+ else:
+ version = int(_version)
+ stream.read(15)
+
+ args = (version,)
+
+ # Slots are defined in the order that the bytes are arranged
+ args = args + struct.unpack(">6l", stream.read(24))
+
+ return cls(*args)
+
+
+class ZoneInfoNotFoundError(KeyError):
+ """Exception raised when a ZoneInfo key is not found."""
diff --git a/contrib/tools/python3/src/Lib/zoneinfo/_tzpath.py b/contrib/tools/python3/src/Lib/zoneinfo/_tzpath.py
index 672560b951..149e20ed94 100644
--- a/contrib/tools/python3/src/Lib/zoneinfo/_tzpath.py
+++ b/contrib/tools/python3/src/Lib/zoneinfo/_tzpath.py
@@ -1,175 +1,175 @@
-import os
-import sysconfig
-
-
-def reset_tzpath(to=None):
- global TZPATH
-
- tzpaths = to
- if tzpaths is not None:
- if isinstance(tzpaths, (str, bytes)):
- raise TypeError(
-                "tzpaths must be a list or tuple, "
-                f"not {type(tzpaths)}: {tzpaths!r}"
- )
-
- if not all(map(os.path.isabs, tzpaths)):
- raise ValueError(_get_invalid_paths_message(tzpaths))
- base_tzpath = tzpaths
- else:
- env_var = os.environ.get("PYTHONTZPATH", None)
- if env_var is not None:
- base_tzpath = _parse_python_tzpath(env_var)
- else:
- base_tzpath = _parse_python_tzpath(
- sysconfig.get_config_var("TZPATH")
- )
-
- TZPATH = tuple(base_tzpath)
-
-
-def _parse_python_tzpath(env_var):
- if not env_var:
- return ()
-
- raw_tzpath = env_var.split(os.pathsep)
- new_tzpath = tuple(filter(os.path.isabs, raw_tzpath))
-
- # If anything has been filtered out, we will warn about it
- if len(new_tzpath) != len(raw_tzpath):
- import warnings
-
- msg = _get_invalid_paths_message(raw_tzpath)
-
- warnings.warn(
- "Invalid paths specified in PYTHONTZPATH environment variable. "
- + msg,
- InvalidTZPathWarning,
- )
-
- return new_tzpath
-
-
-def _get_invalid_paths_message(tzpaths):
- invalid_paths = (path for path in tzpaths if not os.path.isabs(path))
-
- prefix = "\n "
- indented_str = prefix + prefix.join(invalid_paths)
-
- return (
- "Paths should be absolute but found the following relative paths:"
- + indented_str
- )
-
-
-def find_tzfile(key):
- """Retrieve the path to a TZif file from a key."""
- _validate_tzfile_path(key)
- for search_path in TZPATH:
- filepath = os.path.join(search_path, key)
- if os.path.isfile(filepath):
- return filepath
-
- return None
-
-
-_TEST_PATH = os.path.normpath(os.path.join("_", "_"))[:-1]
-
-
-def _validate_tzfile_path(path, _base=_TEST_PATH):
- if os.path.isabs(path):
- raise ValueError(
- f"ZoneInfo keys may not be absolute paths, got: {path}"
- )
-
- # We only care about the kinds of path normalizations that would change the
- # length of the key - e.g. a/../b -> a/b, or a/b/ -> a/b. On Windows,
- # normpath will also change from a/b to a\b, but that would still preserve
- # the length.
- new_path = os.path.normpath(path)
- if len(new_path) != len(path):
- raise ValueError(
- f"ZoneInfo keys must be normalized relative paths, got: {path}"
- )
-
- resolved = os.path.normpath(os.path.join(_base, new_path))
- if not resolved.startswith(_base):
- raise ValueError(
- f"ZoneInfo keys must refer to subdirectories of TZPATH, got: {path}"
- )
-
-
-del _TEST_PATH
-
-
-def available_timezones():
- """Returns a set containing all available time zones.
-
- .. caution::
-
- This may attempt to open a large number of files, since the best way to
-        determine whether a given file on the time zone search path is a valid
-        time zone file is to open it and check for the "magic string" at the
-        beginning.
- """
- from importlib import resources
-
- valid_zones = set()
-
- # Start with loading from the tzdata package if it exists: this has a
- # pre-assembled list of zones that only requires opening one file.
- try:
- with resources.open_text("tzdata", "zones") as f:
- for zone in f:
- zone = zone.strip()
- if zone:
- valid_zones.add(zone)
- except (ImportError, FileNotFoundError):
- pass
-
- def valid_key(fpath):
- try:
- with open(fpath, "rb") as f:
- return f.read(4) == b"TZif"
- except Exception: # pragma: nocover
- return False
-
- for tz_root in TZPATH:
- if not os.path.exists(tz_root):
- continue
-
- for root, dirnames, files in os.walk(tz_root):
- if root == tz_root:
- # right/ and posix/ are special directories and shouldn't be
- # included in the output of available zones
- if "right" in dirnames:
- dirnames.remove("right")
- if "posix" in dirnames:
- dirnames.remove("posix")
-
- for file in files:
- fpath = os.path.join(root, file)
-
- key = os.path.relpath(fpath, start=tz_root)
- if os.sep != "/": # pragma: nocover
- key = key.replace(os.sep, "/")
-
- if not key or key in valid_zones:
- continue
-
- if valid_key(fpath):
- valid_zones.add(key)
-
- if "posixrules" in valid_zones:
-        # posixrules is a special symlink-only time zone; where it exists, it
-        # should not be included in the output
- valid_zones.remove("posixrules")
-
- return valid_zones
-
-
-class InvalidTZPathWarning(RuntimeWarning):
- """Warning raised if an invalid path is specified in PYTHONTZPATH."""
-
-
-TZPATH = ()
-reset_tzpath()
+import os
+import sysconfig
+
+
+def reset_tzpath(to=None):
+ global TZPATH
+
+ tzpaths = to
+ if tzpaths is not None:
+ if isinstance(tzpaths, (str, bytes)):
+ raise TypeError(
+                "tzpaths must be a list or tuple, "
+                f"not {type(tzpaths)}: {tzpaths!r}"
+ )
+
+ if not all(map(os.path.isabs, tzpaths)):
+ raise ValueError(_get_invalid_paths_message(tzpaths))
+ base_tzpath = tzpaths
+ else:
+ env_var = os.environ.get("PYTHONTZPATH", None)
+ if env_var is not None:
+ base_tzpath = _parse_python_tzpath(env_var)
+ else:
+ base_tzpath = _parse_python_tzpath(
+ sysconfig.get_config_var("TZPATH")
+ )
+
+ TZPATH = tuple(base_tzpath)
+
+
+def _parse_python_tzpath(env_var):
+ if not env_var:
+ return ()
+
+ raw_tzpath = env_var.split(os.pathsep)
+ new_tzpath = tuple(filter(os.path.isabs, raw_tzpath))
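+    # For example (illustrative, POSIX): '/usr/share/zoneinfo:./tz' yields
+    # ('/usr/share/zoneinfo',); the relative './tz' triggers the warning below.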
+
+ # If anything has been filtered out, we will warn about it
+ if len(new_tzpath) != len(raw_tzpath):
+ import warnings
+
+ msg = _get_invalid_paths_message(raw_tzpath)
+
+ warnings.warn(
+ "Invalid paths specified in PYTHONTZPATH environment variable. "
+ + msg,
+ InvalidTZPathWarning,
+ )
+
+ return new_tzpath
+
+
+def _get_invalid_paths_message(tzpaths):
+ invalid_paths = (path for path in tzpaths if not os.path.isabs(path))
+
+ prefix = "\n "
+ indented_str = prefix + prefix.join(invalid_paths)
+
+ return (
+ "Paths should be absolute but found the following relative paths:"
+ + indented_str
+ )
+
+
+def find_tzfile(key):
+ """Retrieve the path to a TZif file from a key."""
+ _validate_tzfile_path(key)
+ for search_path in TZPATH:
+ filepath = os.path.join(search_path, key)
+ if os.path.isfile(filepath):
+ return filepath
+
+ return None
+
+
+_TEST_PATH = os.path.normpath(os.path.join("_", "_"))[:-1]
+
+
+def _validate_tzfile_path(path, _base=_TEST_PATH):
+ if os.path.isabs(path):
+ raise ValueError(
+ f"ZoneInfo keys may not be absolute paths, got: {path}"
+ )
+
+ # We only care about the kinds of path normalizations that would change the
+ # length of the key - e.g. a/../b -> a/b, or a/b/ -> a/b. On Windows,
+ # normpath will also change from a/b to a\b, but that would still preserve
+ # the length.
+ new_path = os.path.normpath(path)
+ if len(new_path) != len(path):
+ raise ValueError(
+ f"ZoneInfo keys must be normalized relative paths, got: {path}"
+ )
+
+ resolved = os.path.normpath(os.path.join(_base, new_path))
+ if not resolved.startswith(_base):
+ raise ValueError(
+ f"ZoneInfo keys must refer to subdirectories of TZPATH, got: {path}"
+ )
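+
+    # Illustrative rejections: '/etc/zone' (absolute), 'a/../b' (normpath
+    # shortens it to 'a/b'), '../key' (resolves outside _base).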
+
+
+del _TEST_PATH
+
+
+def available_timezones():
+ """Returns a set containing all available time zones.
+
+ .. caution::
+
+ This may attempt to open a large number of files, since the best way to
+        determine whether a given file on the time zone search path is a valid
+        time zone file is to open it and check for the "magic string" at the
+        beginning.
+ """
+ from importlib import resources
+
+ valid_zones = set()
+
+ # Start with loading from the tzdata package if it exists: this has a
+ # pre-assembled list of zones that only requires opening one file.
+ try:
+ with resources.open_text("tzdata", "zones") as f:
+ for zone in f:
+ zone = zone.strip()
+ if zone:
+ valid_zones.add(zone)
+ except (ImportError, FileNotFoundError):
+ pass
+
+ def valid_key(fpath):
+ try:
+ with open(fpath, "rb") as f:
+ return f.read(4) == b"TZif"
+ except Exception: # pragma: nocover
+ return False
+
+ for tz_root in TZPATH:
+ if not os.path.exists(tz_root):
+ continue
+
+ for root, dirnames, files in os.walk(tz_root):
+ if root == tz_root:
+ # right/ and posix/ are special directories and shouldn't be
+ # included in the output of available zones
+ if "right" in dirnames:
+ dirnames.remove("right")
+ if "posix" in dirnames:
+ dirnames.remove("posix")
+
+ for file in files:
+ fpath = os.path.join(root, file)
+
+ key = os.path.relpath(fpath, start=tz_root)
+ if os.sep != "/": # pragma: nocover
+ key = key.replace(os.sep, "/")
+
+ if not key or key in valid_zones:
+ continue
+
+ if valid_key(fpath):
+ valid_zones.add(key)
+
+ if "posixrules" in valid_zones:
+        # posixrules is a special symlink-only time zone; where it exists, it
+        # should not be included in the output
+ valid_zones.remove("posixrules")
+
+ return valid_zones
+
+
+class InvalidTZPathWarning(RuntimeWarning):
+ """Warning raised if an invalid path is specified in PYTHONTZPATH."""
+
+
+TZPATH = ()
+reset_tzpath()
diff --git a/contrib/tools/python3/src/Lib/zoneinfo/_zoneinfo.py b/contrib/tools/python3/src/Lib/zoneinfo/_zoneinfo.py
index de68380792..48077997f5 100644
--- a/contrib/tools/python3/src/Lib/zoneinfo/_zoneinfo.py
+++ b/contrib/tools/python3/src/Lib/zoneinfo/_zoneinfo.py
@@ -1,752 +1,752 @@
-import bisect
-import calendar
-import collections
-import functools
-import re
-import weakref
-from datetime import datetime, timedelta, tzinfo
-
-from . import _common, _tzpath
-
-EPOCH = datetime(1970, 1, 1)
-EPOCHORDINAL = datetime(1970, 1, 1).toordinal()
-
-# It is relatively expensive to construct new timedelta objects, and in most
-# cases we're looking at the same deltas, like integer numbers of hours, etc.
-# To improve speed and memory use, we'll keep a dictionary with references
-# to the ones we've already used so far.
-#
-# Loading every time zone in the 2020a version of the time zone database
-# requires 447 timedeltas, which requires approximately the amount of space
-# that ZoneInfo("America/New_York") with 236 transitions takes up, so we will
-# set the cache size to 512 so that in the common case we always get cache
-# hits, but specifically crafted ZoneInfo objects don't leak arbitrary amounts
-# of memory.
-@functools.lru_cache(maxsize=512)
-def _load_timedelta(seconds):
- return timedelta(seconds=seconds)
-
-
-class ZoneInfo(tzinfo):
- _strong_cache_size = 8
- _strong_cache = collections.OrderedDict()
- _weak_cache = weakref.WeakValueDictionary()
- __module__ = "zoneinfo"
-
- def __init_subclass__(cls):
- cls._strong_cache = collections.OrderedDict()
- cls._weak_cache = weakref.WeakValueDictionary()
-
- def __new__(cls, key):
- instance = cls._weak_cache.get(key, None)
- if instance is None:
- instance = cls._weak_cache.setdefault(key, cls._new_instance(key))
- instance._from_cache = True
-
- # Update the "strong" cache
- cls._strong_cache[key] = cls._strong_cache.pop(key, instance)
-
- if len(cls._strong_cache) > cls._strong_cache_size:
- cls._strong_cache.popitem(last=False)
-
- return instance
-
- @classmethod
- def no_cache(cls, key):
- obj = cls._new_instance(key)
- obj._from_cache = False
-
- return obj
-
- @classmethod
- def _new_instance(cls, key):
- obj = super().__new__(cls)
- obj._key = key
- obj._file_path = obj._find_tzfile(key)
-
- if obj._file_path is not None:
- file_obj = open(obj._file_path, "rb")
- else:
- file_obj = _common.load_tzdata(key)
-
- with file_obj as f:
- obj._load_file(f)
-
- return obj
-
- @classmethod
- def from_file(cls, fobj, /, key=None):
- obj = super().__new__(cls)
- obj._key = key
- obj._file_path = None
- obj._load_file(fobj)
- obj._file_repr = repr(fobj)
-
- # Disable pickling for objects created from files
- obj.__reduce__ = obj._file_reduce
-
- return obj
-
- @classmethod
- def clear_cache(cls, *, only_keys=None):
- if only_keys is not None:
- for key in only_keys:
- cls._weak_cache.pop(key, None)
- cls._strong_cache.pop(key, None)
-
- else:
- cls._weak_cache.clear()
- cls._strong_cache.clear()
-
- @property
- def key(self):
- return self._key
-
- def utcoffset(self, dt):
- return self._find_trans(dt).utcoff
-
- def dst(self, dt):
- return self._find_trans(dt).dstoff
-
- def tzname(self, dt):
- return self._find_trans(dt).tzname
-
- def fromutc(self, dt):
- """Convert from datetime in UTC to datetime in local time"""
-
- if not isinstance(dt, datetime):
- raise TypeError("fromutc() requires a datetime argument")
- if dt.tzinfo is not self:
- raise ValueError("dt.tzinfo is not self")
-
- timestamp = self._get_local_timestamp(dt)
- num_trans = len(self._trans_utc)
-
- if num_trans >= 1 and timestamp < self._trans_utc[0]:
- tti = self._tti_before
- fold = 0
- elif (
- num_trans == 0 or timestamp > self._trans_utc[-1]
- ) and not isinstance(self._tz_after, _ttinfo):
- tti, fold = self._tz_after.get_trans_info_fromutc(
- timestamp, dt.year
- )
- elif num_trans == 0:
- tti = self._tz_after
- fold = 0
- else:
- idx = bisect.bisect_right(self._trans_utc, timestamp)
-
- if num_trans > 1 and timestamp >= self._trans_utc[1]:
- tti_prev, tti = self._ttinfos[idx - 2 : idx]
- elif timestamp > self._trans_utc[-1]:
- tti_prev = self._ttinfos[-1]
- tti = self._tz_after
- else:
- tti_prev = self._tti_before
- tti = self._ttinfos[0]
-
- # Detect fold
- shift = tti_prev.utcoff - tti.utcoff
- fold = shift.total_seconds() > timestamp - self._trans_utc[idx - 1]
- dt += tti.utcoff
- if fold:
- return dt.replace(fold=1)
- else:
- return dt
-
- def _find_trans(self, dt):
- if dt is None:
- if self._fixed_offset:
- return self._tz_after
- else:
- return _NO_TTINFO
-
- ts = self._get_local_timestamp(dt)
-
- lt = self._trans_local[dt.fold]
-
- num_trans = len(lt)
-
- if num_trans and ts < lt[0]:
- return self._tti_before
- elif not num_trans or ts > lt[-1]:
- if isinstance(self._tz_after, _TZStr):
- return self._tz_after.get_trans_info(ts, dt.year, dt.fold)
- else:
- return self._tz_after
- else:
- # idx is the transition that occurs after this timestamp, so we
- # subtract off 1 to get the current ttinfo
- idx = bisect.bisect_right(lt, ts) - 1
- assert idx >= 0
- return self._ttinfos[idx]
-
- def _get_local_timestamp(self, dt):
- return (
- (dt.toordinal() - EPOCHORDINAL) * 86400
- + dt.hour * 3600
- + dt.minute * 60
- + dt.second
- )
-
- def __str__(self):
- if self._key is not None:
- return f"{self._key}"
- else:
- return repr(self)
-
- def __repr__(self):
- if self._key is not None:
- return f"{self.__class__.__name__}(key={self._key!r})"
- else:
- return f"{self.__class__.__name__}.from_file({self._file_repr})"
-
- def __reduce__(self):
- return (self.__class__._unpickle, (self._key, self._from_cache))
-
- def _file_reduce(self):
- import pickle
-
- raise pickle.PicklingError(
- "Cannot pickle a ZoneInfo file created from a file stream."
- )
-
- @classmethod
- def _unpickle(cls, key, from_cache, /):
- if from_cache:
- return cls(key)
- else:
- return cls.no_cache(key)
-
- def _find_tzfile(self, key):
- return _tzpath.find_tzfile(key)
-
- def _load_file(self, fobj):
- # Retrieve all the data as it exists in the zoneinfo file
- trans_idx, trans_utc, utcoff, isdst, abbr, tz_str = _common.load_data(
- fobj
- )
-
- # Infer the DST offsets (needed for .dst()) from the data
- dstoff = self._utcoff_to_dstoff(trans_idx, utcoff, isdst)
-
- # Convert all the transition times (UTC) into "seconds since 1970-01-01 local time"
- trans_local = self._ts_to_local(trans_idx, trans_utc, utcoff)
-
- # Construct `_ttinfo` objects for each transition in the file
- _ttinfo_list = [
- _ttinfo(
- _load_timedelta(utcoffset), _load_timedelta(dstoffset), tzname
- )
- for utcoffset, dstoffset, tzname in zip(utcoff, dstoff, abbr)
- ]
-
- self._trans_utc = trans_utc
- self._trans_local = trans_local
- self._ttinfos = [_ttinfo_list[idx] for idx in trans_idx]
-
- # Find the first non-DST transition
- for i in range(len(isdst)):
- if not isdst[i]:
- self._tti_before = _ttinfo_list[i]
- break
- else:
- if self._ttinfos:
- self._tti_before = self._ttinfos[0]
- else:
- self._tti_before = None
-
- # Set the "fallback" time zone
- if tz_str is not None and tz_str != b"":
- self._tz_after = _parse_tz_str(tz_str.decode())
- else:
- if not self._ttinfos and not _ttinfo_list:
- raise ValueError("No time zone information found.")
-
- if self._ttinfos:
- self._tz_after = self._ttinfos[-1]
- else:
- self._tz_after = _ttinfo_list[-1]
-
- # Determine if this is a "fixed offset" zone, meaning that the output
- # of the utcoffset, dst and tzname functions does not depend on the
- # specific datetime passed.
- #
- # We make three simplifying assumptions here:
- #
- # 1. If _tz_after is not a _ttinfo, it has transitions that might
- # actually occur (it is possible to construct TZ strings that
- # specify STD and DST but no transitions ever occur, such as
- # AAA0BBB,0/0,J365/25).
- # 2. If _ttinfo_list contains more than one _ttinfo object, the objects
- # represent different offsets.
- # 3. _ttinfo_list contains no unused _ttinfos (in which case an
- # otherwise fixed-offset zone with extra _ttinfos defined may
- # appear to *not* be a fixed offset zone).
- #
-    # Violations of these assumptions would be fairly exotic, and exotic
- # zones should almost certainly not be used with datetime.time (the
- # only thing that would be affected by this).
- if len(_ttinfo_list) > 1 or not isinstance(self._tz_after, _ttinfo):
- self._fixed_offset = False
- elif not _ttinfo_list:
- self._fixed_offset = True
- else:
- self._fixed_offset = _ttinfo_list[0] == self._tz_after
-
- @staticmethod
- def _utcoff_to_dstoff(trans_idx, utcoffsets, isdsts):
- # Now we must transform our ttis and abbrs into `_ttinfo` objects,
- # but there is an issue: .dst() must return a timedelta with the
- # difference between utcoffset() and the "standard" offset, but
- # the "base offset" and "DST offset" are not encoded in the file;
- # we can infer what they are from the isdst flag, but it is not
-        # sufficient to just look at the last standard offset, because
- # occasionally countries will shift both DST offset and base offset.
-
- typecnt = len(isdsts)
- dstoffs = [0] * typecnt # Provisionally assign all to 0.
- dst_cnt = sum(isdsts)
- dst_found = 0
-
- for i in range(1, len(trans_idx)):
- if dst_cnt == dst_found:
- break
-
- idx = trans_idx[i]
-
- dst = isdsts[idx]
-
- # We're only going to look at daylight saving time
- if not dst:
- continue
-
- # Skip any offsets that have already been assigned
- if dstoffs[idx] != 0:
- continue
-
- dstoff = 0
- utcoff = utcoffsets[idx]
-
- comp_idx = trans_idx[i - 1]
-
- if not isdsts[comp_idx]:
- dstoff = utcoff - utcoffsets[comp_idx]
-
- if not dstoff and idx < (typecnt - 1):
- comp_idx = trans_idx[i + 1]
-
- # If the following transition is also DST and we couldn't
- # find the DST offset by this point, we're going to have to
- # skip it and hope this transition gets assigned later
- if isdsts[comp_idx]:
- continue
-
- dstoff = utcoff - utcoffsets[comp_idx]
-
- if dstoff:
- dst_found += 1
- dstoffs[idx] = dstoff
- else:
- # If we didn't find a valid value for a given index, we'll end up
- # with dstoff = 0 for something where `isdst=1`. This is obviously
- # wrong - one hour will be a much better guess than 0
- for idx in range(typecnt):
- if not dstoffs[idx] and isdsts[idx]:
- dstoffs[idx] = 3600
-
- return dstoffs
-
- @staticmethod
- def _ts_to_local(trans_idx, trans_list_utc, utcoffsets):
- """Generate number of seconds since 1970 *in the local time*.
-
-        This is necessary to easily find the transition times in local time."""
- if not trans_list_utc:
- return [[], []]
-
- # Start with the timestamps and modify in-place
- trans_list_wall = [list(trans_list_utc), list(trans_list_utc)]
-
- if len(utcoffsets) > 1:
- offset_0 = utcoffsets[0]
- offset_1 = utcoffsets[trans_idx[0]]
- if offset_1 > offset_0:
- offset_1, offset_0 = offset_0, offset_1
- else:
- offset_0 = offset_1 = utcoffsets[0]
-
- trans_list_wall[0][0] += offset_0
- trans_list_wall[1][0] += offset_1
-
- for i in range(1, len(trans_idx)):
- offset_0 = utcoffsets[trans_idx[i - 1]]
- offset_1 = utcoffsets[trans_idx[i]]
-
- if offset_1 > offset_0:
- offset_1, offset_0 = offset_0, offset_1
-
- trans_list_wall[0][i] += offset_0
- trans_list_wall[1][i] += offset_1
-
- return trans_list_wall
-
-
-class _ttinfo:
- __slots__ = ["utcoff", "dstoff", "tzname"]
-
- def __init__(self, utcoff, dstoff, tzname):
- self.utcoff = utcoff
- self.dstoff = dstoff
- self.tzname = tzname
-
- def __eq__(self, other):
- return (
- self.utcoff == other.utcoff
- and self.dstoff == other.dstoff
- and self.tzname == other.tzname
- )
-
- def __repr__(self): # pragma: nocover
- return (
- f"{self.__class__.__name__}"
- + f"({self.utcoff}, {self.dstoff}, {self.tzname})"
- )
-
-
-_NO_TTINFO = _ttinfo(None, None, None)
-
-
-class _TZStr:
- __slots__ = (
- "std",
- "dst",
- "start",
- "end",
- "get_trans_info",
- "get_trans_info_fromutc",
- "dst_diff",
- )
-
- def __init__(
- self, std_abbr, std_offset, dst_abbr, dst_offset, start=None, end=None
- ):
- self.dst_diff = dst_offset - std_offset
- std_offset = _load_timedelta(std_offset)
- self.std = _ttinfo(
- utcoff=std_offset, dstoff=_load_timedelta(0), tzname=std_abbr
- )
-
- self.start = start
- self.end = end
-
- dst_offset = _load_timedelta(dst_offset)
- delta = _load_timedelta(self.dst_diff)
- self.dst = _ttinfo(utcoff=dst_offset, dstoff=delta, tzname=dst_abbr)
-
- # These are assertions because the constructor should only be called
- # by functions that would fail before passing start or end
- assert start is not None, "No transition start specified"
- assert end is not None, "No transition end specified"
-
- self.get_trans_info = self._get_trans_info
- self.get_trans_info_fromutc = self._get_trans_info_fromutc
-
- def transitions(self, year):
- start = self.start.year_to_epoch(year)
- end = self.end.year_to_epoch(year)
- return start, end
-
- def _get_trans_info(self, ts, year, fold):
- """Get the information about the current transition - tti"""
- start, end = self.transitions(year)
-
- # With fold = 0, the period (denominated in local time) with the
- # smaller offset starts at the end of the gap and ends at the end of
- # the fold; with fold = 1, it runs from the start of the gap to the
- # beginning of the fold.
- #
- # So in order to determine the DST boundaries we need to know both
- # the fold and whether DST is positive or negative (rare), and it
- # turns out that this boils down to fold XOR is_positive.
- if fold == (self.dst_diff >= 0):
- end -= self.dst_diff
- else:
- start += self.dst_diff
-
- if start < end:
- isdst = start <= ts < end
- else:
- isdst = not (end <= ts < start)
-
- return self.dst if isdst else self.std
-
- def _get_trans_info_fromutc(self, ts, year):
- start, end = self.transitions(year)
- start -= self.std.utcoff.total_seconds()
- end -= self.dst.utcoff.total_seconds()
-
- if start < end:
- isdst = start <= ts < end
- else:
- isdst = not (end <= ts < start)
-
- # For positive DST, the ambiguous period is one dst_diff after the end
- # of DST; for negative DST, the ambiguous period is one dst_diff before
- # the start of DST.
- if self.dst_diff > 0:
- ambig_start = end
- ambig_end = end + self.dst_diff
- else:
- ambig_start = start
- ambig_end = start - self.dst_diff
-
- fold = ambig_start <= ts < ambig_end
-
- return (self.dst if isdst else self.std, fold)
-
-
-def _post_epoch_days_before_year(year):
- """Get the number of days between 1970-01-01 and YEAR-01-01"""
- y = year - 1
- return y * 365 + y // 4 - y // 100 + y // 400 - EPOCHORDINAL
-
-
-class _DayOffset:
- __slots__ = ["d", "julian", "hour", "minute", "second"]
-
- def __init__(self, d, julian, hour=2, minute=0, second=0):
- if not (0 + julian) <= d <= 365:
- min_day = 0 + julian
- raise ValueError(f"d must be in [{min_day}, 365], not: {d}")
-
- self.d = d
- self.julian = julian
- self.hour = hour
- self.minute = minute
- self.second = second
-
- def year_to_epoch(self, year):
- days_before_year = _post_epoch_days_before_year(year)
-
- d = self.d
- if self.julian and d >= 59 and calendar.isleap(year):
- d += 1
-
- epoch = (days_before_year + d) * 86400
- epoch += self.hour * 3600 + self.minute * 60 + self.second
-
- return epoch
-
-
-class _CalendarOffset:
- __slots__ = ["m", "w", "d", "hour", "minute", "second"]
-
- _DAYS_BEFORE_MONTH = (
- -1,
- 0,
- 31,
- 59,
- 90,
- 120,
- 151,
- 181,
- 212,
- 243,
- 273,
- 304,
- 334,
- )
-
- def __init__(self, m, w, d, hour=2, minute=0, second=0):
- if not 0 < m <= 12:
- raise ValueError("m must be in (0, 12]")
-
- if not 0 < w <= 5:
- raise ValueError("w must be in (0, 5]")
-
- if not 0 <= d <= 6:
- raise ValueError("d must be in [0, 6]")
-
- self.m = m
- self.w = w
- self.d = d
- self.hour = hour
- self.minute = minute
- self.second = second
-
- @classmethod
- def _ymd2ord(cls, year, month, day):
- return (
- _post_epoch_days_before_year(year)
- + cls._DAYS_BEFORE_MONTH[month]
- + (month > 2 and calendar.isleap(year))
- + day
- )
-
- # TODO: These are not actually epoch dates as they are expressed in local time
- def year_to_epoch(self, year):
- """Calculates the datetime of the occurrence from the year"""
- # We know year and month, we need to convert w, d into day of month
- #
- # Week 1 is the first week in which day `d` (where 0 = Sunday) appears.
- # Week 5 represents the last occurrence of day `d`, so we need to know
- # the range of the month.
- first_day, days_in_month = calendar.monthrange(year, self.m)
-
- # This equation seems magical, so I'll break it down:
- # 1. calendar says 0 = Monday, POSIX says 0 = Sunday
- # so we need first_day + 1 to get 1 = Monday -> 7 = Sunday,
- # which is still equivalent because this math is mod 7
- # 2. Get first day - desired day mod 7: -1 % 7 = 6, so we don't need
- # to do anything to adjust negative numbers.
- # 3. Add 1 because month days are a 1-based index.
- month_day = (self.d - (first_day + 1)) % 7 + 1
-
- # Now use a 0-based index version of `w` to calculate the w-th
- # occurrence of `d`
- month_day += (self.w - 1) * 7
-
- # month_day will only be > days_in_month if w was 5, and `w` means
- # "last occurrence of `d`", so now we just check if we over-shot the
- # end of the month and if so knock off 1 week.
- if month_day > days_in_month:
- month_day -= 7
-
- ordinal = self._ymd2ord(year, self.m, month_day)
- epoch = ordinal * 86400
- epoch += self.hour * 3600 + self.minute * 60 + self.second
- return epoch
-
-
-def _parse_tz_str(tz_str):
- # The tz string has the format:
- #
- # std[offset[dst[offset],start[/time],end[/time]]]
- #
- # std and dst must be 3 or more characters long and must not contain
-    # a leading colon, embedded digits, commas, nor plus or minus signs.
- # The spaces between "std" and "offset" are only for display and are
- # not actually present in the string.
- #
- # The format of the offset is ``[+|-]hh[:mm[:ss]]``
-
- offset_str, *start_end_str = tz_str.split(",", 1)
-
- # fmt: off
- parser_re = re.compile(
- r"(?P<std>[^<0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" +
- r"((?P<stdoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?)" +
- r"((?P<dst>[^0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" +
- r"((?P<dstoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?))?" +
- r")?" + # dst
- r")?$" # stdoff
- )
- # fmt: on
-
- m = parser_re.match(offset_str)
-
- if m is None:
- raise ValueError(f"{tz_str} is not a valid TZ string")
-
- std_abbr = m.group("std")
- dst_abbr = m.group("dst")
- dst_offset = None
-
- std_abbr = std_abbr.strip("<>")
-
- if dst_abbr:
- dst_abbr = dst_abbr.strip("<>")
-
- if std_offset := m.group("stdoff"):
- try:
- std_offset = _parse_tz_delta(std_offset)
- except ValueError as e:
- raise ValueError(f"Invalid STD offset in {tz_str}") from e
- else:
- std_offset = 0
-
- if dst_abbr is not None:
- if dst_offset := m.group("dstoff"):
- try:
- dst_offset = _parse_tz_delta(dst_offset)
- except ValueError as e:
- raise ValueError(f"Invalid DST offset in {tz_str}") from e
- else:
- dst_offset = std_offset + 3600
-
- if not start_end_str:
- raise ValueError(f"Missing transition rules: {tz_str}")
-
- start_end_strs = start_end_str[0].split(",", 1)
- try:
- start, end = (_parse_dst_start_end(x) for x in start_end_strs)
- except ValueError as e:
- raise ValueError(f"Invalid TZ string: {tz_str}") from e
-
- return _TZStr(std_abbr, std_offset, dst_abbr, dst_offset, start, end)
- elif start_end_str:
- raise ValueError(f"Transition rule present without DST: {tz_str}")
- else:
- # This is a static ttinfo, don't return _TZStr
- return _ttinfo(
- _load_timedelta(std_offset), _load_timedelta(0), std_abbr
- )
-
-
-def _parse_dst_start_end(dststr):
- date, *time = dststr.split("/")
- if date[0] == "M":
- n_is_julian = False
-        m = re.match(r"M(\d{1,2})\.(\d)\.(\d)$", date)
- if m is None:
- raise ValueError(f"Invalid dst start/end date: {dststr}")
- date_offset = tuple(map(int, m.groups()))
- offset = _CalendarOffset(*date_offset)
- else:
- if date[0] == "J":
- n_is_julian = True
- date = date[1:]
- else:
- n_is_julian = False
-
- doy = int(date)
- offset = _DayOffset(doy, n_is_julian)
-
- if time:
- time_components = list(map(int, time[0].split(":")))
- n_components = len(time_components)
- if n_components < 3:
- time_components.extend([0] * (3 - n_components))
- offset.hour, offset.minute, offset.second = time_components
-
- return offset
-
-
-def _parse_tz_delta(tz_delta):
- match = re.match(
- r"(?P<sign>[+-])?(?P<h>\d{1,2})(:(?P<m>\d{2})(:(?P<s>\d{2}))?)?",
- tz_delta,
- )
- # Anything passed to this function should already have hit an equivalent
- # regular expression to find the section to parse.
- assert match is not None, tz_delta
-
- h, m, s = (
- int(v) if v is not None else 0
- for v in map(match.group, ("h", "m", "s"))
- )
-
- total = h * 3600 + m * 60 + s
-
- if not -86400 < total < 86400:
- raise ValueError(
- f"Offset must be strictly between -24h and +24h: {tz_delta}"
- )
-
- # Yes, +5 maps to an offset of -5h
- if match.group("sign") != "-":
- total *= -1
-
- return total
+import bisect
+import calendar
+import collections
+import functools
+import re
+import weakref
+from datetime import datetime, timedelta, tzinfo
+
+from . import _common, _tzpath
+
+EPOCH = datetime(1970, 1, 1)
+EPOCHORDINAL = datetime(1970, 1, 1).toordinal()
+
+# It is relatively expensive to construct new timedelta objects, and in most
+# cases we're looking at the same deltas, like integer numbers of hours, etc.
+# To improve speed and memory use, we'll keep a dictionary with references
+# to the ones we've already used so far.
+#
+# Loading every time zone in the 2020a version of the time zone database
+# requires 447 timedeltas, which requires approximately the amount of space
+# that ZoneInfo("America/New_York") with 236 transitions takes up, so we will
+# set the cache size to 512 so that in the common case we always get cache
+# hits, but specifically crafted ZoneInfo objects don't leak arbitrary amounts
+# of memory.
+@functools.lru_cache(maxsize=512)
+def _load_timedelta(seconds):
+ return timedelta(seconds=seconds)
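+
+# Illustrative sketch (not part of the original module): thanks to the
+# lru_cache, repeated loads of a common offset hand back the *same* object:
+#
+#     >>> _load_timedelta(3600) is _load_timedelta(3600)
+#     True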
+
+
+class ZoneInfo(tzinfo):
+ _strong_cache_size = 8
+ _strong_cache = collections.OrderedDict()
+ _weak_cache = weakref.WeakValueDictionary()
+ __module__ = "zoneinfo"
+
+ def __init_subclass__(cls):
+ cls._strong_cache = collections.OrderedDict()
+ cls._weak_cache = weakref.WeakValueDictionary()
+
+ def __new__(cls, key):
+ instance = cls._weak_cache.get(key, None)
+ if instance is None:
+ instance = cls._weak_cache.setdefault(key, cls._new_instance(key))
+ instance._from_cache = True
+
+ # Update the "strong" cache
+ cls._strong_cache[key] = cls._strong_cache.pop(key, instance)
+
+ if len(cls._strong_cache) > cls._strong_cache_size:
+ cls._strong_cache.popitem(last=False)
+
+ return instance
+
+ @classmethod
+ def no_cache(cls, key):
+ obj = cls._new_instance(key)
+ obj._from_cache = False
+
+ return obj
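+
+    # Illustrative sketch (not part of the original module) of the two
+    # construction paths, assuming the key can be resolved on this system:
+    #
+    #     >>> ZoneInfo("America/New_York") is ZoneInfo("America/New_York")
+    #     True
+    #     >>> ZoneInfo.no_cache("America/New_York") is ZoneInfo("America/New_York")
+    #     False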
+
+ @classmethod
+ def _new_instance(cls, key):
+ obj = super().__new__(cls)
+ obj._key = key
+ obj._file_path = obj._find_tzfile(key)
+
+ if obj._file_path is not None:
+ file_obj = open(obj._file_path, "rb")
+ else:
+ file_obj = _common.load_tzdata(key)
+
+ with file_obj as f:
+ obj._load_file(f)
+
+ return obj
+
+ @classmethod
+ def from_file(cls, fobj, /, key=None):
+ obj = super().__new__(cls)
+ obj._key = key
+ obj._file_path = None
+ obj._load_file(fobj)
+ obj._file_repr = repr(fobj)
+
+ # Disable pickling for objects created from files
+ obj.__reduce__ = obj._file_reduce
+
+ return obj
+
+ @classmethod
+ def clear_cache(cls, *, only_keys=None):
+ if only_keys is not None:
+ for key in only_keys:
+ cls._weak_cache.pop(key, None)
+ cls._strong_cache.pop(key, None)
+
+ else:
+ cls._weak_cache.clear()
+ cls._strong_cache.clear()
+
+ @property
+ def key(self):
+ return self._key
+
+ def utcoffset(self, dt):
+ return self._find_trans(dt).utcoff
+
+ def dst(self, dt):
+ return self._find_trans(dt).dstoff
+
+ def tzname(self, dt):
+ return self._find_trans(dt).tzname
+
+ def fromutc(self, dt):
+ """Convert from datetime in UTC to datetime in local time"""
+
+ if not isinstance(dt, datetime):
+ raise TypeError("fromutc() requires a datetime argument")
+ if dt.tzinfo is not self:
+ raise ValueError("dt.tzinfo is not self")
+
+ timestamp = self._get_local_timestamp(dt)
+ num_trans = len(self._trans_utc)
+
+ if num_trans >= 1 and timestamp < self._trans_utc[0]:
+ tti = self._tti_before
+ fold = 0
+ elif (
+ num_trans == 0 or timestamp > self._trans_utc[-1]
+ ) and not isinstance(self._tz_after, _ttinfo):
+ tti, fold = self._tz_after.get_trans_info_fromutc(
+ timestamp, dt.year
+ )
+ elif num_trans == 0:
+ tti = self._tz_after
+ fold = 0
+ else:
+ idx = bisect.bisect_right(self._trans_utc, timestamp)
+
+ if num_trans > 1 and timestamp >= self._trans_utc[1]:
+ tti_prev, tti = self._ttinfos[idx - 2 : idx]
+ elif timestamp > self._trans_utc[-1]:
+ tti_prev = self._ttinfos[-1]
+ tti = self._tz_after
+ else:
+ tti_prev = self._tti_before
+ tti = self._ttinfos[0]
+
+ # Detect fold
+ shift = tti_prev.utcoff - tti.utcoff
+ fold = shift.total_seconds() > timestamp - self._trans_utc[idx - 1]
+ dt += tti.utcoff
+ if fold:
+ return dt.replace(fold=1)
+ else:
+ return dt
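+
+    # Illustrative sketch (not part of the original module): fromutc() is what
+    # drives astimezone(), and the fold detection above is visible around the
+    # 2020-11-01 EDT->EST transition (06:00 UTC), where both instants map to
+    # the same 01:30 wall time:
+    #
+    #     >>> from datetime import datetime, timezone
+    #     >>> tz = ZoneInfo("America/New_York")
+    #     >>> datetime(2020, 11, 1, 5, 30, tzinfo=timezone.utc).astimezone(tz).fold
+    #     0
+    #     >>> datetime(2020, 11, 1, 6, 30, tzinfo=timezone.utc).astimezone(tz).fold
+    #     1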
+
+ def _find_trans(self, dt):
+ if dt is None:
+ if self._fixed_offset:
+ return self._tz_after
+ else:
+ return _NO_TTINFO
+
+ ts = self._get_local_timestamp(dt)
+
+ lt = self._trans_local[dt.fold]
+
+ num_trans = len(lt)
+
+ if num_trans and ts < lt[0]:
+ return self._tti_before
+ elif not num_trans or ts > lt[-1]:
+ if isinstance(self._tz_after, _TZStr):
+ return self._tz_after.get_trans_info(ts, dt.year, dt.fold)
+ else:
+ return self._tz_after
+ else:
+ # idx is the transition that occurs after this timestamp, so we
+ # subtract off 1 to get the current ttinfo
+ idx = bisect.bisect_right(lt, ts) - 1
+ assert idx >= 0
+ return self._ttinfos[idx]
+
+ def _get_local_timestamp(self, dt):
+ return (
+ (dt.toordinal() - EPOCHORDINAL) * 86400
+ + dt.hour * 3600
+ + dt.minute * 60
+ + dt.second
+ )
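+
+    # Illustrative note (not part of the original module): this is a naive
+    # "wall clock" timestamp that ignores tzinfo, fold and microseconds, e.g.
+    # _get_local_timestamp(datetime(1970, 1, 2)) == 86400 for any tzinfo.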
+
+ def __str__(self):
+ if self._key is not None:
+ return f"{self._key}"
+ else:
+ return repr(self)
+
+ def __repr__(self):
+ if self._key is not None:
+ return f"{self.__class__.__name__}(key={self._key!r})"
+ else:
+ return f"{self.__class__.__name__}.from_file({self._file_repr})"
+
+ def __reduce__(self):
+ return (self.__class__._unpickle, (self._key, self._from_cache))
+
+ def _file_reduce(self):
+ import pickle
+
+ raise pickle.PicklingError(
+ "Cannot pickle a ZoneInfo file created from a file stream."
+ )
+
+ @classmethod
+ def _unpickle(cls, key, from_cache, /):
+ if from_cache:
+ return cls(key)
+ else:
+ return cls.no_cache(key)
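+
+    # Illustrative sketch (not part of the original module): pickling is by
+    # key, so a cached instance round-trips back to the cached singleton,
+    # while a no_cache() instance stays distinct:
+    #
+    #     >>> import pickle
+    #     >>> pickle.loads(pickle.dumps(ZoneInfo("UTC"))) is ZoneInfo("UTC")
+    #     True
+    #     >>> pickle.loads(pickle.dumps(ZoneInfo.no_cache("UTC"))) is ZoneInfo("UTC")
+    #     False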
+
+ def _find_tzfile(self, key):
+ return _tzpath.find_tzfile(key)
+
+ def _load_file(self, fobj):
+ # Retrieve all the data as it exists in the zoneinfo file
+ trans_idx, trans_utc, utcoff, isdst, abbr, tz_str = _common.load_data(
+ fobj
+ )
+
+ # Infer the DST offsets (needed for .dst()) from the data
+ dstoff = self._utcoff_to_dstoff(trans_idx, utcoff, isdst)
+
+ # Convert all the transition times (UTC) into "seconds since 1970-01-01 local time"
+ trans_local = self._ts_to_local(trans_idx, trans_utc, utcoff)
+
+ # Construct `_ttinfo` objects for each transition in the file
+ _ttinfo_list = [
+ _ttinfo(
+ _load_timedelta(utcoffset), _load_timedelta(dstoffset), tzname
+ )
+ for utcoffset, dstoffset, tzname in zip(utcoff, dstoff, abbr)
+ ]
+
+ self._trans_utc = trans_utc
+ self._trans_local = trans_local
+ self._ttinfos = [_ttinfo_list[idx] for idx in trans_idx]
+
+ # Find the first non-DST transition
+ for i in range(len(isdst)):
+ if not isdst[i]:
+ self._tti_before = _ttinfo_list[i]
+ break
+ else:
+ if self._ttinfos:
+ self._tti_before = self._ttinfos[0]
+ else:
+ self._tti_before = None
+
+ # Set the "fallback" time zone
+ if tz_str is not None and tz_str != b"":
+ self._tz_after = _parse_tz_str(tz_str.decode())
+ else:
+ if not self._ttinfos and not _ttinfo_list:
+ raise ValueError("No time zone information found.")
+
+ if self._ttinfos:
+ self._tz_after = self._ttinfos[-1]
+ else:
+ self._tz_after = _ttinfo_list[-1]
+
+ # Determine if this is a "fixed offset" zone, meaning that the output
+ # of the utcoffset, dst and tzname functions does not depend on the
+ # specific datetime passed.
+ #
+ # We make three simplifying assumptions here:
+ #
+ # 1. If _tz_after is not a _ttinfo, it has transitions that might
+ # actually occur (it is possible to construct TZ strings that
+ # specify STD and DST but no transitions ever occur, such as
+ # AAA0BBB,0/0,J365/25).
+ # 2. If _ttinfo_list contains more than one _ttinfo object, the objects
+ # represent different offsets.
+ # 3. _ttinfo_list contains no unused _ttinfos (in which case an
+ # otherwise fixed-offset zone with extra _ttinfos defined may
+ # appear to *not* be a fixed offset zone).
+ #
+    # Violations of these assumptions would be fairly exotic, and exotic
+ # zones should almost certainly not be used with datetime.time (the
+ # only thing that would be affected by this).
+ if len(_ttinfo_list) > 1 or not isinstance(self._tz_after, _ttinfo):
+ self._fixed_offset = False
+ elif not _ttinfo_list:
+ self._fixed_offset = True
+ else:
+ self._fixed_offset = _ttinfo_list[0] == self._tz_after
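+
+    # Illustrative sketch (not part of the original module): _fixed_offset is
+    # what lets utcoffset(None) return a real answer for offset-only zones:
+    #
+    #     >>> ZoneInfo("UTC").utcoffset(None)
+    #     datetime.timedelta(0)
+    #     >>> ZoneInfo("America/New_York").utcoffset(None) is None
+    #     True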
+
+ @staticmethod
+ def _utcoff_to_dstoff(trans_idx, utcoffsets, isdsts):
+ # Now we must transform our ttis and abbrs into `_ttinfo` objects,
+ # but there is an issue: .dst() must return a timedelta with the
+ # difference between utcoffset() and the "standard" offset, but
+ # the "base offset" and "DST offset" are not encoded in the file;
+ # we can infer what they are from the isdst flag, but it is not
+        # sufficient to just look at the last standard offset, because
+ # occasionally countries will shift both DST offset and base offset.
+
+ typecnt = len(isdsts)
+ dstoffs = [0] * typecnt # Provisionally assign all to 0.
+ dst_cnt = sum(isdsts)
+ dst_found = 0
+
+ for i in range(1, len(trans_idx)):
+ if dst_cnt == dst_found:
+ break
+
+ idx = trans_idx[i]
+
+ dst = isdsts[idx]
+
+ # We're only going to look at daylight saving time
+ if not dst:
+ continue
+
+ # Skip any offsets that have already been assigned
+ if dstoffs[idx] != 0:
+ continue
+
+ dstoff = 0
+ utcoff = utcoffsets[idx]
+
+ comp_idx = trans_idx[i - 1]
+
+ if not isdsts[comp_idx]:
+ dstoff = utcoff - utcoffsets[comp_idx]
+
+ if not dstoff and idx < (typecnt - 1):
+ comp_idx = trans_idx[i + 1]
+
+ # If the following transition is also DST and we couldn't
+ # find the DST offset by this point, we're going to have to
+ # skip it and hope this transition gets assigned later
+ if isdsts[comp_idx]:
+ continue
+
+ dstoff = utcoff - utcoffsets[comp_idx]
+
+ if dstoff:
+ dst_found += 1
+ dstoffs[idx] = dstoff
+ else:
+ # If we didn't find a valid value for a given index, we'll end up
+ # with dstoff = 0 for something where `isdst=1`. This is obviously
+ # wrong - one hour will be a much better guess than 0
+ for idx in range(typecnt):
+ if not dstoffs[idx] and isdsts[idx]:
+ dstoffs[idx] = 3600
+
+ return dstoffs
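+
+    # Worked example (not part of the original module), using EST/EDT-style
+    # data: type 0 is standard (-18000s), type 1 is DST (-14400s), and the
+    # DST offset is recovered from the neighbouring standard transition:
+    #
+    #     >>> ZoneInfo._utcoff_to_dstoff([0, 1, 0], [-18000, -14400], [0, 1])
+    #     [0, 3600]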
+
+ @staticmethod
+ def _ts_to_local(trans_idx, trans_list_utc, utcoffsets):
+ """Generate number of seconds since 1970 *in the local time*.
+
+        This is necessary to easily find the transition times in local time."""
+ if not trans_list_utc:
+ return [[], []]
+
+ # Start with the timestamps and modify in-place
+ trans_list_wall = [list(trans_list_utc), list(trans_list_utc)]
+
+ if len(utcoffsets) > 1:
+ offset_0 = utcoffsets[0]
+ offset_1 = utcoffsets[trans_idx[0]]
+ if offset_1 > offset_0:
+ offset_1, offset_0 = offset_0, offset_1
+ else:
+ offset_0 = offset_1 = utcoffsets[0]
+
+ trans_list_wall[0][0] += offset_0
+ trans_list_wall[1][0] += offset_1
+
+ for i in range(1, len(trans_idx)):
+ offset_0 = utcoffsets[trans_idx[i - 1]]
+ offset_1 = utcoffsets[trans_idx[i]]
+
+ if offset_1 > offset_0:
+ offset_1, offset_0 = offset_0, offset_1
+
+ trans_list_wall[0][i] += offset_0
+ trans_list_wall[1][i] += offset_1
+
+ return trans_list_wall
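+
+    # Worked example (not part of the original module): a single EST->EDT
+    # transition at UTC timestamp 1000000. The fold=0 list shifts it by the
+    # larger of the two surrounding offsets, the fold=1 list by the smaller:
+    #
+    #     >>> ZoneInfo._ts_to_local([1], [1000000], [-18000, -14400])
+    #     [[985600], [982000]]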
+
+
+class _ttinfo:
+ __slots__ = ["utcoff", "dstoff", "tzname"]
+
+ def __init__(self, utcoff, dstoff, tzname):
+ self.utcoff = utcoff
+ self.dstoff = dstoff
+ self.tzname = tzname
+
+ def __eq__(self, other):
+ return (
+ self.utcoff == other.utcoff
+ and self.dstoff == other.dstoff
+ and self.tzname == other.tzname
+ )
+
+ def __repr__(self): # pragma: nocover
+ return (
+ f"{self.__class__.__name__}"
+ + f"({self.utcoff}, {self.dstoff}, {self.tzname})"
+ )
+
+
+_NO_TTINFO = _ttinfo(None, None, None)
+
+
+class _TZStr:
+ __slots__ = (
+ "std",
+ "dst",
+ "start",
+ "end",
+ "get_trans_info",
+ "get_trans_info_fromutc",
+ "dst_diff",
+ )
+
+ def __init__(
+ self, std_abbr, std_offset, dst_abbr, dst_offset, start=None, end=None
+ ):
+ self.dst_diff = dst_offset - std_offset
+ std_offset = _load_timedelta(std_offset)
+ self.std = _ttinfo(
+ utcoff=std_offset, dstoff=_load_timedelta(0), tzname=std_abbr
+ )
+
+ self.start = start
+ self.end = end
+
+ dst_offset = _load_timedelta(dst_offset)
+ delta = _load_timedelta(self.dst_diff)
+ self.dst = _ttinfo(utcoff=dst_offset, dstoff=delta, tzname=dst_abbr)
+
+ # These are assertions because the constructor should only be called
+ # by functions that would fail before passing start or end
+ assert start is not None, "No transition start specified"
+ assert end is not None, "No transition end specified"
+
+ self.get_trans_info = self._get_trans_info
+ self.get_trans_info_fromutc = self._get_trans_info_fromutc
+
+ def transitions(self, year):
+ start = self.start.year_to_epoch(year)
+ end = self.end.year_to_epoch(year)
+ return start, end
+
+ def _get_trans_info(self, ts, year, fold):
+ """Get the information about the current transition - tti"""
+ start, end = self.transitions(year)
+
+ # With fold = 0, the period (denominated in local time) with the
+ # smaller offset starts at the end of the gap and ends at the end of
+ # the fold; with fold = 1, it runs from the start of the gap to the
+ # beginning of the fold.
+ #
+ # So in order to determine the DST boundaries we need to know both
+ # the fold and whether DST is positive or negative (rare), and it
+ # turns out that this boils down to fold XOR is_positive.
+ if fold == (self.dst_diff >= 0):
+ end -= self.dst_diff
+ else:
+ start += self.dst_diff
+
+ if start < end:
+ isdst = start <= ts < end
+ else:
+ isdst = not (end <= ts < start)
+
+ return self.dst if isdst else self.std
+
+ def _get_trans_info_fromutc(self, ts, year):
+ start, end = self.transitions(year)
+ start -= self.std.utcoff.total_seconds()
+ end -= self.dst.utcoff.total_seconds()
+
+ if start < end:
+ isdst = start <= ts < end
+ else:
+ isdst = not (end <= ts < start)
+
+ # For positive DST, the ambiguous period is one dst_diff after the end
+ # of DST; for negative DST, the ambiguous period is one dst_diff before
+ # the start of DST.
+ if self.dst_diff > 0:
+ ambig_start = end
+ ambig_end = end + self.dst_diff
+ else:
+ ambig_start = start
+ ambig_end = start - self.dst_diff
+
+ fold = ambig_start <= ts < ambig_end
+
+ return (self.dst if isdst else self.std, fold)
+
+
+def _post_epoch_days_before_year(year):
+ """Get the number of days between 1970-01-01 and YEAR-01-01"""
+ y = year - 1
+ return y * 365 + y // 4 - y // 100 + y // 400 - EPOCHORDINAL
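+
+# Illustrative note (not part of the original module): the result is actually
+# offset by one day -- _post_epoch_days_before_year(1970) == -1 -- which the
+# callers compensate for by adding 1-based day-of-year/month-day values, e.g.
+# (days_before_year + 1) * 86400 == 0 for 1970-01-01.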
+
+
+class _DayOffset:
+ __slots__ = ["d", "julian", "hour", "minute", "second"]
+
+ def __init__(self, d, julian, hour=2, minute=0, second=0):
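+        # `0 + julian` coerces the bool to an int: POSIX "J" (Julian) days
+        # are 1-based (J1..J365), while plain day-of-year values are 0-based.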
+ if not (0 + julian) <= d <= 365:
+ min_day = 0 + julian
+ raise ValueError(f"d must be in [{min_day}, 365], not: {d}")
+
+ self.d = d
+ self.julian = julian
+ self.hour = hour
+ self.minute = minute
+ self.second = second
+
+ def year_to_epoch(self, year):
+ days_before_year = _post_epoch_days_before_year(year)
+
+ d = self.d
+ if self.julian and d >= 59 and calendar.isleap(year):
+ d += 1
+
+ epoch = (days_before_year + d) * 86400
+ epoch += self.hour * 3600 + self.minute * 60 + self.second
+
+ return epoch
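+
+    # Worked example (not part of the original module): Julian day J60 always
+    # means March 1, so in a leap year d is bumped past Feb 29; the result is
+    # denominated in local wall-clock seconds, not true Unix time:
+    #
+    #     >>> _DayOffset(60, True).year_to_epoch(2020)  # 2020-03-01 02:00
+    #     1583028000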
+
+
+class _CalendarOffset:
+ __slots__ = ["m", "w", "d", "hour", "minute", "second"]
+
+ _DAYS_BEFORE_MONTH = (
+ -1,
+ 0,
+ 31,
+ 59,
+ 90,
+ 120,
+ 151,
+ 181,
+ 212,
+ 243,
+ 273,
+ 304,
+ 334,
+ )
+
+ def __init__(self, m, w, d, hour=2, minute=0, second=0):
+ if not 0 < m <= 12:
+ raise ValueError("m must be in (0, 12]")
+
+ if not 0 < w <= 5:
+ raise ValueError("w must be in (0, 5]")
+
+ if not 0 <= d <= 6:
+ raise ValueError("d must be in [0, 6]")
+
+ self.m = m
+ self.w = w
+ self.d = d
+ self.hour = hour
+ self.minute = minute
+ self.second = second
+
+ @classmethod
+ def _ymd2ord(cls, year, month, day):
+ return (
+ _post_epoch_days_before_year(year)
+ + cls._DAYS_BEFORE_MONTH[month]
+ + (month > 2 and calendar.isleap(year))
+ + day
+ )
+
+ # TODO: These are not actually epoch dates as they are expressed in local time
+ def year_to_epoch(self, year):
+ """Calculates the datetime of the occurrence from the year"""
+ # We know year and month, we need to convert w, d into day of month
+ #
+ # Week 1 is the first week in which day `d` (where 0 = Sunday) appears.
+ # Week 5 represents the last occurrence of day `d`, so we need to know
+ # the range of the month.
+ first_day, days_in_month = calendar.monthrange(year, self.m)
+
+ # This equation seems magical, so I'll break it down:
+ # 1. calendar says 0 = Monday, POSIX says 0 = Sunday
+ # so we need first_day + 1 to get 1 = Monday -> 7 = Sunday,
+ # which is still equivalent because this math is mod 7
+ # 2. Get first day - desired day mod 7: -1 % 7 = 6, so we don't need
+ # to do anything to adjust negative numbers.
+ # 3. Add 1 because month days are a 1-based index.
+ month_day = (self.d - (first_day + 1)) % 7 + 1
+
+ # Now use a 0-based index version of `w` to calculate the w-th
+ # occurrence of `d`
+ month_day += (self.w - 1) * 7
+
+ # month_day will only be > days_in_month if w was 5, and `w` means
+ # "last occurrence of `d`", so now we just check if we over-shot the
+ # end of the month and if so knock off 1 week.
+ if month_day > days_in_month:
+ month_day -= 7
+
+ ordinal = self._ymd2ord(year, self.m, month_day)
+ epoch = ordinal * 86400
+ epoch += self.hour * 3600 + self.minute * 60 + self.second
+ return epoch
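+
+    # Worked example (not part of the original module): M10.1.0 (first Sunday
+    # of October) in 2020. monthrange(2020, 10) gives first_day=3 (Thursday),
+    # so month_day = (0 - (3 + 1)) % 7 + 1 = 4, i.e. Sunday 2020-10-04.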
+
+
+def _parse_tz_str(tz_str):
+ # The tz string has the format:
+ #
+ # std[offset[dst[offset],start[/time],end[/time]]]
+ #
+ # std and dst must be 3 or more characters long and must not contain
+    # a leading colon, embedded digits, commas, nor plus or minus signs.
+ # The spaces between "std" and "offset" are only for display and are
+ # not actually present in the string.
+ #
+ # The format of the offset is ``[+|-]hh[:mm[:ss]]``
+
+ offset_str, *start_end_str = tz_str.split(",", 1)
+
+ # fmt: off
+ parser_re = re.compile(
+ r"(?P<std>[^<0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" +
+ r"((?P<stdoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?)" +
+ r"((?P<dst>[^0-9:.+-]+|<[a-zA-Z0-9+\-]+>)" +
+ r"((?P<dstoff>[+-]?\d{1,2}(:\d{2}(:\d{2})?)?))?" +
+ r")?" + # dst
+ r")?$" # stdoff
+ )
+ # fmt: on
+
+ m = parser_re.match(offset_str)
+
+ if m is None:
+ raise ValueError(f"{tz_str} is not a valid TZ string")
+
+ std_abbr = m.group("std")
+ dst_abbr = m.group("dst")
+ dst_offset = None
+
+ std_abbr = std_abbr.strip("<>")
+
+ if dst_abbr:
+ dst_abbr = dst_abbr.strip("<>")
+
+ if std_offset := m.group("stdoff"):
+ try:
+ std_offset = _parse_tz_delta(std_offset)
+ except ValueError as e:
+ raise ValueError(f"Invalid STD offset in {tz_str}") from e
+ else:
+ std_offset = 0
+
+ if dst_abbr is not None:
+ if dst_offset := m.group("dstoff"):
+ try:
+ dst_offset = _parse_tz_delta(dst_offset)
+ except ValueError as e:
+ raise ValueError(f"Invalid DST offset in {tz_str}") from e
+ else:
+ dst_offset = std_offset + 3600
+
+ if not start_end_str:
+ raise ValueError(f"Missing transition rules: {tz_str}")
+
+ start_end_strs = start_end_str[0].split(",", 1)
+ try:
+ start, end = (_parse_dst_start_end(x) for x in start_end_strs)
+ except ValueError as e:
+ raise ValueError(f"Invalid TZ string: {tz_str}") from e
+
+ return _TZStr(std_abbr, std_offset, dst_abbr, dst_offset, start, end)
+ elif start_end_str:
+ raise ValueError(f"Transition rule present without DST: {tz_str}")
+ else:
+ # This is a static ttinfo, don't return _TZStr
+ return _ttinfo(
+ _load_timedelta(std_offset), _load_timedelta(0), std_abbr
+ )
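+
+# Illustrative sketch (not part of the original module) of the three possible
+# outcomes:
+#
+#     _parse_tz_str("PST8PDT,M3.2.0,M11.1.0")  ->  _TZStr (std=-28800s, dst=-25200s)
+#     _parse_tz_str("UTC0")                    ->  _ttinfo(0:00:00, 0:00:00, UTC)
+#     _parse_tz_str("EST5EDT")                 ->  ValueError (missing rules)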
+
+
+def _parse_dst_start_end(dststr):
+ date, *time = dststr.split("/")
+ if date[0] == "M":
+ n_is_julian = False
+        m = re.match(r"M(\d{1,2})\.(\d)\.(\d)$", date)
+ if m is None:
+ raise ValueError(f"Invalid dst start/end date: {dststr}")
+ date_offset = tuple(map(int, m.groups()))
+ offset = _CalendarOffset(*date_offset)
+ else:
+ if date[0] == "J":
+ n_is_julian = True
+ date = date[1:]
+ else:
+ n_is_julian = False
+
+ doy = int(date)
+ offset = _DayOffset(doy, n_is_julian)
+
+ if time:
+ time_components = list(map(int, time[0].split(":")))
+ n_components = len(time_components)
+ if n_components < 3:
+ time_components.extend([0] * (3 - n_components))
+ offset.hour, offset.minute, offset.second = time_components
+
+ return offset
+
+
+def _parse_tz_delta(tz_delta):
+ match = re.match(
+ r"(?P<sign>[+-])?(?P<h>\d{1,2})(:(?P<m>\d{2})(:(?P<s>\d{2}))?)?",
+ tz_delta,
+ )
+ # Anything passed to this function should already have hit an equivalent
+ # regular expression to find the section to parse.
+ assert match is not None, tz_delta
+
+ h, m, s = (
+ int(v) if v is not None else 0
+ for v in map(match.group, ("h", "m", "s"))
+ )
+
+ total = h * 3600 + m * 60 + s
+
+ if not -86400 < total < 86400:
+ raise ValueError(
+ f"Offset must be strictly between -24h and +24h: {tz_delta}"
+ )
+
+ # Yes, +5 maps to an offset of -5h
+ if match.group("sign") != "-":
+ total *= -1
+
+ return total
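+
+# Illustrative sketch (not part of the original module) of the POSIX sign
+# inversion noted above -- positive TZ offsets lie *west* of UTC:
+#
+#     >>> _parse_tz_delta("5")      # as in "EST5"
+#     -18000
+#     >>> _parse_tz_delta("-5:30")  # east of UTC
+#     19800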